summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--.gitignore25
-rwxr-xr-xbuild.xml241
-rw-r--r--dbuild-meta.json79
-rw-r--r--docs/LICENSE56
-rw-r--r--src/build/dbuild-meta-json-gen.scala18
-rw-r--r--src/build/maven/maven-deploy.xml5
-rw-r--r--src/build/maven/scala-compiler-pom.xml12
-rw-r--r--src/build/maven/scala-parser-combinators-pom.xml59
-rw-r--r--src/build/maven/scala-xml-pom.xml59
-rw-r--r--src/build/pack.xml15
-rw-r--r--src/compiler/scala/reflect/reify/codegen/GenSymbols.scala8
-rw-r--r--src/compiler/scala/reflect/reify/codegen/GenTrees.scala6
-rw-r--r--src/compiler/scala/reflect/reify/utils/Extractors.scala18
-rw-r--r--src/compiler/scala/reflect/reify/utils/NodePrinters.scala2
-rw-r--r--src/compiler/scala/tools/ant/templates/tool-windows.tmpl68
-rw-r--r--src/compiler/scala/tools/nsc/Global.scala33
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreeGen.scala36
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/Parsers.scala73
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala9
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala3
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala11
-rw-r--r--src/compiler/scala/tools/nsc/transform/Constructors.scala2
-rw-r--r--src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala22
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/Solving.scala9
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Contexts.scala6
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Implicits.scala2
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala17
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Namers.scala8
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala9
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala6
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/RefChecks.scala53
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala351
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala11
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Typers.scala8
-rw-r--r--src/compiler/scala/tools/nsc/util/ClassPath.scala5
-rw-r--r--src/compiler/scala/tools/nsc/util/StackTracing.scala76
-rw-r--r--src/compiler/scala/tools/nsc/util/package.scala18
-rw-r--r--src/compiler/scala/tools/reflect/ToolBoxFactory.scala5
-rw-r--r--src/compiler/scala/tools/reflect/WrappedProperties.scala8
-rw-r--r--src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala34
-rw-r--r--src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala27
-rw-r--r--src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala6
-rw-r--r--src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala238
-rw-r--r--src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala8
-rw-r--r--src/interactive/scala/tools/nsc/interactive/Global.scala2
-rw-r--r--src/library/scala/collection/immutable/Map.scala2
-rw-r--r--src/library/scala/collection/mutable/Map.scala2
-rw-r--r--src/library/scala/collection/parallel/immutable/ParMap.scala2
-rw-r--r--src/library/scala/collection/parallel/mutable/ParMap.scala2
-rw-r--r--src/library/scala/util/control/TailCalls.scala63
-rw-r--r--src/parser-combinators/scala/util/parsing/combinator/ImplicitConversions.scala43
-rw-r--r--src/parser-combinators/scala/util/parsing/combinator/JavaTokenParsers.scala62
-rw-r--r--src/parser-combinators/scala/util/parsing/combinator/PackratParsers.scala312
-rw-r--r--src/parser-combinators/scala/util/parsing/combinator/Parsers.scala919
-rw-r--r--src/parser-combinators/scala/util/parsing/combinator/RegexParsers.scala166
-rw-r--r--src/parser-combinators/scala/util/parsing/combinator/lexical/Lexical.scala40
-rw-r--r--src/parser-combinators/scala/util/parsing/combinator/lexical/Scanners.scala63
-rw-r--r--src/parser-combinators/scala/util/parsing/combinator/lexical/StdLexical.scala87
-rw-r--r--src/parser-combinators/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala32
-rw-r--r--src/parser-combinators/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala52
-rw-r--r--src/parser-combinators/scala/util/parsing/combinator/syntactical/TokenParsers.scala35
-rw-r--r--src/parser-combinators/scala/util/parsing/combinator/token/StdTokens.scala39
-rw-r--r--src/parser-combinators/scala/util/parsing/combinator/token/Tokens.scala43
-rw-r--r--src/parser-combinators/scala/util/parsing/input/CharArrayReader.scala35
-rw-r--r--src/parser-combinators/scala/util/parsing/input/CharSequenceReader.scala66
-rw-r--r--src/parser-combinators/scala/util/parsing/input/NoPosition.scala25
-rw-r--r--src/parser-combinators/scala/util/parsing/input/OffsetPosition.scala73
-rw-r--r--src/parser-combinators/scala/util/parsing/input/PagedSeqReader.scala71
-rw-r--r--src/parser-combinators/scala/util/parsing/input/Position.scala62
-rw-r--r--src/parser-combinators/scala/util/parsing/input/Positional.scala30
-rw-r--r--src/parser-combinators/scala/util/parsing/input/Reader.scala62
-rw-r--r--src/parser-combinators/scala/util/parsing/input/StreamReader.scala76
-rw-r--r--src/parser-combinators/scala/util/parsing/json/JSON.scala97
-rw-r--r--src/parser-combinators/scala/util/parsing/json/Lexer.scala90
-rw-r--r--src/parser-combinators/scala/util/parsing/json/Parser.scala147
-rw-r--r--src/partest-extras/scala/tools/partest/ReplTest.scala50
-rw-r--r--src/partest-extras/scala/tools/partest/ScriptTest.scala21
-rw-r--r--src/reflect/scala/reflect/api/BuildUtils.scala118
-rw-r--r--src/reflect/scala/reflect/internal/BuildUtils.scala349
-rw-r--r--src/reflect/scala/reflect/internal/ClassfileConstants.scala36
-rw-r--r--src/reflect/scala/reflect/internal/Definitions.scala1
-rw-r--r--src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala9
-rw-r--r--src/reflect/scala/reflect/internal/Importers.scala5
-rw-r--r--src/reflect/scala/reflect/internal/Names.scala108
-rw-r--r--src/reflect/scala/reflect/internal/Printers.scala2
-rw-r--r--src/reflect/scala/reflect/internal/StdNames.scala34
-rw-r--r--src/reflect/scala/reflect/internal/TreeGen.scala79
-rw-r--r--src/reflect/scala/reflect/internal/Trees.scala16
-rw-r--r--src/reflect/scala/reflect/internal/Types.scala15
-rw-r--r--src/reflect/scala/reflect/internal/annotations/uncheckedBounds.scala13
-rw-r--r--src/reflect/scala/reflect/internal/util/SourceFile.scala5
-rw-r--r--src/reflect/scala/reflect/runtime/JavaMirrors.scala35
-rw-r--r--src/reflect/scala/reflect/runtime/ReflectionUtils.scala6
-rw-r--r--src/reflect/scala/reflect/runtime/SymbolLoaders.scala11
-rw-r--r--src/reflect/scala/reflect/runtime/SynchronizedOps.scala5
-rw-r--r--src/repl/scala/tools/nsc/interpreter/ILoop.scala2
-rw-r--r--src/repl/scala/tools/nsc/interpreter/IMain.scala4
-rw-r--r--src/xml/scala/xml/Atom.scala47
-rw-r--r--src/xml/scala/xml/Attribute.scala101
-rw-r--r--src/xml/scala/xml/Comment.scala31
-rw-r--r--src/xml/scala/xml/Document.scala92
-rwxr-xr-xsrc/xml/scala/xml/Elem.scala136
-rw-r--r--src/xml/scala/xml/EntityRef.scala40
-rw-r--r--src/xml/scala/xml/Equality.scala107
-rw-r--r--src/xml/scala/xml/Group.scala42
-rw-r--r--src/xml/scala/xml/MalformedAttributeException.scala15
-rw-r--r--src/xml/scala/xml/MetaData.scala217
-rw-r--r--src/xml/scala/xml/NamespaceBinding.scala83
-rwxr-xr-xsrc/xml/scala/xml/Node.scala198
-rw-r--r--src/xml/scala/xml/NodeBuffer.scala47
-rw-r--r--src/xml/scala/xml/NodeSeq.scala157
-rw-r--r--src/xml/scala/xml/Null.scala62
-rw-r--r--src/xml/scala/xml/PCData.scala44
-rw-r--r--src/xml/scala/xml/PrefixedAttribute.scala61
-rwxr-xr-xsrc/xml/scala/xml/PrettyPrinter.scala263
-rw-r--r--src/xml/scala/xml/ProcInstr.scala39
-rw-r--r--src/xml/scala/xml/QNode.scala20
-rw-r--r--src/xml/scala/xml/SpecialNode.scala33
-rw-r--r--src/xml/scala/xml/Text.scala39
-rw-r--r--src/xml/scala/xml/TextBuffer.scala46
-rw-r--r--src/xml/scala/xml/TopScope.scala31
-rw-r--r--src/xml/scala/xml/TypeSymbol.scala15
-rw-r--r--src/xml/scala/xml/Unparsed.scala36
-rw-r--r--src/xml/scala/xml/UnprefixedAttribute.scala61
-rwxr-xr-xsrc/xml/scala/xml/Utility.scala410
-rwxr-xr-xsrc/xml/scala/xml/XML.scala109
-rw-r--r--src/xml/scala/xml/Xhtml.scala97
-rw-r--r--src/xml/scala/xml/dtd/ContentModel.scala118
-rw-r--r--src/xml/scala/xml/dtd/ContentModelParser.scala129
-rw-r--r--src/xml/scala/xml/dtd/DTD.scala35
-rw-r--r--src/xml/scala/xml/dtd/Decl.scala157
-rw-r--r--src/xml/scala/xml/dtd/DocType.scala39
-rw-r--r--src/xml/scala/xml/dtd/ElementValidator.scala132
-rw-r--r--src/xml/scala/xml/dtd/ExternalID.scala86
-rw-r--r--src/xml/scala/xml/dtd/Scanner.scala79
-rw-r--r--src/xml/scala/xml/dtd/Tokens.scala45
-rw-r--r--src/xml/scala/xml/dtd/ValidationException.scala44
-rw-r--r--src/xml/scala/xml/dtd/impl/Base.scala67
-rw-r--r--src/xml/scala/xml/dtd/impl/BaseBerrySethi.scala98
-rw-r--r--src/xml/scala/xml/dtd/impl/DetWordAutom.scala50
-rw-r--r--src/xml/scala/xml/dtd/impl/Inclusion.scala70
-rw-r--r--src/xml/scala/xml/dtd/impl/NondetWordAutom.scala60
-rw-r--r--src/xml/scala/xml/dtd/impl/PointedHedgeExp.scala37
-rw-r--r--src/xml/scala/xml/dtd/impl/SubsetConstruction.scala108
-rw-r--r--src/xml/scala/xml/dtd/impl/SyntaxError.scala21
-rw-r--r--src/xml/scala/xml/dtd/impl/WordBerrySethi.scala162
-rw-r--r--src/xml/scala/xml/dtd/impl/WordExp.scala59
-rwxr-xr-xsrc/xml/scala/xml/factory/Binder.scala61
-rw-r--r--src/xml/scala/xml/factory/LoggedNodeFactory.scala90
-rw-r--r--src/xml/scala/xml/factory/NodeFactory.scala61
-rw-r--r--src/xml/scala/xml/factory/XMLLoader.scala61
-rw-r--r--src/xml/scala/xml/include/CircularIncludeException.scala25
-rw-r--r--src/xml/scala/xml/include/UnavailableResourceException.scala20
-rw-r--r--src/xml/scala/xml/include/XIncludeException.scala58
-rw-r--r--src/xml/scala/xml/include/sax/EncodingHeuristics.scala98
-rw-r--r--src/xml/scala/xml/include/sax/XIncludeFilter.scala373
-rw-r--r--src/xml/scala/xml/include/sax/XIncluder.scala187
-rw-r--r--src/xml/scala/xml/package.scala19
-rwxr-xr-xsrc/xml/scala/xml/parsing/ConstructingHandler.scala34
-rw-r--r--src/xml/scala/xml/parsing/ConstructingParser.scala55
-rwxr-xr-xsrc/xml/scala/xml/parsing/DefaultMarkupHandler.scala30
-rw-r--r--src/xml/scala/xml/parsing/ExternalSources.scala38
-rw-r--r--src/xml/scala/xml/parsing/FactoryAdapter.scala187
-rw-r--r--src/xml/scala/xml/parsing/FatalError.scala17
-rwxr-xr-xsrc/xml/scala/xml/parsing/MarkupHandler.scala127
-rwxr-xr-xsrc/xml/scala/xml/parsing/MarkupParser.scala938
-rw-r--r--src/xml/scala/xml/parsing/MarkupParserCommon.scala260
-rw-r--r--src/xml/scala/xml/parsing/NoBindingFactoryAdapter.scala37
-rw-r--r--src/xml/scala/xml/parsing/TokenTests.scala101
-rw-r--r--src/xml/scala/xml/parsing/ValidatingMarkupHandler.scala104
-rw-r--r--src/xml/scala/xml/parsing/XhtmlEntities.scala54
-rw-r--r--src/xml/scala/xml/parsing/XhtmlParser.scala31
-rw-r--r--src/xml/scala/xml/persistent/CachedFileStorage.scala129
-rw-r--r--src/xml/scala/xml/persistent/Index.scala17
-rw-r--r--src/xml/scala/xml/persistent/SetStorage.scala42
-rw-r--r--src/xml/scala/xml/pull/XMLEvent.scala60
-rwxr-xr-xsrc/xml/scala/xml/pull/XMLEventReader.scala157
-rw-r--r--src/xml/scala/xml/pull/package.scala42
-rw-r--r--src/xml/scala/xml/transform/BasicTransformer.scala60
-rw-r--r--src/xml/scala/xml/transform/RewriteRule.scala28
-rw-r--r--src/xml/scala/xml/transform/RuleTransformer.scala16
-rw-r--r--test/files/jvm/backendBugUnapply.scala17
-rw-r--r--test/files/jvm/serialization-new.check54
-rw-r--r--test/files/jvm/serialization-new.scala67
-rw-r--r--test/files/jvm/serialization.check54
-rw-r--r--test/files/jvm/serialization.scala65
-rwxr-xr-xtest/files/jvm/t0632.check12
-rw-r--r--test/files/jvm/t0632.scala22
-rwxr-xr-xtest/files/jvm/t1118.check11
-rwxr-xr-xtest/files/jvm/t1118.scala21
-rw-r--r--test/files/jvm/t560bis.check2
-rw-r--r--test/files/jvm/t560bis.scala21
-rw-r--r--test/files/jvm/unittest_xml.scala101
-rwxr-xr-xtest/files/jvm/xml01.check8
-rw-r--r--test/files/jvm/xml01.scala182
-rw-r--r--test/files/jvm/xml02.scala78
-rwxr-xr-xtest/files/jvm/xml03syntax.check27
-rw-r--r--test/files/jvm/xml03syntax.scala97
-rw-r--r--test/files/jvm/xml04embed.check3
-rw-r--r--test/files/jvm/xml04embed.scala10
-rw-r--r--test/files/jvm/xmlattr.check18
-rw-r--r--test/files/jvm/xmlattr.scala70
-rw-r--r--test/files/jvm/xmlmore.check10
-rw-r--r--test/files/jvm/xmlmore.scala29
-rw-r--r--test/files/jvm/xmlpull.scala31
-rw-r--r--test/files/jvm/xmlstuff.check22
-rw-r--r--test/files/jvm/xmlstuff.scala181
-rw-r--r--test/files/neg/classmanifests_new_deprecations.check8
-rw-r--r--test/files/neg/macro-abort.check4
-rw-r--r--test/files/neg/macro-abort/Macros_1.scala9
-rw-r--r--test/files/neg/macro-abort/Test_2.scala3
-rw-r--r--test/files/neg/macro-exception.check7
-rw-r--r--test/files/neg/macro-exception/Macros_1.scala9
-rw-r--r--test/files/neg/macro-exception/Test_2.scala3
-rw-r--r--test/files/neg/macro-invalidusage-presuper.check2
-rw-r--r--test/files/neg/t1011.check4
-rw-r--r--test/files/neg/t1011.scala127
-rw-r--r--test/files/neg/t1017.check4
-rw-r--r--test/files/neg/t1017.scala4
-rw-r--r--test/files/neg/t1845.scala12
-rw-r--r--test/files/neg/t1878-typer.check4
-rw-r--r--test/files/neg/t1878-typer.scala6
-rw-r--r--test/files/neg/t2796.check5
-rw-r--r--test/files/neg/t2796.flags2
-rw-r--r--test/files/neg/t2796.scala3
-rw-r--r--test/files/neg/t3160ambiguous.check6
-rw-r--r--test/files/neg/t3160ambiguous.scala10
-rw-r--r--test/files/neg/t3776.scala8
-rw-r--r--test/files/neg/t7020.check19
-rw-r--r--test/files/neg/t7020.flags1
-rw-r--r--test/files/neg/t7020.scala30
-rw-r--r--test/files/neg/t7185.check7
-rw-r--r--test/files/neg/t7185.scala3
-rw-r--r--test/files/neg/t7694b.check7
-rw-r--r--test/files/neg/t7752.check27
-rw-r--r--test/files/neg/t7752.scala26
-rw-r--r--test/files/neg/t935.check5
-rw-r--r--test/files/pos/matchStarlift.scala7
-rw-r--r--test/files/pos/t0422.scala16
-rw-r--r--test/files/pos/t0646.scala21
-rw-r--r--test/files/pos/t1014.scala15
-rw-r--r--test/files/pos/t1059.scala28
-rw-r--r--test/files/pos/t1203a.scala7
-rw-r--r--test/files/pos/t1626.scala4
-rw-r--r--test/files/pos/t1761.scala10
-rw-r--r--test/files/pos/t2281.scala41
-rw-r--r--test/files/pos/t2698.scala11
-rw-r--r--test/files/pos/t3160.scala6
-rw-r--r--test/files/pos/t4760.scala34
-rw-r--r--test/files/pos/t5858.scala3
-rw-r--r--test/files/pos/t6201.scala13
-rw-r--r--test/files/pos/t6897.scala6
-rw-r--r--test/files/pos/t7014/ThreadSafety.java9
-rw-r--r--test/files/pos/t7014/ThreadSafetyLevel.java8
-rw-r--r--test/files/pos/t7014/t7014.scala4
-rw-r--r--test/files/pos/t715/meredith_1.scala98
-rw-r--r--test/files/pos/t715/runner_2.scala3
-rw-r--r--test/files/pos/t7486-named.scala8
-rw-r--r--test/files/pos/t7486.scala (renamed from test/pending/pos/t7486.scala)0
-rw-r--r--test/files/pos/t7694.scala40
-rw-r--r--test/files/pos/t7716.scala16
-rw-r--r--test/files/pos/t7782.scala25
-rw-r--r--test/files/pos/t7782b.scala25
-rw-r--r--test/files/pos/t880.scala6
-rw-r--r--test/files/pos/t942/Amount_1.java5
-rw-r--r--test/files/pos/t942/Test_2.scala3
-rw-r--r--test/files/presentation/partial-fun.check2
-rw-r--r--test/files/presentation/partial-fun/Runner.scala10
-rw-r--r--test/files/presentation/partial-fun/partial-fun.check1
-rw-r--r--test/files/presentation/partial-fun/src/PartialFun.scala5
-rw-r--r--test/files/run/WeakHashSetTest.scala174
-rw-r--r--test/files/run/analyzerPlugins.scala6
-rw-r--r--test/files/run/deprecate-early-type-defs.check3
-rw-r--r--test/files/run/deprecate-early-type-defs.flags1
-rw-r--r--test/files/run/deprecate-early-type-defs.scala1
-rw-r--r--test/files/run/existential-rangepos.check13
-rw-r--r--test/files/run/existential-rangepos.scala13
-rw-r--r--test/files/run/fors.check46
-rw-r--r--test/files/run/fors.scala97
-rw-r--r--test/files/run/io-position.checkbin126 -> 0 bytes
-rw-r--r--test/files/run/io-position.scala11
-rw-r--r--test/files/run/json.check21
-rw-r--r--test/files/run/json.scala287
-rw-r--r--test/files/run/jtptest.check7
-rw-r--r--test/files/run/jtptest.scala17
-rw-r--r--test/files/run/macro-auto-duplicate.check1
-rw-r--r--test/files/run/macro-auto-duplicate/Macros_1.scala17
-rw-r--r--test/files/run/macro-auto-duplicate/Test_2.scala3
-rw-r--r--test/files/run/macro-duplicate/Impls_Macros_1.scala2
-rw-r--r--test/files/run/nodebuffer-array.check3
-rw-r--r--test/files/run/nodebuffer-array.scala15
-rw-r--r--test/files/run/packrat1.check7
-rw-r--r--test/files/run/packrat1.scala47
-rw-r--r--test/files/run/packrat2.check7
-rw-r--r--test/files/run/packrat2.scala57
-rw-r--r--test/files/run/packrat3.check7
-rw-r--r--test/files/run/packrat3.scala51
-rw-r--r--test/files/run/parserFilter.check9
-rw-r--r--test/files/run/parserFilter.scala15
-rw-r--r--test/files/run/parserForFilter.check1
-rw-r--r--test/files/run/parserForFilter.scala12
-rw-r--r--test/files/run/parserJavaIdent.check26
-rw-r--r--test/files/run/parserJavaIdent.scala26
-rw-r--r--test/files/run/parserNoSuccessMessage.check20
-rw-r--r--test/files/run/parserNoSuccessMessage.scala19
-rw-r--r--test/files/run/reflect-priv-ctor.check1
-rw-r--r--test/files/run/reflect-priv-ctor.scala22
-rw-r--r--test/files/run/reflection-magicsymbols-invoke.check2
-rw-r--r--test/files/run/repl-backticks.check2
-rw-r--r--test/files/run/repl-backticks.scala18
-rw-r--r--test/files/run/repl-trim-stack-trace.scala18
-rw-r--r--test/files/run/t0486.check8
-rw-r--r--test/files/run/t0486.scala24
-rwxr-xr-xtest/files/run/t0663.check1
-rw-r--r--test/files/run/t0663.scala6
-rw-r--r--test/files/run/t0700.check2
-rw-r--r--test/files/run/t0700.scala24
-rw-r--r--test/files/run/t1079.scala3
-rw-r--r--test/files/run/t1100.check4
-rw-r--r--test/files/run/t1100.scala17
-rw-r--r--test/files/run/t1500.check3
-rw-r--r--test/files/run/t1500.scala46
-rw-r--r--test/files/run/t1501.check3
-rw-r--r--test/files/run/t1501.scala56
-rwxr-xr-xtest/files/run/t1620.check6
-rw-r--r--test/files/run/t1620.scala16
-rw-r--r--test/files/run/t1773.scala12
-rwxr-xr-xtest/files/run/t2124.check1
-rw-r--r--test/files/run/t2124.scala25
-rwxr-xr-xtest/files/run/t2125.check1
-rw-r--r--test/files/run/t2125.scala25
-rw-r--r--test/files/run/t2276.check8
-rw-r--r--test/files/run/t2276.scala24
-rw-r--r--test/files/run/t2354.scala17
-rw-r--r--test/files/run/t2721.check2
-rw-r--r--test/files/run/t2721.scala12
-rw-r--r--test/files/run/t3705.scala17
-rw-r--r--test/files/run/t3886.scala11
-rw-r--r--test/files/run/t4124.check4
-rw-r--r--test/files/run/t4124.scala24
-rw-r--r--test/files/run/t4138.check2
-rw-r--r--test/files/run/t4138.scala6
-rw-r--r--test/files/run/t4387.scala12
-rw-r--r--test/files/run/t4929.check1
-rw-r--r--test/files/run/t4929.scala43
-rw-r--r--test/files/run/t5052.scala6
-rw-r--r--test/files/run/t5115.scala14
-rw-r--r--test/files/run/t5514.check19
-rw-r--r--test/files/run/t5514.scala35
-rw-r--r--test/files/run/t5843.check9
-rw-r--r--test/files/run/t5843.scala15
-rw-r--r--test/files/run/t6392b.check2
-rw-r--r--test/files/run/t6939.scala13
-rw-r--r--test/files/run/t6989.check24
-rw-r--r--test/files/run/t7074.check9
-rw-r--r--test/files/run/t7074.scala15
-rw-r--r--test/files/run/t7510.check (renamed from test/files/jvm/xml02.check)0
-rw-r--r--test/files/run/t7510/Ann_1.java4
-rw-r--r--test/files/run/t7510/Test_2.scala9
-rw-r--r--test/files/run/t7775.scala17
-rw-r--r--test/files/run/t7779.scala67
-rw-r--r--test/files/run/t7791-script-linenums.check1
-rw-r--r--test/files/run/t7791-script-linenums.scala16
-rw-r--r--test/files/run/t7791-script-linenums.script8
-rw-r--r--test/files/run/t7805-repl-i.check11
-rw-r--r--test/files/run/t7805-repl-i.scala42
-rw-r--r--test/files/run/t7805-repl-i.script1
-rw-r--r--test/files/run/t7817-tree-gen.check104
-rw-r--r--test/files/run/t7817-tree-gen.flags1
-rw-r--r--test/files/run/t7817-tree-gen.scala65
-rw-r--r--test/files/run/t7817.scala31
-rw-r--r--test/files/run/tailcalls.scala13
-rw-r--r--test/files/run/toolbox_current_run_compiles.check (renamed from test/files/run/t1079.check)1
-rw-r--r--test/files/run/toolbox_current_run_compiles.scala28
-rw-r--r--test/files/run/typed-annotated.check1
-rw-r--r--test/files/run/typed-annotated/Macros_1.scala17
-rw-r--r--test/files/run/typed-annotated/Test_2.scala3
-rw-r--r--test/files/run/unittest_io.scala42
-rw-r--r--test/files/run/xml-attribute.check12
-rw-r--r--test/files/run/xml-attribute.scala37
-rw-r--r--test/files/run/xml-loop-bug.scala14
-rw-r--r--test/files/scalacheck/quasiquotes/DefinitionConstructionProps.scala293
-rw-r--r--test/files/scalacheck/quasiquotes/DefinitionDeconstructionProps.scala147
-rw-r--r--test/files/scalacheck/quasiquotes/PatternConstructionProps.scala2
-rw-r--r--test/files/scalacheck/quasiquotes/QuasiquoteProperties.scala21
-rw-r--r--test/files/scalacheck/quasiquotes/TermConstructionProps.scala213
-rw-r--r--test/files/scalacheck/quasiquotes/TermDeconstructionProps.scala84
-rw-r--r--test/files/scalacheck/quasiquotes/Test.scala2
-rw-r--r--test/files/scalacheck/quasiquotes/TypeConstructionProps.scala11
-rw-r--r--test/files/scalacheck/quasiquotes/TypeDeconstructionProps.scala13
-rw-r--r--test/junit/scala/tools/nsc/util/StackTraceTest.scala159
-rw-r--r--test/pending/pos/t7778/Foo_1.java6
-rw-r--r--test/pending/pos/t7778/Test_2.scala3
-rw-r--r--test/pending/pos/treecheckers.flags1
-rw-r--r--test/pending/pos/treecheckers/c1.scala12
-rw-r--r--test/pending/pos/treecheckers/c2.scala1
-rw-r--r--test/pending/pos/treecheckers/c3.scala8
-rw-r--r--test/pending/pos/treecheckers/c4.scala9
-rw-r--r--test/pending/pos/treecheckers/c5.scala3
-rw-r--r--test/pending/pos/treecheckers/c6.scala4
-rw-r--r--test/pending/run/t7733.check (renamed from test/files/jvm/backendBugUnapply.check)1
-rw-r--r--test/pending/run/t7733/Separate_1.scala5
-rw-r--r--test/pending/run/t7733/Test_2.scala9
-rwxr-xr-xtools/binary-repo-lib.sh2
-rw-r--r--versions.properties2
405 files changed, 3615 insertions, 15122 deletions
diff --git a/.gitignore b/.gitignore
index e60505f663..f90835d970 100644
--- a/.gitignore
+++ b/.gitignore
@@ -9,10 +9,21 @@
# see also test/files/.gitignore
#
-*.jar
-*~
+#
+# JARs aren't checked in, they are fetched by Ant / pull_binary_libs.sh
+#
+# We could be more concise with /lib/**/*.jar but that assumes
+# a late-model git.
+#
+/lib/ant/*.jar
+/lib/*.jar
+/test/files/codelib/*.jar
+/test/files/lib/*.jar
+/test/files/speclib/instrumented.jar
+/tools/*.jar
-build.properties
+# Developer specific Ant properties
+/build.properties
# target directories for ant build
/build/
@@ -33,11 +44,5 @@ build.properties
/.idea
/.settings
-# bak files produced by ./cleanup-commit
-*.bak
-
# Standard symbolic link to build/quick/bin
-qbin
-
-# Mac specific, but that is common enough a dev platform to warrant inclusion.
-.DS_Store
+/qbin
diff --git a/build.xml b/build.xml
index 3ab426b809..f8dbf42242 100755
--- a/build.xml
+++ b/build.xml
@@ -196,12 +196,12 @@ TODO:
</target>
<macrodef name="copy-deps" description="Copy a file set based on maven dependency resolution to a directory. Currently used by the IntelliJ config files.">
- <attribute name="fileset.prefix"></attribute>
- <attribute name="out"></attribute>
+ <attribute name="project" />
+ <attribute name="refid" default="@{project}.fileset"/>
<sequential>
- <delete dir="${build-deps.dir}/@{out}" includes="*.jar"/>
- <copy todir="${build-deps.dir}/@{out}">
- <fileset refid="@{fileset.prefix}.fileset" />
+ <delete dir="${build-deps.dir}/@{project}" includes="*.jar"/>
+ <copy todir="${build-deps.dir}/@{project}">
+ <resources refid="@{refid}" />
<mapper type="flatten" />
</copy>
</sequential>
@@ -233,7 +233,7 @@ TODO:
<artifact:dependencies pathId="junit.classpath" filesetId="junit.fileset">
<dependency groupId="junit" artifactId="junit" version="${junit.version}"/>
</artifact:dependencies>
- <copy-deps fileset.prefix="junit" out="junit"/>
+ <copy-deps project="junit"/>
<!-- Pax runner -->
<property name="pax.exam.version" value="2.5.0"/>
@@ -250,19 +250,90 @@ TODO:
</artifact:dependencies>
+ <artifact:remoteRepository id="sonatype-release" url="https://oss.sonatype.org/content/repositories/releases"/>
+
<artifact:dependencies pathId="partest.classpath" filesetId="partest.fileset" versionsId="partest.versions">
- <!-- to facilitate building and publishing partest locally -->
- <localRepository path="${user.home}/.m2/repository"/>
- <!-- so we don't have to wait for artifacts to synch to maven central: -->
- <artifact:remoteRepository id="sonatype-release" url="https://oss.sonatype.org/content/repositories/releases"/>
+ <!-- uncomment the following if you're deploying your own partest locally -->
+ <!-- <localRepository path="${user.home}/.m2/repository"/> -->
+ <!-- so we don't have to wait for artifacts to synch to maven central
+ (we don't distribute partest with Scala, so the risk of sonatype and maven being out of synch is irrelevant):
+ -->
+ <artifact:remoteRepository refid="sonatype-release"/>
<dependency groupId="org.scala-lang.modules" artifactId="scala-partest_${scala.binary.version}" version="${partest.version.number}" />
</artifact:dependencies>
- <copy-deps fileset.prefix="partest" out="partest"/>
+ <copy-deps project="partest"/>
- <artifact:dependencies pathId="repl.deps.classpath" filesetId="repl.deps.fileset" versionsId="repl.deps.versions">
+ <artifact:dependencies pathId="repl.deps.classpath" filesetId="repl.fileset" versionsId="repl.deps.versions">
<dependency groupId="jline" artifactId="jline" version="${jline.version}"/>
</artifact:dependencies>
- <copy-deps fileset.prefix="repl.deps" out="repl"/>
+ <copy-deps project="repl"/>
+
+ <!-- used by the test.osgi target to create osgi bundles for the xml, parser-combinator jars
+ must specify sourcesFilesetId, javadocFilesetId to download these types of artifacts -->
+ <artifact:dependencies pathId="external-modules.deps.classpath" sourcesFilesetId="external-modules.sources.fileset" javadocFilesetId="external-modules.javadoc.fileset">
+ <!-- sonatype is not enabled by default for modules to avoid a Scala release relying on a JAR that's not on maven (yet) -->
+ <!-- <artifact:remoteRepository refid="sonatype-release"/> -->
+ <dependency groupId="org.scala-lang.modules" artifactId="scala-xml_${scala.binary.version}" version="${scala-xml.version.number}"/>
+ <dependency groupId="org.scala-lang.modules" artifactId="scala-parser-combinators_${scala.binary.version}" version="${scala-parser-combinators.version.number}"/>
+ </artifact:dependencies>
+
+ <!-- External modules, excluding the core -->
+ <path id="external-modules-nocore">
+ <restrict>
+ <path refid="external-modules.deps.classpath"/>
+ <rsel:not><rsel:or>
+ <rsel:name name="scala-library*.jar"/>
+ <rsel:name name="scala-reflect*.jar"/>
+ <rsel:name name="scala-compiler*.jar"/>
+ </rsel:or></rsel:not>
+ </restrict>
+ </path>
+ <copy-deps refid="external-modules-nocore" project="scaladoc"/>
+
+ <!--
+ include partest and its run-time dependencies,
+ but filter out the compiler we just built, as that's what we want to test!
+ TODO: mark partest's compiler dependencies as provided when publishing to maven,
+ so that we don't have to filter them out here...
+ -->
+ <path id="partest-deps-nocore">
+ <restrict>
+ <path refid="partest.classpath"/>
+ <rsel:not><rsel:or>
+ <rsel:name name="scala-library*.jar"/>
+ <rsel:name name="scala-reflect*.jar"/>
+ <rsel:name name="scala-compiler*.jar"/>
+ <!-- TODO: remove actors & scalap, move to external modules -->
+ <rsel:name name="scala-actors*.jar"/>
+ <rsel:name name="scala-scalap*.jar"/>
+ </rsel:or></rsel:not>
+ </restrict>
+ </path>
+
+ <!-- Set property named @{name} to the jar resolved as @{jar}_${scala.binary.version}:jar.
+ @{jar}_${scala.binary.version} must be a maven dependency. -->
+ <macrodef name="propertyForCrossedArtifact">
+ <attribute name="name" />
+ <attribute name="jar" />
+ <sequential>
+ <readProperty name="@{name}" property="@{jar}_${scala.binary.version}:jar" />
+ <readProperty name="@{name}-sources" property="@{jar}_${scala.binary.version}:java-source:sources" />
+ <readProperty name="@{name}-javadoc" property="@{jar}_${scala.binary.version}:java-source:javadoc" />
+ </sequential>
+ </macrodef>
+
+ <!-- Set property named @{name} to the value of the property named @{property}.
+ Helper for performing nested property expansion without using the ant props lib -->
+ <macrodef name="readProperty">
+ <attribute name="name" />
+ <attribute name="property" />
+ <sequential>
+ <property name="@{name}" value="${@{property}}" />
+ </sequential>
+ </macrodef>
+
+ <propertyForCrossedArtifact name="scala-parser-combinators" jar="org.scala-lang.modules:scala-parser-combinators"/>
+ <propertyForCrossedArtifact name="scala-xml" jar="org.scala-lang.modules:scala-xml"/>
<!-- BND support -->
<typedef resource="aQute/bnd/ant/taskdef.properties" classpathref="extra.tasks.classpath" />
@@ -391,9 +462,9 @@ TODO:
<property name="sbt.lib.dir" value="${build-sbt.dir}/${sbt.latest.version}/lib"/>
<property name="sbt.interface.jar" value="${sbt.lib.dir}/interface.jar"/>
- <property name="sbt.interface.url" value="http://typesafe.artifactoryonline.com/typesafe/ivy-releases/org.scala-sbt/interface/${sbt.latest.version}/jars/interface.jar"/>
+ <property name="sbt.interface.url" value="http://private-repo.typesafe.com/typesafe/ivy-releases/org.scala-sbt/interface/${sbt.latest.version}/jars/interface.jar"/>
<property name="sbt.interface.src.jar" value="${sbt.src.dir}/compiler-interface-src.jar"/>
- <property name="sbt.interface.src.url" value="http://typesafe.artifactoryonline.com/typesafe/ivy-releases/org.scala-sbt/compiler-interface/${sbt.latest.version}/jars/compiler-interface-src.jar"/>
+ <property name="sbt.interface.src.url" value="http://private-repo.typesafe.com/typesafe/ivy-releases/org.scala-sbt/compiler-interface/${sbt.latest.version}/jars/compiler-interface-src.jar"/>
<!-- Additional command line arguments for scalac. They are added to all build targets -->
@@ -500,7 +571,7 @@ TODO:
There must be a variable of the shape @{stage}.@{project}.build.path
for all @{stage} in locker, quick, strap
and all @{project} in library, reflect, compiler
- when stage is quick, @{project} also includes: actors, parser-combinators, xml, repl, swing, plugins, scalacheck, interactive, scaladoc, scalap
+ when stage is quick, @{project} also includes: actors, repl, swing, plugins, interactive, scaladoc, scalap
-->
<!-- LOCKER -->
@@ -541,11 +612,6 @@ TODO:
<pathelement location="${build-quick.dir}/classes/actors"/>
</path>
- <path id="quick.parser-combinators.build.path">
- <path refid="quick.library.build.path"/>
- <pathelement location="${build-quick.dir}/classes/parser-combinators"/>
- </path>
-
<path id="quick.reflect.build.path">
<path refid="quick.library.build.path"/>
<pathelement location="${build-quick.dir}/classes/reflect"/>
@@ -563,11 +629,6 @@ TODO:
<path refid="repl.deps.classpath"/>
</path>
- <path id="quick.xml.build.path">
- <path refid="quick.library.build.path"/>
- <pathelement location="${build-quick.dir}/classes/xml"/>
- </path>
-
<path id="quick.swing.build.path">
<path refid="quick.library.build.path"/>
<pathelement location="${build-quick.dir}/classes/swing"/>
@@ -600,10 +661,9 @@ TODO:
</path>
<path id="quick.scaladoc.build.path">
- <path refid="quick.xml.build.path"/>
<path refid="quick.compiler.build.path"/>
- <path refid="quick.parser-combinators.build.path"/>
<path refid="partest.classpath"/>
+ <path refid="external-modules-nocore"/>
<pathelement location="${build-quick.dir}/classes/scaladoc"/>
</path>
@@ -614,12 +674,11 @@ TODO:
</path>
<path id="quick.bin.tool.path">
- <path refid="quick.parser-combinators.build.path"/>
- <path refid="quick.xml.build.path"/>
<path refid="quick.repl.build.path"/>
<path refid="quick.actors.build.path"/>
<pathelement location="${build-quick.dir}/classes/scalap"/>
<pathelement location="${build-quick.dir}/classes/continuations-library"/>
+ <path refid="external-modules-nocore"/>
</path>
<!-- PACK -->
@@ -634,9 +693,9 @@ TODO:
<path id="pack.bin.tool.path">
<pathelement location="${build-pack.dir}/lib/scala-library.jar"/>
- <pathelement location="${build-pack.dir}/lib/scala-actors.jar"/>
- <pathelement location="${build-pack.dir}/lib/scala-parser-combinators.jar"/>
<pathelement location="${build-pack.dir}/lib/scala-xml.jar"/>
+ <pathelement location="${build-pack.dir}/lib/scala-parser-combinators.jar"/>
+ <pathelement location="${build-pack.dir}/lib/scala-actors.jar"/>
<pathelement location="${build-pack.dir}/lib/scala-reflect.jar"/>
<pathelement location="${build-pack.dir}/lib/scala-compiler.jar"/>
<pathelement location="${build-pack.dir}/lib/scalap.jar"/>
@@ -662,8 +721,6 @@ TODO:
<fileset dir="${asm-classes}"/>
</path>
- <path id="pack.parser-combinators.files"> <fileset dir="${build-quick.dir}/classes/parser-combinators"/> </path>
- <path id="pack.xml.files"> <fileset dir="${build-quick.dir}/classes/xml"/> </path>
<path id="pack.swing.files"> <fileset dir="${build-quick.dir}/classes/swing"/> </path>
<path id="pack.reflect.files"> <fileset dir="${build-quick.dir}/classes/reflect"/> </path>
<path id="pack.plugins.files"> <fileset dir="${build-quick.dir}/classes/continuations-plugin"/> </path>
@@ -693,12 +750,10 @@ TODO:
<!-- MISC -->
<path id="docs.compiler.path">
+ <path refid="external-modules-nocore"/>
<pathelement location="${build-pack.dir}/lib/scala-library.jar"/>
- <pathelement location="${build-pack.dir}/lib/scala-parser-combinators.jar"/>
- <pathelement location="${build-pack.dir}/lib/scala-xml.jar"/>
<pathelement location="${build-pack.dir}/lib/scala-reflect.jar"/>
<pathelement location="${build-pack.dir}/lib/scala-compiler.jar"/>
- <!-- <pathelement location="${build-pack.dir}/lib/scala-partest.jar"/> -->
<pathelement location="${build-pack.dir}/lib/scalap.jar"/>
<pathelement location="${build-pack.dir}/lib/scala-actors.jar"/>
<pathelement location="${ant.jar}"/>
@@ -727,36 +782,19 @@ TODO:
<pathelement location="${build-pack.dir}/lib/scala-library.jar"/>
<pathelement location="${build-pack.dir}/lib/scala-reflect.jar"/>
<pathelement location="${build-pack.dir}/lib/scala-compiler.jar"/>
- <pathelement location="${build-pack.dir}/lib/scala-actors.jar"/>
- <pathelement location="${build-pack.dir}/lib/scala-parser-combinators.jar"/>
- <pathelement location="${build-pack.dir}/lib/scala-xml.jar"/>
<!-- to test a quick build without packing, replace the above pathelements with: (may need a little tweaking)
<path refid="quick.bin.tool.path">
<path refid="quick.interactive.build.path">
-->
+ <!-- TODO: move scalap & actors out of repo -->
<pathelement location="${build-pack.dir}/lib/scalap.jar"/>
- <!-- TODO: move scalap out of repo -->
+ <pathelement location="${build-pack.dir}/lib/scala-actors.jar"/>
- <!--
- include partest and its run-time dependencies,
- but filter out the compiler we just built, as that's what we want to test!
- TODO: mark partest's compiler dependencies as provided when publishing to maven,
- so that we don't have to filter them out here...
- -->
- <restrict>
- <path refid="partest.classpath"/>
- <rsel:not><rsel:or>
- <rsel:name name="scala-library*.jar"/>
- <rsel:name name="scala-reflect*.jar"/>
- <rsel:name name="scala-compiler*.jar"/>
- <rsel:name name="scala-actors*.jar"/>
- <rsel:name name="scala-scalap*.jar"/>
- <!-- <rsel:name name="scala-parser-combinators*.jar"/>
- <rsel:name name="scala-xml*.jar"/> -->
- </rsel:or></rsel:not>
- </restrict>
+ <!-- partest dependencies, without the jars we built locally
+ TODO: figure out why scalap tests fail if we move this up-->
+ <path refid="partest-deps-nocore"/>
<!-- partest classes specific to the core compiler build -->
<pathelement location="${build-pack.dir}/lib/scala-partest-extras.jar"/>
@@ -766,16 +804,6 @@ TODO:
<fileset dir="${partest.dir}/files/lib" includes="*.jar" />
</path>
- <!-- obsolete? -->
- <!-- TODO - segregate swing tests (there can't be many) -->
- <!--
- <path id="partest.build.path">
- <path refid="pack.compiler.path"/>
- <fileset dir="${partest.dir}/files/lib" includes="*.jar" />
- <pathelement location="${pack.dir}/lib/scala-swing.jar"/>
- </path>
- -->
-
<path id="test.junit.compiler.build.path">
<pathelement location="${test.junit.classes}"/>
<path refid="quick.compiler.build.path"/>
@@ -788,7 +816,6 @@ TODO:
<pathelement location="${build-osgi.dir}/org.scala-lang.scala-reflect.jar"/>
<pathelement location="${build-osgi.dir}/org.scala-lang.scala-compiler.jar"/>
<pathelement location="${build-osgi.dir}/org.scala-lang.scala-actors.jar"/>
- <pathelement location="${build-osgi.dir}/org.scala-lang.scala-parser-combinators.jar"/>
<path refid="pax.exam.classpath"/>
<path refid="forkjoin.classpath"/>
</path>
@@ -845,7 +872,11 @@ TODO:
<target name="docs.clean"> <clean build="docs"/> <delete dir="${build.dir}/manmaker" includeemptydirs="yes" quiet="yes" failonerror="no"/> </target>
<target name="dist.clean"> <delete dir="${dists.dir}" includeemptydirs="yes" quiet="yes" failonerror="no"/> </target>
- <target name="all.clean" depends="locker.clean, docs.clean"> <clean build="sbt"/> <clean build="osgi"/> </target>
+ <target name="junit.clean"> <clean build="junit"/> </target>
+
+ <target name="all.clean" depends="locker.clean, docs.clean, junit.clean">
+ <clean build="sbt"/> <clean build="osgi"/>
+ </target>
<!-- Used by the scala-installer script -->
<target name="allallclean" depends="all.clean, dist.clean"/>
@@ -1229,9 +1260,6 @@ TODO:
<target name="quick.actors" depends="quick.lib">
<staged-build with="locker" stage="quick" project="actors"/> </target>
- <target name="quick.parser-combinators" depends="quick.lib">
- <staged-build with="locker" stage="quick" project="parser-combinators"/> </target>
-
<target name="quick.reflect" depends="quick.lib">
<staged-build with="locker" stage="quick" project="reflect"/> </target>
@@ -1244,15 +1272,12 @@ TODO:
<target name="quick.scalap" depends="quick.repl">
<staged-build with="locker" stage="quick" project="scalap"/> </target>
- <target name="quick.scaladoc" depends="quick.comp, quick.parser-combinators">
+ <target name="quick.scaladoc" depends="quick.comp">
<staged-build with="locker" stage="quick" project="scaladoc" version="scaladoc"/> </target>
<target name="quick.interactive" depends="quick.comp, quick.scaladoc">
<staged-build with="locker" stage="quick" project="interactive"/> </target>
- <target name="quick.xml" depends="quick.lib">
- <staged-build with="locker" stage="quick" project="xml"/> </target>
-
<target name="quick.swing" depends="quick.actors, quick.lib" if="has.java6">
<staged-build with="locker" stage="quick" project="swing"/> </target>
@@ -1283,7 +1308,7 @@ TODO:
</staged-uptodate>
</target>
- <target name="quick.bin" depends="quick.lib, quick.reflect, quick.comp, quick.repl, quick.scalap, quick.interactive, quick.xml, quick.parser-combinators, quick.swing, quick.plugins, quick.scaladoc">
+ <target name="quick.bin" depends="quick.lib, quick.reflect, quick.comp, quick.repl, quick.scalap, quick.interactive, quick.swing, quick.plugins, quick.scaladoc">
<staged-bin stage="quick" classpathref="quick.bin.tool.path"/>
</target>
@@ -1298,8 +1323,6 @@ TODO:
<staged-pack project="library"/></target>
<target name="pack.actors" depends="quick.lib"> <staged-pack project="actors"/> </target>
- <target name="pack.xml" depends="quick.xml"> <staged-pack project="xml"/> </target>
- <target name="pack.parser-combinators" depends="quick.parser-combinators"> <staged-pack project="parser-combinators"/> </target>
<target name="pack.swing" if="has.java6" depends="quick.swing"> <staged-pack project="swing"/> </target>
<target name="pack.reflect" depends="quick.reflect"> <staged-pack project="reflect"/> </target>
@@ -1307,7 +1330,7 @@ TODO:
<staged-pack project="compiler" manifest="${build-pack.dir}/META-INF/MANIFEST.MF">
<pre> <!-- TODO the files copied here do not influence actuality of this target (nor does the manifest) -->
<copy todir="${build-pack.dir}/lib">
- <resources refid="repl.deps.fileset"/>
+ <resources refid="repl.fileset"/>
<mapper classpathref="maven-ant-tasks.classpath" classname="org.apache.maven.artifact.ant.VersionMapper" from="${repl.deps.versions}" to="flatten"/>
</copy>
<copy todir="${build-pack.dir}/lib">
@@ -1322,9 +1345,11 @@ TODO:
<attribute name="Class-Path" value="scala-reflect.jar scala-library.jar"/>
</manifest>
</pre>
+ <!-- script api is 2.11-only so far
<jar-opts>
<service type="javax.script.ScriptEngineFactory" provider="scala.tools.nsc.interpreter.IMain$Factory"/>
</jar-opts>
+ -->
</staged-pack>
</target>
@@ -1332,7 +1357,12 @@ TODO:
<target name="pack.scalap" depends="quick.scalap"> <staged-pack project="scalap" targetjar="scalap.jar"/> </target>
- <target name="pack.bin" depends="pack.comp, pack.lib, pack.actors, pack.plugins, pack.reflect, pack.scalap, pack.xml, pack.swing, pack.parser-combinators">
+ <target name="pack.bin" depends="pack.comp, pack.lib, pack.actors, pack.plugins, pack.reflect, pack.scalap, pack.swing">
+ <copy todir="${build-pack.dir}/lib">
+ <path refid="external-modules-nocore" />
+ <mapper type="flatten" />
+ </copy>
+
<staged-bin stage="pack"/>
</target>
@@ -1371,7 +1401,7 @@ TODO:
<fileset dir="${asm-classes}"/>
</jar>
<copy todir="${build-palo.dir}/lib">
- <resources refid="repl.deps.fileset"/>
+ <resources refid="repl.fileset"/>
<mapper classpathref="maven-ant-tasks.classpath" classname="org.apache.maven.artifact.ant.VersionMapper" from="${repl.deps.versions}" to="flatten"/>
</copy>
</target>
@@ -1389,13 +1419,14 @@ TODO:
<macrodef name="make-bundle">
<attribute name="name" />
<attribute name="version" />
+ <attribute name="jar" default="${build-pack.dir}/lib/@{name}.jar" />
<sequential>
<copy file="${src.dir}/build/bnd/@{name}.bnd" tofile="${build-osgi.dir}/@{name}.bnd" overwrite="true">
<filterset>
<filter token="VERSION" value="@{version}" />
</filterset>
</copy>
- <bnd classpath="${build-pack.dir}/lib/@{name}.jar"
+ <bnd classpath="@{jar}"
eclipse="false"
failok="false"
exceptions="true"
@@ -1432,11 +1463,11 @@ TODO:
<stopwatch name="osgi.bundle.timer"/>
<make-bundle name="scala-library" version="${osgi.version.number}" />
<make-bundle name="scala-actors" version="${osgi.version.number}" />
- <make-bundle name="scala-parser-combinators" version="${osgi.version.number}" />
+ <make-bundle name="scala-parser-combinators" version="${osgi.version.number}" jar="${scala-parser-combinators}"/>
<make-bundle name="scala-reflect" version="${osgi.version.number}" />
<make-bundle name="scala-compiler" version="${osgi.version.number}" />
<make-plugin-bundle name="continuations" version="${osgi.version.number}" />
- <make-bundle name="scala-xml" version="${osgi.version.number}"/>
+ <make-bundle name="scala-xml" version="${osgi.version.number}" jar="${scala-xml}"/>
<touch file="${build-osgi.dir}/bundles.complete" verbose="no"/>
<if><isset property="has.java6"/><then>
@@ -1486,6 +1517,7 @@ TODO:
<target name="test.osgi" depends="test.osgi.comp">
<stopwatch name="test.osgi.timer"/>
<mkdir dir="${test.osgi.classes}"/>
+
<junit fork="yes" haltonfailure="yes">
<classpath refid="test.osgi.compiler.build.path"/>
<batchtest fork="yes" todir="${build-osgi.dir}">
@@ -1774,18 +1806,6 @@ TODO:
</staged-uptodate>
</target>
- <target name="docs.xml" depends="docs.start">
- <staged-docs project="xml" title="Scala XML Library" docroot="rootdoc.txt">
- <include name="**/*.scala"/>
- </staged-docs>
- </target>
-
- <target name="docs.parser-combinators" depends="docs.start">
- <staged-docs project="parser-combinators" title="Scala Parser Combinator Library" docroot="rootdoc.txt">
- <include name="**/*.scala"/>
- </staged-docs>
- </target>
-
<target name="docs.comp" depends="docs.start">
<staged-docs project="compiler" title="Scala Compiler" docroot="rootdoc.txt">
<include name="**/*.scala"/>
@@ -1841,7 +1861,7 @@ TODO:
</staged-uptodate>
</target>
- <target name="docs.done" depends="docs.comp, docs.man, docs.lib, docs.xml, docs.parser-combinators, docs.scalap, docs.continuations-plugin"/>
+ <target name="docs.done" depends="docs.comp, docs.man, docs.lib, docs.scalap, docs.continuations-plugin"/>
<!-- ===========================================================================
DISTRIBUTION
@@ -1873,8 +1893,9 @@ TODO:
</fileset>
</copy>
+ <!-- TODO -->
<copy todir="${dist.dir}/lib">
- <resources refid="repl.deps.fileset"/>
+ <resources refid="repl.fileset"/>
<mapper classpathref="maven-ant-tasks.classpath" classname="org.apache.maven.artifact.ant.VersionMapper"
from="${repl.deps.versions}" to="flatten"/>
</copy>
@@ -1882,11 +1903,11 @@ TODO:
<mkdir dir="${dist.dir}/bin"/>
<!-- TODO - Stop being inefficient and don't copy OSGi bundles overtop other jars. -->
<copy-bundle name="scala-library"/>
- <copy-bundle name="scala-reflect"/>
<copy-bundle name="scala-xml"/>
+ <copy-bundle name="scala-parser-combinators"/>
+ <copy-bundle name="scala-reflect"/>
<copy-bundle name="scala-swing"/>
<copy-bundle name="scala-actors"/>
- <copy-bundle name="scala-parser-combinators"/>
<copy-bundle name="scala-compiler"/>
<copy toDir="${dist.dir}/bin">
<fileset dir="${build-pack.dir}/bin"/>
@@ -1902,6 +1923,11 @@ TODO:
<target name="dist.doc" depends="dist.base, docs.done">
<mkdir dir="${dist.dir}/doc/scala-devel-docs"/>
+ <copy toDir="${dist.dir}/doc/scala-devel-docs">
+ <file file="${scala-xml-javadoc}"/>
+ <file file="${scala-parser-combinators-javadoc}"/>
+ </copy>
+
<copy file="${docs.dir}/LICENSE" toDir="${dist.dir}/doc"/>
<copy file="${docs.dir}/README" toDir="${dist.dir}/doc"/>
<mkdir dir="${dist.dir}/doc/scala-devel-docs/api"/>
@@ -1931,6 +1957,11 @@ TODO:
<target name="dist.src" depends="dist.base">
<mkdir dir="${dist.dir}/src"/>
+ <copy toDir="${dist.dir}/src">
+ <file file="${scala-xml-sources}"/>
+ <file file="${scala-parser-combinators-sources}"/>
+ </copy>
+
<jar whenmanifestonly="fail" destfile="${dist.dir}/src/scala-library-src.jar">
<fileset dir="${src.dir}/library"/>
<fileset dir="${src.dir}/continuations/library"/>
@@ -1943,10 +1974,8 @@ TODO:
<fileset dir="${src.dir}/interactive"/>
<fileset dir="${src.dir}/continuations/plugin"/>
</jar>
- <jar whenmanifestonly="fail" destfile="${dist.dir}/src/scala-xml-src.jar" basedir="${src.dir}/xml"/>
<jar whenmanifestonly="fail" destfile="${dist.dir}/src/scala-swing-src.jar" basedir="${src.dir}/swing"/>
<jar whenmanifestonly="fail" destfile="${dist.dir}/src/scala-reflect-src.jar" basedir="${src.dir}/reflect"/>
- <jar whenmanifestonly="fail" destfile="${dist.dir}/src/scala-parser-combinators-src.jar" basedir="${src.dir}/parser-combinators"/>
<jar whenmanifestonly="fail" destfile="${dist.dir}/src/scalap-src.jar" basedir="${src.dir}/scalap"/>
</target>
@@ -1994,10 +2023,8 @@ TODO:
<target name="starr.src" depends="starr.jars">
<jar whenmanifestonly="fail" destfile="${lib.dir}/scala-library-src.jar">
<fileset dir="${src.dir}/library"/>
- <fileset dir="${src.dir}/xml"/>
<fileset dir="${src.dir}/swing"/>
<fileset dir="${src.dir}/actors"/>
- <fileset dir="${src.dir}/parser-combinators"/>
<fileset dir="${src.dir}/forkjoin"/>
</jar>
<jar whenmanifestonly="fail" destfile="${lib.dir}/scala-reflect-src.jar" basedir="${src.dir}/reflect"/>
diff --git a/dbuild-meta.json b/dbuild-meta.json
index 3987afa395..8d3b65afd0 100644
--- a/dbuild-meta.json
+++ b/dbuild-meta.json
@@ -45,6 +45,16 @@
"extension": "jar",
"name": "scala-reflect",
"organization": "org.scala-lang"
+ },
+ {
+ "extension": "jar",
+ "name": "scala-xml",
+ "organization": "org.scala-lang.modules"
+ },
+ {
+ "extension": "jar",
+ "name": "scala-parser-combinators",
+ "organization": "org.scala-lang.modules"
}
],
"name": "scala-compiler",
@@ -90,75 +100,6 @@
"artifacts": [
{
"extension": "jar",
- "name": "scala-xml",
- "organization": "org.scala-lang"
- }
- ],
- "dependencies": [
- {
- "extension": "jar",
- "name": "scala-library",
- "organization": "org.scala-lang"
- }
- ],
- "name": "scala-xml",
- "organization": "org.scala-lang"
- },
- {
- "artifacts": [
- {
- "extension": "jar",
- "name": "scala-parser-combinators",
- "organization": "org.scala-lang"
- }
- ],
- "dependencies": [
- {
- "extension": "jar",
- "name": "scala-library",
- "organization": "org.scala-lang"
- }
- ],
- "name": "scala-parser-combinators",
- "organization": "org.scala-lang"
- },
- {
- "artifacts": [
- {
- "extension": "jar",
- "name": "scaladoc",
- "organization": "org.scala-lang"
- }
- ],
- "dependencies": [
- {
- "extension": "jar",
- "name": "scala-compiler",
- "organization": "org.scala-lang"
- },
- {
- "extension": "jar",
- "name": "scala-partest",
- "organization": "org.scala-lang"
- },
- {
- "extension": "jar",
- "name": "scala-xml",
- "organization": "org.scala-lang"
- },
- {
- "extension": "jar",
- "name": "scala-parser-combinators",
- "organization": "org.scala-lang"
- }
- ],
- "name": "scaladoc",
- "organization": "org.scala-lang"
- },
- {
- "artifacts": [
- {
- "extension": "jar",
"name": "scalap",
"organization": "org.scala-lang"
}
diff --git a/docs/LICENSE b/docs/LICENSE
index a9c254ed49..446a1a350b 100644
--- a/docs/LICENSE
+++ b/docs/LICENSE
@@ -1,35 +1,27 @@
-SCALA LICENSE
+Copyright (c) 2002-2013 EPFL
-Copyright (c) 2002-2013 EPFL, Lausanne, unless otherwise specified.
All rights reserved.
-This software was developed by the Programming Methods Laboratory of the
-Swiss Federal Institute of Technology (EPFL), Lausanne, Switzerland.
-
-Permission to use, copy, modify, and distribute this software in source
-or binary form for any purpose with or without fee is hereby granted,
-provided that the following conditions are met:
-
- 1. Redistributions of source code must retain the above copyright
- notice, this list of conditions and the following disclaimer.
-
- 2. Redistributions in binary form must reproduce the above copyright
- notice, this list of conditions and the following disclaimer in the
- documentation and/or other materials provided with the distribution.
-
- 3. Neither the name of the EPFL nor the names of its contributors
- may be used to endorse or promote products derived from this
- software without specific prior written permission.
-
-
-THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND
-ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
-ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE
-FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
-LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
-OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
-SUCH DAMAGE.
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+
+ * Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+ * Neither the name of the EPFL nor the names of its contributors
+ may be used to endorse or promote products derived from this software
+ without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/src/build/dbuild-meta-json-gen.scala b/src/build/dbuild-meta-json-gen.scala
index 73eee8ac3a..ee1976ffa4 100644
--- a/src/build/dbuild-meta-json-gen.scala
+++ b/src/build/dbuild-meta-json-gen.scala
@@ -16,7 +16,11 @@ val meta =
Seq(ProjectRef("scala-library", "org.scala-lang"))),
Project("scala-compiler", "org.scala-lang",
Seq(ProjectRef("scala-compiler", "org.scala-lang")),
- Seq(ProjectRef("scala-reflect", "org.scala-lang"))), // asm
+ Seq(ProjectRef("scala-reflect", "org.scala-lang"),
+ ProjectRef("scala-xml", "org.scala-lang.modules"),
+ ProjectRef("scala-parser-combinators", "org.scala-lang.modules")
+ // asm
+ )),
// Project("scala-repl", "org.scala-lang",
// Seq(ProjectRef("scala-repl", "org.scala-lang")),
@@ -33,16 +37,10 @@ val meta =
Project("scala-actors", "org.scala-lang",
Seq(ProjectRef("scala-actors", "org.scala-lang")),
Seq(ProjectRef("scala-library", "org.scala-lang"))),
- Project("scala-xml", "org.scala-lang",
- Seq(ProjectRef("scala-xml", "org.scala-lang")),
- Seq(ProjectRef("scala-library", "org.scala-lang"))),
- Project("scala-parser-combinators", "org.scala-lang",
- Seq(ProjectRef("scala-parser-combinators", "org.scala-lang")),
- Seq(ProjectRef("scala-library", "org.scala-lang"))),
- Project("scaladoc", "org.scala-lang",
- Seq(ProjectRef("scaladoc", "org.scala-lang")),
- Seq(ProjectRef("scala-compiler", "org.scala-lang"),ProjectRef("scala-partest", "org.scala-lang"), ProjectRef("scala-xml", "org.scala-lang"), ProjectRef("scala-parser-combinators", "org.scala-lang"))),
+ // Project("scaladoc", "org.scala-lang",
+ // Seq(ProjectRef("scaladoc", "org.scala-lang")),
+ // Seq(ProjectRef("scala-compiler", "org.scala-lang"),ProjectRef("scala-partest", "org.scala-lang"), ProjectRef("scala-xml", "org.scala-lang"), ProjectRef("scala-parser-combinators", "org.scala-lang"))),
Project("scalap", "org.scala-lang",
Seq(ProjectRef("scalap", "org.scala-lang")),
diff --git a/src/build/maven/maven-deploy.xml b/src/build/maven/maven-deploy.xml
index f52a7888ce..946b712b6c 100644
--- a/src/build/maven/maven-deploy.xml
+++ b/src/build/maven/maven-deploy.xml
@@ -55,6 +55,9 @@
<copy file="${path}-pom.xml" tofile="${path}-pom-filtered.xml" overwrite="true">
<filterset>
<filter token="VERSION" value="@{version}" />
+ <filter token="SCALA_BINARY_VERSION" value="${scala.binary.version}" />
+ <filter token="XML_VERSION" value="${scala-xml.version.number}" />
+ <filter token="PARSER_COMBINATORS_VERSION" value="${scala-parser-combinators.version.number}" />
<filter token="RELEASE_REPOSITORY" value="${remote.release.repository}" />
<filter token="SNAPSHOT_REPOSITORY" value="${remote.snapshot.repository}" />
<filter token="JLINE_VERSION" value="${jline.version}" />
@@ -108,8 +111,6 @@
<deploy-one name="scala-actors" version="${maven.version.number}" local="@{local}" signed="@{signed}"/>
<deploy-one name="scala-compiler" version="${maven.version.number}" local="@{local}" signed="@{signed}"/>
<deploy-one name="scala-library" version="${maven.version.number}" local="@{local}" signed="@{signed}"/>
- <deploy-one name="scala-xml" version="${maven.version.number}" local="@{local}" signed="@{signed}"/>
- <deploy-one name="scala-parser-combinators" version="${maven.version.number}" local="@{local}" signed="@{signed}"/>
<deploy-one name="scala-reflect" version="${maven.version.number}" local="@{local}" signed="@{signed}"/>
<deploy-one name="scala-swing" version="${maven.version.number}" local="@{local}" signed="@{signed}"/>
<deploy-one name="scalap" version="${maven.version.number}" local="@{local}" signed="@{signed}"/>
diff --git a/src/build/maven/scala-compiler-pom.xml b/src/build/maven/scala-compiler-pom.xml
index 6e7f1a0f2c..8cc42c22ae 100644
--- a/src/build/maven/scala-compiler-pom.xml
+++ b/src/build/maven/scala-compiler-pom.xml
@@ -38,14 +38,14 @@
<version>@VERSION@</version>
</dependency>
<dependency> <!-- for scaladoc -->
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-xml</artifactId>
- <version>@VERSION@</version>
+ <groupId>org.scala-lang.modules</groupId>
+ <artifactId>scala-xml_@SCALA_BINARY_VERSION@</artifactId>
+ <version>@XML_VERSION@</version>
</dependency>
<dependency> <!-- for scaladoc -->
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-parser-combinators</artifactId>
- <version>@VERSION@</version>
+ <groupId>org.scala-lang.modules</groupId>
+ <artifactId>scala-parser-combinators_@SCALA_BINARY_VERSION@</artifactId>
+ <version>@PARSER_COMBINATORS_VERSION@</version>
</dependency>
<dependency>
<groupId>org.scala-lang</groupId>
diff --git a/src/build/maven/scala-parser-combinators-pom.xml b/src/build/maven/scala-parser-combinators-pom.xml
deleted file mode 100644
index cddff269c8..0000000000
--- a/src/build/maven/scala-parser-combinators-pom.xml
+++ /dev/null
@@ -1,59 +0,0 @@
-<project
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-parser-combinators</artifactId>
- <packaging>jar</packaging>
- <version>@VERSION@</version>
- <name>Scala Parser Combinators</name>
- <description>Parser Combinator Library for the Scala Programming Language</description>
- <url>http://www.scala-lang.org/</url>
- <inceptionYear>2002</inceptionYear>
- <organization>
- <name>LAMP/EPFL</name>
- <url>http://lamp.epfl.ch/</url>
- </organization>
- <licenses>
- <license>
- <name>BSD-like</name>
- <url>http://www.scala-lang.org/downloads/license.html
- </url>
- <distribution>repo</distribution>
- </license>
- </licenses>
- <scm>
- <connection>scm:git:git://github.com/scala/scala.git</connection>
- <url>https://github.com/scala/scala.git</url>
- </scm>
- <issueManagement>
- <system>JIRA</system>
- <url>https://issues.scala-lang.org/</url>
- </issueManagement>
- <properties>
- <info.apiURL>http://www.scala-lang.org/api/@VERSION@/</info.apiURL>
- </properties>
- <dependencies>
- </dependencies>
- <distributionManagement>
- <repository>
- <id>scala-tools.org</id>
- <url>@RELEASE_REPOSITORY@</url>
- </repository>
- <snapshotRepository>
- <id>scala-tools.org</id>
- <url>@SNAPSHOT_REPOSITORY@</url>
- <uniqueVersion>false</uniqueVersion>
- </snapshotRepository>
- </distributionManagement>
- <developers>
- <developer>
- <id>lamp</id>
- <name>EPFL LAMP</name>
- </developer>
- <developer>
- <id>Typesafe</id>
- <name>Typesafe, Inc.</name>
- </developer>
- </developers>
-</project>
diff --git a/src/build/maven/scala-xml-pom.xml b/src/build/maven/scala-xml-pom.xml
deleted file mode 100644
index 629872c2e2..0000000000
--- a/src/build/maven/scala-xml-pom.xml
+++ /dev/null
@@ -1,59 +0,0 @@
-<project
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-xml</artifactId>
- <packaging>jar</packaging>
- <version>@VERSION@</version>
- <name>Scala XML</name>
- <description>XML Library for the Scala Programming Language</description>
- <url>http://www.scala-lang.org/</url>
- <inceptionYear>2002</inceptionYear>
- <organization>
- <name>LAMP/EPFL</name>
- <url>http://lamp.epfl.ch/</url>
- </organization>
- <licenses>
- <license>
- <name>BSD-like</name>
- <url>http://www.scala-lang.org/downloads/license.html
- </url>
- <distribution>repo</distribution>
- </license>
- </licenses>
- <scm>
- <connection>scm:git:git://github.com/scala/scala.git</connection>
- <url>https://github.com/scala/scala.git</url>
- </scm>
- <issueManagement>
- <system>JIRA</system>
- <url>https://issues.scala-lang.org/</url>
- </issueManagement>
- <properties>
- <info.apiURL>http://www.scala-lang.org/api/@VERSION@/</info.apiURL>
- </properties>
- <dependencies>
- </dependencies>
- <distributionManagement>
- <repository>
- <id>scala-tools.org</id>
- <url>@RELEASE_REPOSITORY@</url>
- </repository>
- <snapshotRepository>
- <id>scala-tools.org</id>
- <url>@SNAPSHOT_REPOSITORY@</url>
- <uniqueVersion>false</uniqueVersion>
- </snapshotRepository>
- </distributionManagement>
- <developers>
- <developer>
- <id>lamp</id>
- <name>EPFL LAMP</name>
- </developer>
- <developer>
- <id>Typesafe</id>
- <name>Typesafe, Inc.</name>
- </developer>
- </developers>
-</project>
diff --git a/src/build/pack.xml b/src/build/pack.xml
index fa030300ac..8e2d2f19fa 100644
--- a/src/build/pack.xml
+++ b/src/build/pack.xml
@@ -10,9 +10,12 @@
PROPERTIES
============================================================================ -->
- <property file="${basedir}/build.number.maven"/>
<!-- the maven stuff requires version.major, version.minor and version.patch properties.
the "get-scala-revision" script only returns "version.number" -->
+ <property file="${basedir}/build.number.maven"/>
+ <!-- also need to know scala binary version and versions for xml and parsers -->
+ <property file="${basedir}/versions.properties"/>
+
<!-- ===========================================================================
MAIN DISTRIBUTION PACKAGING
@@ -152,8 +155,6 @@ MAIN DISTRIBUTION PACKAGING
</sequential>
</macrodef>
<mvn-copy-lib mvn.artifact.name="scala-library"/>
- <mvn-copy-lib mvn.artifact.name="scala-xml"/>
- <mvn-copy-lib mvn.artifact.name="scala-parser-combinators"/>
<mvn-copy-lib mvn.artifact.name="scala-reflect"/>
<mvn-copy-lib mvn.artifact.name="scala-compiler"/>
<mvn-copy-lib mvn.artifact.name="scala-swing"/>
@@ -193,14 +194,6 @@ MAIN DISTRIBUTION PACKAGING
basedir="${build-docs.dir}/library">
<include name="**/*"/>
</jar>
- <jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/scala-xml/scala-xml-docs.jar"
- basedir="${build-docs.dir}/xml">
- <include name="**/*"/>
- </jar>
- <jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/scala-parser-combinators/scala-parser-combinators-docs.jar"
- basedir="${build-docs.dir}/parser-combinators">
- <include name="**/*"/>
- </jar>
<jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/scala-compiler/scala-compiler-docs.jar"
basedir="${build-docs.dir}/compiler">
<include name="**/*"/>
diff --git a/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala b/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala
index 90fb41f80b..3a97089d51 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala
@@ -131,9 +131,9 @@ trait GenSymbols {
if (sym.isCapturedVariable) {
assert(binding.isInstanceOf[Ident], showRaw(binding))
val capturedBinding = referenceCapturedVariable(sym)
- Reification(name, capturedBinding, mirrorBuildCall(nme.newFreeTerm, reify(sym.name.toString), capturedBinding, mirrorBuildCall(nme.flagsFromBits, reify(sym.flags)), reify(origin(sym))))
+ Reification(name, capturedBinding, mirrorBuildCall(nme.newFreeTerm, reify(sym.name.toString), capturedBinding, mirrorBuildCall(nme.FlagsRepr, reify(sym.flags)), reify(origin(sym))))
} else {
- Reification(name, binding, mirrorBuildCall(nme.newFreeTerm, reify(sym.name.toString), binding, mirrorBuildCall(nme.flagsFromBits, reify(sym.flags)), reify(origin(sym))))
+ Reification(name, binding, mirrorBuildCall(nme.newFreeTerm, reify(sym.name.toString), binding, mirrorBuildCall(nme.FlagsRepr, reify(sym.flags)), reify(origin(sym))))
}
}
@@ -142,7 +142,7 @@ trait GenSymbols {
if (reifyDebug) println("Free type: %s (%s)".format(sym, sym.accurateKindString))
state.reificationIsConcrete = false
val name: TermName = nme.REIFY_FREE_PREFIX append sym.name
- Reification(name, binding, mirrorBuildCall(nme.newFreeType, reify(sym.name.toString), mirrorBuildCall(nme.flagsFromBits, reify(sym.flags)), reify(origin(sym))))
+ Reification(name, binding, mirrorBuildCall(nme.newFreeType, reify(sym.name.toString), mirrorBuildCall(nme.FlagsRepr, reify(sym.flags)), reify(origin(sym))))
}
def reifySymDef(sym: Symbol): Tree =
@@ -150,7 +150,7 @@ trait GenSymbols {
if (reifyDebug) println("Sym def: %s (%s)".format(sym, sym.accurateKindString))
val name: TermName = nme.REIFY_SYMDEF_PREFIX append sym.name
def reifiedOwner = if (sym.owner.isLocatable) reify(sym.owner) else reifySymDef(sym.owner)
- Reification(name, Ident(sym), mirrorBuildCall(nme.newNestedSymbol, reifiedOwner, reify(sym.name), reify(sym.pos), mirrorBuildCall(nme.flagsFromBits, reify(sym.flags)), reify(sym.isClass)))
+ Reification(name, Ident(sym), mirrorBuildCall(nme.newNestedSymbol, reifiedOwner, reify(sym.name), reify(sym.pos), mirrorBuildCall(nme.FlagsRepr, reify(sym.flags)), reify(sym.isClass)))
}
case class Reification(name: Name, binding: Tree, tree: Tree)
diff --git a/src/compiler/scala/reflect/reify/codegen/GenTrees.scala b/src/compiler/scala/reflect/reify/codegen/GenTrees.scala
index 3507c2a173..9de8451873 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenTrees.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenTrees.scala
@@ -83,8 +83,12 @@ trait GenTrees {
reifyProduct(tree)
}
+ def reifyFlags(flags: FlagSet) =
+ if (flags != 0) reifyBuildCall(nme.FlagsRepr, flags) else mirrorSelect(nme.NoFlags)
+
def reifyModifiers(m: global.Modifiers) =
- mirrorFactoryCall(nme.Modifiers, mirrorBuildCall(nme.flagsFromBits, reify(m.flags)), reify(m.privateWithin), reify(m.annotations))
+ if (m == NoMods) mirrorSelect(nme.NoMods)
+ else mirrorFactoryCall(nme.Modifiers, reifyFlags(m.flags), reify(m.privateWithin), reify(m.annotations))
private def spliceTree(tree: Tree): Tree = {
tree match {
diff --git a/src/compiler/scala/reflect/reify/utils/Extractors.scala b/src/compiler/scala/reflect/reify/utils/Extractors.scala
index d5f27dc119..9af8f2de2a 100644
--- a/src/compiler/scala/reflect/reify/utils/Extractors.scala
+++ b/src/compiler/scala/reflect/reify/utils/Extractors.scala
@@ -164,6 +164,16 @@ trait Extractors {
}
}
+ // abstract over possible additional .apply select
+ // which is sometimes inserted after desugaring of calls
+ object ApplyCall {
+ def unapply(tree: Tree): Option[(Tree, List[Tree])] = tree match {
+ case Apply(Select(id, nme.apply), args) => Some((id, args))
+ case Apply(id, args) => Some((id, args))
+ case _ => None
+ }
+ }
+
sealed abstract class FreeDefExtractor(acceptTerms: Boolean, acceptTypes: Boolean) {
def unapply(tree: Tree): Option[(Tree, TermName, Tree, Long, String)] = {
def acceptFreeTermFactory(name: Name) = {
@@ -175,10 +185,10 @@ trait Extractors {
ValDef(_, name, _, Apply(
Select(Select(uref1 @ Ident(_), build1), freeTermFactory),
_ :+
- Apply(Select(Select(uref2 @ Ident(_), build2), flagsFromBits), List(Literal(Constant(flags: Long)))) :+
+ ApplyCall(Select(Select(uref2 @ Ident(_), build2), flagsRepr), List(Literal(Constant(flags: Long)))) :+
Literal(Constant(origin: String))))
if uref1.name == nme.UNIVERSE_SHORT && build1 == nme.build && acceptFreeTermFactory(freeTermFactory) &&
- uref2.name == nme.UNIVERSE_SHORT && build2 == nme.build && flagsFromBits == nme.flagsFromBits =>
+ uref2.name == nme.UNIVERSE_SHORT && build2 == nme.build && flagsRepr == nme.FlagsRepr =>
Some((uref1, name, reifyBinding(tree), flags, origin))
case _ =>
None
@@ -208,10 +218,10 @@ trait Extractors {
_,
_,
_,
- Apply(Select(Select(uref2 @ Ident(_), build2), flagsFromBits), List(Literal(Constant(flags: Long)))),
+ ApplyCall(Select(Select(uref2 @ Ident(_), build2), flagsRepr), List(Literal(Constant(flags: Long)))),
Literal(Constant(isClass: Boolean)))))
if uref1.name == nme.UNIVERSE_SHORT && build1 == nme.build && newNestedSymbol == nme.newNestedSymbol &&
- uref2.name == nme.UNIVERSE_SHORT && build2 == nme.build && flagsFromBits == nme.flagsFromBits =>
+ uref2.name == nme.UNIVERSE_SHORT && build2 == nme.build && flagsRepr == nme.FlagsRepr =>
Some((uref1, name, flags, isClass))
case _ =>
None
diff --git a/src/compiler/scala/reflect/reify/utils/NodePrinters.scala b/src/compiler/scala/reflect/reify/utils/NodePrinters.scala
index 0903bc481c..e37b861461 100644
--- a/src/compiler/scala/reflect/reify/utils/NodePrinters.scala
+++ b/src/compiler/scala/reflect/reify/utils/NodePrinters.scala
@@ -32,7 +32,7 @@ trait NodePrinters {
s = "List\\[List\\[.*?\\].*?\\]".r.replaceAllIn(s, "List")
s = "List\\[.*?\\]".r.replaceAllIn(s, "List")
s = s.replace("immutable.this.Nil", "List()")
- s = """build\.flagsFromBits\((\d+)[lL]\)""".r.replaceAllIn(s, m => {
+ s = """build\.FlagsRepr\((\d+)[lL]\)""".r.replaceAllIn(s, m => {
flagsAreUsed = true
show(m.group(1).toLong)
})
diff --git a/src/compiler/scala/tools/ant/templates/tool-windows.tmpl b/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
index a3a95ffd37..1288eb0b7c 100644
--- a/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
+++ b/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
@@ -22,13 +22,74 @@ if "%~1"=="-toolcp" (
goto another_param
)
-set _LINE_PARAMS=%1
+rem We keep in _JAVA_PARAMS all -J-prefixed and -D-prefixed arguments
+set _JAVA_PARAMS=
+
+if [%1]==[] goto param_afterloop
+set _TEST_PARAM=%~1
+if not "%_TEST_PARAM:~0,1%"=="-" goto param_afterloop
+
+rem ignore -e "scala code"
+if "%_TEST_PARAM:~0,2%"=="-e" (
+ shift
+ shift
+ if [%1]==[] goto param_afterloop
+)
+
+set _TEST_PARAM=%~1
+if "%_TEST_PARAM:~0,2%"=="-J" (
+ set _JAVA_PARAMS=%_TEST_PARAM:~2%
+)
+
+if "%_TEST_PARAM:~0,2%"=="-D" (
+ rem test if this was double-quoted property "-Dprop=42"
+ for /F "delims== tokens=1-2" %%G in ("%_TEST_PARAM%") DO (
+ if not "%%G" == "%_TEST_PARAM%" (
+ rem double quoted: "-Dprop=42" -> -Dprop="42"
+ set _JAVA_PARAMS=%%G="%%H"
+ ) else if [%2] neq [] (
+ rem it was a normal property: -Dprop=42 or -Drop="42"
+ set _JAVA_PARAMS=%_TEST_PARAM%=%2
+ shift
+ )
+ )
+)
+
:param_loop
shift
+
if [%1]==[] goto param_afterloop
-set _LINE_PARAMS=%_LINE_PARAMS% %1
+set _TEST_PARAM=%~1
+if not "%_TEST_PARAM:~0,1%"=="-" goto param_afterloop
+
+rem ignore -e "scala code"
+if "%_TEST_PARAM:~0,2%"=="-e" (
+ shift
+ shift
+ if [%1]==[] goto param_afterloop
+)
+
+set _TEST_PARAM=%~1
+if "%_TEST_PARAM:~0,2%"=="-J" (
+ set _JAVA_PARAMS=%_JAVA_PARAMS% %_TEST_PARAM:~2%
+)
+
+if "%_TEST_PARAM:~0,2%"=="-D" (
+ rem test if this was double-quoted property "-Dprop=42"
+ for /F "delims== tokens=1-2" %%G in ("%_TEST_PARAM%") DO (
+ if not "%%G" == "%_TEST_PARAM%" (
+ rem double quoted: "-Dprop=42" -> -Dprop="42"
+ set _JAVA_PARAMS=%_JAVA_PARAMS% %%G="%%H"
+ ) else if [%2] neq [] (
+ rem it was a normal property: -Dprop=42 or -Drop="42"
+ set _JAVA_PARAMS=%_JAVA_PARAMS% %_TEST_PARAM%=%2
+ shift
+ )
+ )
+)
goto param_loop
:param_afterloop
+
if "%OS%" NEQ "Windows_NT" (
echo "Warning, your version of Windows is not supported. Attempting to start scala anyway."
)
@@ -51,6 +112,9 @@ rem We use the value of the JAVA_OPTS environment variable if defined
set _JAVA_OPTS=%JAVA_OPTS%
if not defined _JAVA_OPTS set _JAVA_OPTS=@javaflags@
+rem We append _JAVA_PARAMS java arguments to JAVA_OPTS if necessary
+if defined _JAVA_PARAMS set _JAVA_OPTS=%_JAVA_OPTS% %_JAVA_PARAMS%
+
set _TOOL_CLASSPATH=@classpath@
if "%_TOOL_CLASSPATH%"=="" (
for %%f in ("!_SCALA_HOME!\lib\*") do call :add_cpath "%%f"
diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala
index 3f2d759a6d..0e3b2993c7 100644
--- a/src/compiler/scala/tools/nsc/Global.scala
+++ b/src/compiler/scala/tools/nsc/Global.scala
@@ -352,9 +352,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
// Here comes another one...
override protected val enableTypeVarExperimentals = settings.Xexperimental.value
- def getSourceFile(f: AbstractFile): BatchSourceFile =
- if (settings.script.isSetByUser) ScriptSourceFile(f, reader read f)
- else new BatchSourceFile(f, reader read f)
+ def getSourceFile(f: AbstractFile): BatchSourceFile = new BatchSourceFile(f, reader read f)
def getSourceFile(name: String): SourceFile = {
val f = AbstractFile.getFile(name)
@@ -1490,20 +1488,23 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
}
}
- /** Compile list of source files */
- def compileSources(sources: List[SourceFile]) {
- // there is a problem already, e.g. a plugin was passed a bad option
- if (reporter.hasErrors)
- return
+ /** Compile list of source files,
+ * unless there is a problem already,
+ * such as a plugin was passed a bad option.
+ */
+ def compileSources(sources: List[SourceFile]) = if (!reporter.hasErrors) {
- // nothing to compile, but we should still report use of deprecated options
- if (sources.isEmpty) {
+ def checkDeprecations() = {
checkDeprecatedSettings(newCompilationUnit(""))
reportCompileErrors()
- return
}
- compileUnits(sources map (new CompilationUnit(_)), firstPhase)
+ val units = sources map scripted map (new CompilationUnit(_))
+
+ units match {
+ case Nil => checkDeprecations() // nothing to compile, report deprecated options
+ case _ => compileUnits(units, firstPhase)
+ }
}
def compileUnits(units: List[CompilationUnit], fromPhase: Phase): Unit =
@@ -1605,12 +1606,18 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
catch { case ex: IOException => globalError(ex.getMessage()) }
}
+ /** If this compilation is scripted, convert the source to a script source. */
+ private def scripted(s: SourceFile) = s match {
+ case b: BatchSourceFile if settings.script.isSetByUser => ScriptSourceFile(b)
+ case _ => s
+ }
+
/** Compile abstract file until `globalPhase`, but at least
* to phase "namer".
*/
def compileLate(file: AbstractFile) {
if (!compiledFiles(file.path))
- compileLate(new CompilationUnit(getSourceFile(file)))
+ compileLate(new CompilationUnit(scripted(getSourceFile(file))))
}
/** Compile abstract file until `globalPhase`, but at least to phase "namer".
diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
index ad1977b9aa..7122e864a4 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
@@ -264,42 +264,6 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL {
mkNew(Nil, emptyValDef, stats1, NoPosition, NoPosition)
}
- /** Create positioned tree representing an object creation <new parents { stats }
- * @param npos the position of the new
- * @param cpos the position of the anonymous class starting with parents
- */
- def mkNew(parents: List[Tree], self: ValDef, stats: List[Tree],
- npos: Position, cpos: Position): Tree =
- if (parents.isEmpty)
- mkNew(List(scalaAnyRefConstr), self, stats, npos, cpos)
- else if (parents.tail.isEmpty && stats.isEmpty) {
- // `Parsers.template` no longer differentiates tpts and their argss
- // e.g. `C()` will be represented as a single tree Apply(Ident(C), Nil)
- // instead of parents = Ident(C), argss = Nil as before
- // this change works great for things that are actually templates
- // but in this degenerate case we need to perform postprocessing
- val app = treeInfo.dissectApplied(parents.head)
- atPos(npos union cpos) { New(app.callee, app.argss) }
- } else {
- val x = tpnme.ANON_CLASS_NAME
- atPos(npos union cpos) {
- Block(
- List(
- atPos(cpos) {
- ClassDef(
- Modifiers(FINAL), x, Nil,
- mkTemplate(parents, self, NoMods, ListOfNil, stats, cpos.focus))
- }),
- atPos(npos) {
- New(
- Ident(x) setPos npos.focus,
- Nil)
- }
- )
- }
- }
-
def mkSyntheticParam(pname: TermName) =
ValDef(Modifiers(PARAM | SYNTHETIC), pname, TypeTree(), EmptyTree)
-
}
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index 94270e4cf3..52aa11cb40 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -1445,7 +1445,7 @@ self =>
// The case still missed is unparenthesized single argument, like "x: Int => x + 1", which
// may be impossible to distinguish from a self-type and so remains an error. (See #1564)
def lhsIsTypedParamList() = t match {
- case Parens(xs) if xs forall (_.isInstanceOf[Typed]) => true
+ case Parens(xs) if xs.forall(isTypedParam) => true
case _ => false
}
if (in.token == ARROW && (location != InTemplate || lhsIsTypedParamList)) {
@@ -1458,6 +1458,8 @@ self =>
parseOther
}
+ def isTypedParam(t: Tree) = t.isInstanceOf[Typed]
+
/** {{{
* Expr ::= implicit Id => Expr
* }}}
@@ -2704,8 +2706,7 @@ self =>
syntaxError("classes are not allowed to be virtual", skipIt = false)
}
val template = templateOpt(mods1, name, constrMods withAnnotations constrAnnots, vparamss, tstart)
- if (isInterface(mods1, template.body)) mods1 |= Flags.INTERFACE
- val result = ClassDef(mods1, name, tparams, template)
+ val result = gen.mkClassDef(mods1, name, tparams, template)
// Context bounds generate implicit parameters (part of the template) with types
// from tparams: we need to ensure these don't overlap
if (!classContextBounds.isEmpty)
@@ -2796,16 +2797,7 @@ self =>
// @S: pre template body cannot stub like post body can!
val (self, body) = templateBody(isPre = true)
if (in.token == WITH && (self eq emptyValDef)) {
- val earlyDefs: List[Tree] = body flatMap {
- case vdef @ ValDef(mods, _, _, _) if !mods.isDeferred =>
- List(copyValDef(vdef)(mods = mods | Flags.PRESUPER))
- case tdef @ TypeDef(mods, name, tparams, rhs) =>
- List(treeCopy.TypeDef(tdef, mods | Flags.PRESUPER, name, tparams, rhs))
- case stat if !stat.isEmpty =>
- syntaxError(stat.pos, "only type definitions and concrete field definitions allowed in early object initialization section", skipIt = false)
- List()
- case _ => List()
- }
+ val earlyDefs: List[Tree] = body.map(ensureEarlyDef).filter(_.nonEmpty)
in.nextToken()
val parents = templateParents()
val (self1, body1) = templateBodyOpt(parenMeansSyntaxError = false)
@@ -2820,8 +2812,18 @@ self =>
}
}
- def isInterface(mods: Modifiers, body: List[Tree]): Boolean =
- mods.isTrait && (body forall treeInfo.isInterfaceMember)
+ def ensureEarlyDef(tree: Tree): Tree = tree match {
+ case vdef @ ValDef(mods, _, _, _) if !mods.isDeferred =>
+ copyValDef(vdef)(mods = mods | Flags.PRESUPER)
+ case tdef @ TypeDef(mods, name, tparams, rhs) =>
+ deprecationWarning(tdef.pos.point, "early type members are deprecated. Move them to the regular body: the semantics are the same.")
+ treeCopy.TypeDef(tdef, mods | Flags.PRESUPER, name, tparams, rhs)
+ case stat if !stat.isEmpty =>
+ syntaxError(stat.pos, "only concrete field definitions allowed in early object initialization section", skipIt = false)
+ EmptyTree
+ case _ =>
+ EmptyTree
+ }
/** {{{
* ClassTemplateOpt ::= `extends' ClassTemplate | [[`extends'] TemplateBody]
@@ -2830,7 +2832,7 @@ self =>
* }}}
*/
def templateOpt(mods: Modifiers, name: Name, constrMods: Modifiers, vparamss: List[List[ValDef]], tstart: Int): Template = {
- val (parents0, self, body) = (
+ val (parents, self, body) = (
if (in.token == EXTENDS || in.token == SUBTYPE && mods.isTrait) {
in.nextToken()
template()
@@ -2841,26 +2843,21 @@ self =>
(List(), self, body)
}
)
- def anyrefParents() = {
- val caseParents = if (mods.isCase) List(productConstr, serializableConstr) else Nil
- parents0 ::: caseParents match {
- case Nil => atInPos(scalaAnyRefConstr) :: Nil
- case ps => ps
- }
- }
def anyvalConstructor() = (
// Not a well-formed constructor, has to be finished later - see note
// regarding AnyVal constructor in AddInterfaces.
DefDef(NoMods, nme.CONSTRUCTOR, Nil, ListOfNil, TypeTree(), Block(Nil, literalUnit))
)
- val tstart0 = if (body.isEmpty && in.lastOffset < tstart) in.lastOffset else tstart
+ val parentPos = o2p(in.offset)
+ val tstart1 = if (body.isEmpty && in.lastOffset < tstart) in.lastOffset else tstart
- atPos(tstart0) {
+ atPos(tstart1) {
// Exclude only the 9 primitives plus AnyVal.
if (inScalaRootPackage && ScalaValueClassNames.contains(name))
- Template(parents0, self, anyvalConstructor :: body)
+ Template(parents, self, anyvalConstructor :: body)
else
- gen.mkTemplate(anyrefParents(), self, constrMods, vparamss, body, o2p(tstart))
+ gen.mkTemplate(gen.mkParents(mods, parents, parentPos),
+ self, constrMods, vparamss, body, o2p(tstart))
}
}
@@ -3011,19 +3008,23 @@ self =>
def refineStatSeq(): List[Tree] = checkNoEscapingPlaceholders {
val stats = new ListBuffer[Tree]
while (!isStatSeqEnd) {
- if (isDclIntro) { // don't IDE hook
- stats ++= joinComment(defOrDcl(in.offset, NoMods))
- } else if (!isStatSep) {
- syntaxErrorOrIncomplete(
- "illegal start of declaration"+
- (if (inFunReturnType) " (possible cause: missing `=' in front of current method body)"
- else ""), skipIt = true)
- }
+ stats ++= refineStat()
if (in.token != RBRACE) acceptStatSep()
}
stats.toList
}
+ def refineStat(): List[Tree] =
+ if (isDclIntro) { // don't IDE hook
+ joinComment(defOrDcl(in.offset, NoMods))
+ } else if (!isStatSep) {
+ syntaxErrorOrIncomplete(
+ "illegal start of declaration"+
+ (if (inFunReturnType) " (possible cause: missing `=' in front of current method body)"
+ else ""), skipIt = true)
+ Nil
+ } else Nil
+
/** overridable IDE hook for local definitions of blockStatSeq
* Here's an idea how to fill in start and end positions.
def localDef : List[Tree] = {
@@ -3066,7 +3067,7 @@ self =>
while (!isStatSeqEnd && !isCaseDefStart) {
if (in.token == IMPORT) {
stats ++= importClause()
- acceptStatSep()
+ acceptStatSepOpt()
}
else if (isExprIntro) {
stats += statement(InBlock)
diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
index ed694023d7..28e3217449 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
@@ -229,11 +229,7 @@ abstract class TreeBuilder {
}
/** Create block of statements `stats` */
- def makeBlock(stats: List[Tree]): Tree =
- if (stats.isEmpty) Literal(Constant(()))
- else if (!stats.last.isTerm) Block(stats, Literal(Constant(())))
- else if (stats.length == 1) stats.head
- else Block(stats.init, stats.last)
+ def makeBlock(stats: List[Tree]): Tree = gen.mkBlock(stats)
def makeFilter(tree: Tree, condition: Tree, scrutineeName: String): Tree = {
val cases = List(
@@ -520,8 +516,7 @@ abstract class TreeBuilder {
}
/** Create a tree representing the function type (argtpes) => restpe */
- def makeFunctionTypeTree(argtpes: List[Tree], restpe: Tree): Tree =
- AppliedTypeTree(rootScalaDot(newTypeName("Function" + argtpes.length)), argtpes ::: List(restpe))
+ def makeFunctionTypeTree(argtpes: List[Tree], restpe: Tree): Tree = gen.mkFunctionTypeTree(argtpes, restpe)
/** Append implicit parameter section if `contextBounds` nonempty */
def addEvidenceParams(owner: Name, vparamss: List[List[ValDef]], contextBounds: List[Tree]): List[List[ValDef]] = {
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
index 3947db2dd4..edb1c55224 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
@@ -2316,7 +2316,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
lastLineNr = currentLineNr
val lineLab = new asm.Label
jmethod.visitLabel(lineLab)
- lnEntries ::= LineNumberEntry(currentLineNr, lineLab)
+ val actual = iPos inUltimateSource iPos.source
+ lnEntries ::= LineNumberEntry(actual.line, lineLab)
}
}
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index 14e3f5b642..2b96961291 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -874,9 +874,14 @@ abstract class ClassfileParser {
case ENUM_TAG =>
val t = pool.getType(index)
val n = readName()
- val s = t.typeSymbol.companionModule.info.decls.lookup(n)
- assert(s != NoSymbol, t)
- Some(LiteralAnnotArg(Constant(s)))
+ val module = t.typeSymbol.companionModule
+ val s = module.info.decls.lookup(n)
+ if (s != NoSymbol) Some(LiteralAnnotArg(Constant(s)))
+ else {
+ warning(s"""While parsing annotations in ${in.file}, could not find $n in enum $module.\nThis is likely due to an implementation restriction: an annotation argument cannot refer to a member of the annotated class (SI-7014).""")
+ None
+ }
+
case ARRAY_TAG =>
val arr = new ArrayBuffer[ClassfileAnnotArg]()
var hasError = false
diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala
index cbe4f69d25..2ec7e97ac5 100644
--- a/src/compiler/scala/tools/nsc/transform/Constructors.scala
+++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala
@@ -174,7 +174,7 @@ abstract class Constructors extends Transform with ast.TreeDSL {
omittables ++= outerCandidatesForElision
val bodyOfOuterAccessor: Map[Symbol, DefDef] =
- defBuf collect { case dd: DefDef if outerCandidatesForElision(dd.symbol) => dd.symbol -> dd } toMap
+ defBuf.collect { case dd: DefDef if outerCandidatesForElision(dd.symbol) => dd.symbol -> dd }.toMap
// no point traversing further once omittables is empty, all candidates ruled out already.
object detectUsages extends Traverser {
diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
index 48263a496e..d6a6e027cb 100644
--- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
@@ -373,16 +373,10 @@ abstract class ExplicitOuter extends InfoTransform
/** The definition tree of the outer accessor of current class
*/
- def outerAccessorDef: Tree = {
- val outerAcc = outerAccessor(currentClass)
- val rhs: Tree =
- if (outerAcc.isDeferred) EmptyTree
- else This(currentClass) DOT outerField(currentClass)
-
- /* If we don't re-type the tree, we see self-type related crashes like #266.
- */
- localTyper typed {
- (DEF(outerAcc) withPos currentClass.pos withType null) === rhs
+ def outerAccessorDef: Tree = localTyper typed {
+ outerAccessor(currentClass) match {
+ case acc if acc.isDeferred => DefDef(acc, EmptyTree)
+ case acc => DefDef(acc, Select(This(currentClass), outerField(currentClass)))
}
}
@@ -404,12 +398,8 @@ abstract class ExplicitOuter extends InfoTransform
else if (mixinPrefix.typeArgs.nonEmpty) gen.mkAttributedThis(mixinPrefix.typeSymbol)
else gen.mkAttributedQualifier(mixinPrefix)
)
- localTyper typed {
- (DEF(outerAcc) withPos currentClass.pos) === {
- // Need to cast for nested outer refs in presence of self-types. See ticket #3274.
- gen.mkCast(transformer.transform(path), outerAcc.info.resultType)
- }
- }
+ // Need to cast for nested outer refs in presence of self-types. See ticket #3274.
+ localTyper typed DefDef(outerAcc, gen.mkCast(transformer.transform(path), outerAcc.info.resultType))
}
/** The main transformation method */
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala
index 3c7dc79636..114bcba5df 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala
@@ -202,15 +202,16 @@ trait Solving extends Logic {
withLit(findModelFor(dropUnit(f, unitLit)), unitLit)
case _ =>
// partition symbols according to whether they appear in positive and/or negative literals
- val pos = new mutable.HashSet[Sym]()
- val neg = new mutable.HashSet[Sym]()
+ // SI-7020 Linked- for deterministic counter examples.
+ val pos = new mutable.LinkedHashSet[Sym]()
+ val neg = new mutable.LinkedHashSet[Sym]()
f.foreach{_.foreach{ lit =>
if (lit.pos) pos += lit.sym else neg += lit.sym
}}
// appearing in both positive and negative
- val impures = pos intersect neg
+ val impures: mutable.LinkedHashSet[Sym] = pos intersect neg
// appearing only in either positive/negative positions
- val pures = (pos ++ neg) -- impures
+ val pures: mutable.LinkedHashSet[Sym] = (pos ++ neg) -- impures
if (pures nonEmpty) {
val pureSym = pures.head
diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
index 8d42bf94f3..cd2b9b3a97 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
@@ -19,6 +19,8 @@ trait Contexts { self: Analyzer =>
import definitions.{ JavaLangPackage, ScalaPackage, PredefModule, ScalaXmlTopScope, ScalaXmlPackage }
import ContextMode._
+ protected def onTreeCheckerError(pos: Position, msg: String): Unit = ()
+
object NoContext
extends Context(EmptyTree, NoSymbol, EmptyScope, NoCompilationUnit,
null) { // We can't pass the uninitialized `this`. Instead, we treat null specially in `Context#outer`
@@ -531,8 +533,8 @@ trait Contexts { self: Analyzer =>
if (msg endsWith ds) msg else msg + ds
}
- private def unitError(pos: Position, msg: String) =
- unit.error(pos, if (checking) "\n**** ERROR DURING INTERNAL CHECKING ****\n" + msg else msg)
+ private def unitError(pos: Position, msg: String): Unit =
+ if (checking) onTreeCheckerError(pos, msg) else unit.error(pos, msg)
@inline private def issueCommon(err: AbsTypeError)(pf: PartialFunction[AbsTypeError, Unit]) {
if (settings.Yissuedebug) {
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index 3a6b25f1cd..fbe8cd77fb 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -316,7 +316,7 @@ trait Implicits {
*/
class ImplicitSearch(tree: Tree, pt: Type, isView: Boolean, context0: Context, pos0: Position = NoPosition) extends Typer(context0) with ImplicitsContextErrors {
val searchId = implicitSearchId()
- private def typingLog(what: String, msg: String) =
+ private def typingLog(what: String, msg: => String) =
typingStack.printTyping(tree, f"[search #$searchId] $what $msg")
import infer._
diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
index 3a5845c8ca..263b5ad784 100644
--- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
@@ -397,6 +397,12 @@ trait MethodSynthesis {
if (mods.isDeferred) basisSym
else basisSym.getter(enclClass)
)
+ // Range position errors ensue if we don't duplicate this in some
+ // circumstances (at least: concrete vals with existential types.)
+ private def tptOriginal = (
+ if (mods.isDeferred) tree.tpt // keep type tree of original abstract field
+ else tree.tpt.duplicate setPos tree.tpt.pos.focus // focused position of original tpt
+ )
override def derivedTree: DefDef = {
// For existentials, don't specify a type for the getter, even one derived
@@ -407,16 +413,11 @@ trait MethodSynthesis {
// starts compiling (instead of failing like it's supposed to) because the typer
// expects to be able to identify escaping locals in typedDefDef, and fails to
// spot that brand of them. In other words it's an artifact of the implementation.
- val tpt = derivedSym.tpe.finalResultType match {
+ val tpt = atPos(derivedSym.pos.focus)(derivedSym.tpe.finalResultType match {
case ExistentialType(_, _) => TypeTree()
case _ if mods.isDeferred => TypeTree()
case tp => TypeTree(tp)
- }
- tpt setPos derivedSym.pos.focus
- // keep type tree of original abstract field
- if (mods.isDeferred)
- tpt setOriginal tree.tpt
-
+ })
// TODO - reconcile this with the DefDef creator in Trees (which
// at this writing presented no way to pass a tree in for tpt.)
atPos(derivedSym.pos) {
@@ -425,7 +426,7 @@ trait MethodSynthesis {
derivedSym.name.toTermName,
Nil,
Nil,
- tpt,
+ tpt setOriginal tptOriginal,
if (mods.isDeferred) EmptyTree else fieldSelection
) setSymbol derivedSym
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index 95d6ca52ec..454f913412 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -1418,14 +1418,6 @@ trait Namers extends MethodSynthesis {
annCtx.setReportErrors()
// need to be lazy, #1782. beforeTyper to allow inferView in annotation args, SI-5892.
AnnotationInfo lazily {
- if (typer.context ne ctx)
- log(sm"""|The var `typer.context` in ${Namer.this} was mutated before the annotation ${ann} was forced.
- |
- |current value = ${typer.context}
- |original value = $ctx
- |
- |This confirms the hypothesis for the cause of SI-7603. If you see this message, please comment on that ticket.""")
-
enteringTyper(newTyper(annCtx) typedAnnotation ann)
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
index 8e9933f734..dea4c46e79 100644
--- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
@@ -162,7 +162,7 @@ trait NamesDefaults { self: Analyzer =>
// never used for constructor calls, they always have a stable qualifier
def blockWithQualifier(qual: Tree, selected: Name) = {
- val sym = blockTyper.context.owner.newValue(unit.freshTermName("qual$"), qual.pos, newFlags = ARTIFACT) setInfo qual.tpe
+ val sym = blockTyper.context.owner.newValue(unit.freshTermName("qual$"), qual.pos, newFlags = ARTIFACT) setInfo uncheckedBounds(qual.tpe)
blockTyper.context.scope enter sym
val vd = atPos(sym.pos)(ValDef(sym, qual) setType NoType)
// it stays in Vegas: SI-5720, SI-5727
@@ -289,9 +289,10 @@ trait NamesDefaults { self: Analyzer =>
arg.tpe
}
).widen // have to widen or types inferred from literal defaults will be singletons
- val s = context.owner.newValue(unit.freshTermName("x$"), arg.pos, newFlags = ARTIFACT) setInfo (
- if (byName) functionType(Nil, argTpe) else argTpe
- )
+ val s = context.owner.newValue(unit.freshTermName("x$"), arg.pos, newFlags = ARTIFACT) setInfo {
+ val tp = if (byName) functionType(Nil, argTpe) else argTpe
+ uncheckedBounds(tp)
+ }
Some((context.scope.enter(s), byName, repeated))
})
map2(symPs, args) {
diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala
index 8bf9ce49be..38a3f18bf8 100644
--- a/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala
@@ -45,12 +45,6 @@ trait PatternTypers {
}
}
- // when true:
- // - we may virtualize matches (if -Xexperimental and there's a suitable __match in scope)
- // - we synthesize PartialFunction implementations for `x => x match {...}` and `match {...}` when the expected type is PartialFunction
- // this is disabled by: interactive compilation (we run it for scaladoc due to SI-5933)
- protected def newPatternMatching = true // presently overridden in the presentation compiler
-
trait PatternTyper {
self: Typer =>
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index 5929cab1d1..32e908e03b 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -1414,17 +1414,35 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
false
}
- private def checkTypeRef(tp: Type, tree: Tree) = tp match {
+ private def checkTypeRef(tp: Type, tree: Tree, skipBounds: Boolean) = tp match {
case TypeRef(pre, sym, args) =>
checkDeprecated(sym, tree.pos)
if(sym.isJavaDefined)
sym.typeParams foreach (_.cookJavaRawInfo())
- if (!tp.isHigherKinded)
+ if (!tp.isHigherKinded && !skipBounds)
checkBounds(tree, pre, sym.owner, sym.typeParams, args)
case _ =>
}
- private def checkAnnotations(tpes: List[Type], tree: Tree) = tpes foreach (tp => checkTypeRef(tp, tree))
+ private def checkTypeRefBounds(tp: Type, tree: Tree) = {
+ var skipBounds = false
+ tp match {
+ case AnnotatedType(ann :: Nil, underlying, selfSym) if ann.symbol == UncheckedBoundsClass =>
+ skipBounds = true
+ underlying
+ case TypeRef(pre, sym, args) =>
+ if (!tp.isHigherKinded && !skipBounds)
+ checkBounds(tree, pre, sym.owner, sym.typeParams, args)
+ tp
+ case _ =>
+ tp
+ }
+ }
+
+ private def checkAnnotations(tpes: List[Type], tree: Tree) = tpes foreach { tp =>
+ checkTypeRef(tp, tree, skipBounds = false)
+ checkTypeRefBounds(tp, tree)
+ }
private def doTypeTraversal(tree: Tree)(f: Type => Unit) = if (!inPattern) tree.tpe foreach f
private def applyRefchecksToAnnotations(tree: Tree): Unit = {
@@ -1453,8 +1471,9 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
}
doTypeTraversal(tree) {
- case AnnotatedType(annots, _, _) => applyChecks(annots)
- case _ =>
+ case tp @ AnnotatedType(annots, _, _) =>
+ applyChecks(annots)
+ case tp =>
}
case _ =>
}
@@ -1639,13 +1658,27 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
}
val existentialParams = new ListBuffer[Symbol]
- doTypeTraversal(tree) { // check all bounds, except those that are existential type parameters
- case ExistentialType(tparams, tpe) =>
+ var skipBounds = false
+ // check all bounds, except those that are existential type parameters
+ // or those within types annotated with @uncheckedBounds
+ doTypeTraversal(tree) {
+ case tp @ ExistentialType(tparams, tpe) =>
existentialParams ++= tparams
- case t: TypeRef =>
- checkTypeRef(deriveTypeWithWildcards(existentialParams.toList)(t), tree)
+ case ann: AnnotatedType if ann.hasAnnotation(UncheckedBoundsClass) =>
+ // SI-7694 Allow code synthesizers to disable checking of bounds for TypeTrees based on inferred LUBs
+ // which might not conform to the constraints.
+ skipBounds = true
+ case tp: TypeRef =>
+ val tpWithWildcards = deriveTypeWithWildcards(existentialParams.toList)(tp)
+ checkTypeRef(tpWithWildcards, tree, skipBounds)
case _ =>
}
+ if (skipBounds) {
+ tree.tpe = tree.tpe.map {
+ _.filterAnnotations(_.symbol != UncheckedBoundsClass)
+ }
+ }
+
tree
case TypeApply(fn, args) =>
@@ -1715,6 +1748,8 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
inPattern = false
super.transform(result)
}
+ case ValDef(_, _, _, _) if treeInfo.hasSynthCaseSymbol(result) =>
+ deriveValDef(result)(transform) // SI-7716 Don't refcheck the tpt of the synthetic val that holds the selector.
case _ =>
super.transform(result)
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
index 1c8d37ef39..3a188c0044 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
@@ -9,15 +9,69 @@ package typechecker
import scala.collection.mutable
import mutable.ListBuffer
import util.returning
+import scala.reflect.internal.util.shortClassOfInstance
+import scala.reflect.internal.util.StringOps._
abstract class TreeCheckers extends Analyzer {
import global._
- private def classstr(x: AnyRef) = (x.getClass.getName split """\\.|\\$""").last
+ override protected def onTreeCheckerError(pos: Position, msg: String) {
+ if (settings.fatalWarnings)
+ currentUnit.warning(pos, "\n** Error during internal checking:\n" + msg)
+ }
+
+ case class DiffResult[T](lost: List[T], gained: List[T]) {
+ def isEmpty = lost.isEmpty && gained.isEmpty
+ def lost_s = if (lost.isEmpty) "" else lost.mkString("lost: ", ", ", "")
+ def gained_s = if (gained.isEmpty) "" else gained.mkString("gained: ", ", ", "")
+ override def toString = ojoin(lost_s, gained_s)
+ }
+
+ def diffList[T](xs: List[T], ys: List[T]): DiffResult[T] =
+ DiffResult(xs filterNot ys.contains, ys filterNot xs.contains)
+
+ def diffTrees(t1: Tree, t2: Tree): DiffResult[Tree] =
+ diffList(t1 filter (_ ne t1), t2 filter (_ ne t2))
+
+ def diffTemplates(t1: Template, t2: Template): String = {
+ val parents = diffList(t1.parents, t2.parents).toString match { case "" => "" case s => "parents " + s }
+ val stats = diffList(t1.body, t2.body).toString match { case "" => "" case s => "stats " + s }
+ oempty(parents, stats) mkString ", "
+ }
+
+ def diff(t1: Tree, t2: Tree): String = (t1, t2) match {
+ case (_: Literal, _: Literal) => ""
+ case (t1: ImplDef, t2: ImplDef) => diff(t1.impl, t2.impl)
+ case (t1: Template, t2: Template) => diffTemplates(t1, t2)
+ case _ => diffTrees(t1, t2).toString // "<error: different tree classes>"
+ }
+
+ private def clean_s(s: String) = s.replaceAllLiterally("scala.collection.", "s.c.")
private def typestr(x: Type) = " (tpe = " + x + ")"
- private def treestr(t: Tree) = t + " [" + classstr(t) + "]" + typestr(t.tpe)
+ private def treestr(t: Tree) = t + " [" + classString(t) + "]" + typestr(t.tpe)
private def ownerstr(s: Symbol) = "'" + s + "'" + s.locationString
private def wholetreestr(t: Tree) = nodeToString(t) + "\n"
+ private def truncate(str: String, len: Int): String = (
+ if (str.length <= len) str
+ else (str takeWhile (_ != '\n') take len - 3) + "..."
+ )
+ private def signature(sym: Symbol) = clean_s(sym match {
+ case null => "null"
+ case _: ClassSymbol => sym.name + ": " + sym.tpe_*
+ case _ => sym.defString
+ })
+ private def classString(x: Any) = x match {
+ case null => ""
+ case t: Tree => t.shortClass
+ case s: Symbol => s.shortSymbolClass
+ case x: AnyRef => shortClassOfInstance(x)
+ }
+ private def nonPackageOwners(s: Symbol) = s.ownerChain drop 1 takeWhile (!_.hasPackageFlag)
+ private def nonPackageOwnersPlusOne(s: Symbol) = nonPackageOwners(s) ::: (s.ownerChain dropWhile (!_.hasPackageFlag) take 1)
+ private def ownersString(s: Symbol) = nonPackageOwnersPlusOne(s) match {
+ case Nil => "NoSymbol"
+ case xs => xs mkString " -> "
+ }
private def beststr(t: Tree) = "<" + {
if (t.symbol != null && t.symbol != NoSymbol) "sym=" + ownerstr(t.symbol)
@@ -25,46 +79,50 @@ abstract class TreeCheckers extends Analyzer {
else t match {
case x: DefTree => "name=" + x.name
case x: RefTree => "reference=" + x.name
- case _ => "clazz=" + classstr(t)
+ case _ => "clazz=" + classString(t)
}
} + ">"
/** This is a work in progress, don't take it too seriously.
*/
object SymbolTracker extends Traverser {
- type PhaseMap = mutable.HashMap[Symbol, List[Tree]]
+ type PhaseMap = mutable.Map[Symbol, List[Tree]]
+ def symbolTreeMap[T <: Tree]() = mutable.Map[Symbol, List[T]]() withDefaultValue Nil
- val maps = ListBuffer[(Phase, PhaseMap)]()
- def prev = maps.init.last._2
- def latest = maps.last._2
- val defSyms = mutable.HashMap[Symbol, List[DefTree]]()
+ var maps: List[(Phase, PhaseMap)] = ((NoPhase, null)) :: Nil
+ def prev = maps.tail.head._2
+ def latest = maps.head._2
+ val defSyms = symbolTreeMap[DefTree]()
val newSyms = mutable.HashSet[Symbol]()
val movedMsgs = new ListBuffer[String]
def sortedNewSyms = newSyms.toList.distinct sortBy (_.name.toString)
- def inPrev(sym: Symbol) = {
- (maps.size >= 2) && (prev contains sym)
- }
- def record(sym: Symbol, tree: Tree) = {
- if (latest contains sym) latest(sym) = latest(sym) :+ tree
- else latest(sym) = List(tree)
+ def record(tree: Tree) {
+ val sym = tree.symbol
+ if ((sym eq null) || (sym eq NoSymbol)) return
- if (inPrev(sym)) {
- val prevTrees = prev(sym)
+ val prevMap = maps.tail.head._2
+ val prevTrees = if (prevMap eq null) Nil else prevMap(sym)
- if (prevTrees exists (t => (t eq tree) || (t.symbol == sym))) ()
- else if (prevTrees exists (_.symbol.owner == sym.owner.implClass)) {
- errorFn("Noticed " + ownerstr(sym) + " moving to implementation class.")
- }
- else {
- val s1 = (prevTrees map wholetreestr).sorted.distinct
- val s2 = wholetreestr(tree)
- if (s1 contains s2) ()
- else movedMsgs += ("\n** %s moved:\n** Previously:\n%s\n** Currently:\n%s".format(ownerstr(sym), s1 mkString ", ", s2))
- }
+ tree match {
+ case t: DefTree => defSyms(sym) ::= t
+ case _ =>
+ }
+
+ if (prevTrees.isEmpty)
+ newSyms += sym
+ else if (prevTrees exists (t => (t eq tree) || (t.symbol == sym)))
+ ()
+ else if (prevTrees exists (_.symbol.owner == sym.owner.implClass))
+ errorFn("Noticed " + ownerstr(sym) + " moving to implementation class.")
+ else {
+ val s1 = (prevTrees map wholetreestr).sorted.distinct
+ val s2 = wholetreestr(tree)
+ if (s1 contains s2) ()
+ else movedMsgs += ("\n** %s moved:\n** Previously:\n%s\n** Currently:\n%s".format(ownerstr(sym), s1 mkString ", ", s2))
}
- else newSyms += sym
}
+
def reportChanges(): Unit = {
// new symbols
if (newSyms.nonEmpty) {
@@ -88,37 +146,34 @@ abstract class TreeCheckers extends Analyzer {
}
def check(ph: Phase, unit: CompilationUnit): Unit = {
- if (maps.isEmpty || maps.last._1 != ph)
- maps += ((ph, new PhaseMap))
-
+ maps match {
+ case ((`ph`, _)) :: _ =>
+ case _ => maps ::= ((ph, symbolTreeMap[Tree]()))
+ }
traverse(unit.body)
reportChanges()
}
- override def traverse(tree: Tree): Unit = {
- val sym = tree.symbol
- if (sym != null && sym != NoSymbol) {
- record(sym, tree)
- tree match {
- case x: DefTree =>
- if (defSyms contains sym) defSyms(sym) = defSyms(sym) :+ x
- else defSyms(sym) = List(x)
- case _ => ()
- }
- }
-
+ override def traverse(tree: Tree) {
+ record(tree)
super.traverse(tree)
}
}
lazy val tpeOfTree = mutable.HashMap[Tree, Type]()
+ private lazy val reportedAlready = mutable.HashSet[(Tree, Symbol)]()
+
+ def posstr(t: Tree): String = if (t eq null) "" else posstr(t.pos)
+ def posstr(p: Position): String = (
+ if (p eq null) "" else {
+ try p.source.path + ":" + p.line
+ catch { case _: UnsupportedOperationException => p.toString }
+ }
+ )
- def posstr(p: Position) =
- try p.source.path + ":" + p.line
- catch { case _: UnsupportedOperationException => p.toString }
- private var hasError: Boolean = false
- def errorFn(msg: Any): Unit = {hasError = true; println("[check: %s] %s".format(phase.prev, msg))}
+ def errorFn(msg: Any): Unit = Console.err println "[check: %s] %s".format(phase.prev, msg)
def errorFn(pos: Position, msg: Any): Unit = errorFn(posstr(pos) + ": " + msg)
+
def informFn(msg: Any) {
if (settings.verbose || settings.debug)
println("[check: %s] %s".format(phase.prev, msg))
@@ -127,12 +182,13 @@ abstract class TreeCheckers extends Analyzer {
def assertFn(cond: Boolean, msg: => Any) =
if (!cond) errorFn(msg)
- private def wrap[T](msg: => Any)(body: => Unit) {
+ private def wrap[T](msg: => Any)(body: => T): T = {
try body
catch { case x: Throwable =>
Console.println("Caught " + x)
Console.println(msg)
x.printStackTrace
+ null.asInstanceOf[T]
}
}
@@ -144,7 +200,6 @@ abstract class TreeCheckers extends Analyzer {
}
def runWithUnit[T](unit: CompilationUnit)(body: => Unit): Unit = {
- hasError = false
val unit0 = currentUnit
currentRun.currentUnit = unit
body
@@ -163,22 +218,28 @@ abstract class TreeCheckers extends Analyzer {
checker.precheck.traverse(unit.body)
checker.typed(unit.body)
checker.postcheck.traverse(unit.body)
- if (hasError) unit.warning(NoPosition, "TreeCheckers detected non-compliant trees in " + unit)
}
}
override def newTyper(context: Context): Typer = new TreeChecker(context)
class TreeChecker(context0: Context) extends Typer(context0) {
- override protected def finishMethodSynthesis(templ: Template, clazz: Symbol, context: Context): Template = {
- // If we don't intercept this all the synthetics get added at every phase,
- // with predictably unfortunate results.
- templ
- }
+ // If we don't intercept this all the synthetics get added at every phase,
+ // with predictably unfortunate results.
+ override protected def finishMethodSynthesis(templ: Template, clazz: Symbol, context: Context): Template = templ
// XXX check for tree.original on TypeTrees.
- private def treesDiffer(t1: Tree, t2: Tree) =
- errorFn(t1.pos, "trees differ\n old: " + treestr(t1) + "\n new: " + treestr(t2))
+ private def treesDiffer(t1: Tree, t2: Tree): Unit = {
+ def len1 = t1.toString.length
+ def len2 = t2.toString.length
+ def name = t1 match {
+ case t: NameTree => t.name
+ case _ => t1.summaryString
+ }
+ def summary = s"${t1.shortClass} $name differs, bytes $len1 -> $len2, "
+ errorFn(t1.pos, summary + diff(t1, t2))
+ }
+
private def typesDiffer(tree: Tree, tp1: Type, tp2: Type) =
errorFn(tree.pos, "types differ\n old: " + tp1 + "\n new: " + tp2 + "\n tree: " + tree)
@@ -192,27 +253,45 @@ abstract class TreeCheckers extends Analyzer {
if (t.symbol == NoSymbol)
errorFn(t.pos, "no symbol: " + treestr(t))
- override def typed(tree: Tree, mode: Mode, pt: Type): Tree = returning(tree) {
- case EmptyTree | TypeTree() => ()
- case _ if tree.tpe != null =>
- tpeOfTree.getOrElseUpdate(tree, try tree.tpe finally tree.clearType())
-
- wrap(tree)(super.typed(tree, mode, pt) match {
- case _: Literal => ()
- case x if x ne tree => treesDiffer(tree, x)
- case _ => ()
- })
- case _ => ()
+ private def passThrough(tree: Tree) = tree match {
+ case EmptyTree | TypeTree() => true
+ case _ => tree.tpe eq null
+ }
+ override def typed(tree: Tree, mode: Mode, pt: Type): Tree = (
+ if (passThrough(tree))
+ super.typed(tree, mode, pt)
+ else
+ checkedTyped(tree, mode, pt)
+ )
+ private def checkedTyped(tree: Tree, mode: Mode, pt: Type): Tree = {
+ def tpe = try tree.tpe finally tree.clearType()
+ val recorded = tpeOfTree.getOrElseUpdate(tree, tpe)
+ val typed = wrap(tree)(super.typed(tree, mode, pt))
+
+ if (tree ne typed)
+ treesDiffer(tree, typed)
+
+ tree
}
object precheck extends TreeStackTraverser {
- override def traverse(tree: Tree) {
- checkSymbolRefsRespectScope(tree)
+ private var enclosingMemberDefs: List[MemberDef] = Nil
+ private def pushMemberDef[T](md: MemberDef)(body: => T): T = {
+ enclosingMemberDefs ::= md
+ try body finally enclosingMemberDefs = enclosingMemberDefs.tail
+ }
+ override def traverse(tree: Tree): Unit = tree match {
+ case md: MemberDef => pushMemberDef(md)(traverseInternal(tree))
+ case _ => traverseInternal(tree)
+ }
+
+ private def traverseInternal(tree: Tree) {
+ checkSymbolRefsRespectScope(enclosingMemberDefs takeWhile (md => !md.symbol.hasPackageFlag), tree)
checkReturnReferencesDirectlyEnclosingDef(tree)
val sym = tree.symbol
def accessed = sym.accessed
- def fail(msg: String) = errorFn(tree.pos, msg + classstr(tree) + " / " + tree)
+ def fail(msg: String) = errorFn(tree.pos, msg + tree.shortClass + " / " + tree)
tree match {
case DefDef(_, _, _, _, _, _) =>
@@ -254,7 +333,7 @@ abstract class TreeCheckers extends Analyzer {
case _ =>
}
- if (tree.pos == NoPosition && tree != EmptyTree)
+ if (tree.canHaveAttrs && tree.pos == NoPosition)
noPos(tree)
else if (tree.tpe == null && phase.id > currentRun.typerPhase.id)
noType(tree)
@@ -281,57 +360,99 @@ abstract class TreeCheckers extends Analyzer {
super.traverse(tree)
}
- private def checkSymbolRefsRespectScope(tree: Tree) {
- def symbolOf(t: Tree): Symbol = Option(tree.symbol).getOrElse(NoSymbol)
- val info = Option(symbolOf(tree).info).getOrElse(NoType)
- val referencedSymbols: List[Symbol] = {
- val directRef = tree match {
- case _: RefTree => symbolOf(tree).toOption
- case _ => None
+ private def checkSymbolRefsRespectScope(enclosingMemberDefs: List[MemberDef], tree: Tree) {
+ def symbolOf(t: Tree): Symbol = if (t.symbol eq null) NoSymbol else t.symbol
+ def typeOf(t: Tree): Type = if (t.tpe eq null) NoType else t.tpe
+ def infoOf(t: Tree): Type = symbolOf(t).info
+ def referencesInType(tp: Type) = tp collect { case TypeRef(_, sym, _) => sym }
+ def referencesInTree(t: Tree) = referencesInType(typeOf(t)) ++ referencesInType(infoOf(t))
+ def symbolRefsInTree(t: Tree) = t collect { case t: RefTree => symbolOf(t) }
+ // Accessors are known to steal the type of the underlying field without cloning existential symbols at the new owner.
+ // This happens in Namer#accessorTypeCompleter. We just look the other way here.
+ if (symbolOf(tree).isAccessor)
+ return
+
+ val treeSym = symbolOf(tree)
+ val treeInfo = infoOf(tree)
+ val treeTpe = typeOf(tree)
+
+ def isOk(sym: Symbol) = treeSym hasTransOwner sym.enclosingSuchThat(x => !x.isTypeParameterOrSkolem) // account for higher order type params
+ def isEligible(sym: Symbol) = (sym ne NoSymbol) && (
+ sym.isTypeParameter
+ || sym.isLocal
+ )
+ val direct = tree match {
+ case _: RefTree => treeSym
+ case _ => NoSymbol
+ }
+ val referencedSymbols = (treeSym :: referencesInType(treeInfo)).distinct filter (sym => isEligible(sym) && !isOk(sym))
+ def mk[T](what: String, x: T, str: T => String = (x: T) => "" + x): ((Any, String)) =
+ x -> s"%10s %-20s %s".format(what, classString(x), truncate(str(x), 80).trim)
+
+ def encls = enclosingMemberDefs.filterNot(_.symbol == treeSym).zipWithIndex map { case (md, i) => mk(s"encl(${i+1})", md.symbol, signature) }
+
+ def mkErrorMsg(outOfScope: Symbol): String = {
+
+ def front = List(
+ mk[Tree]("tree", tree),
+ mk[Position]("position", tree.pos, posstr),
+ mk("with sym", treeSym, signature)
+ )
+ def tpes = treeTpe match {
+ case NoType => Nil
+ case _ => mk[Type]("and tpe", treeTpe) :: Nil
}
- def referencedSyms(tp: Type) = (tp collect {
- case TypeRef(_, sym, _) => sym
- }).toList
- val indirectRefs = referencedSyms(info)
- (indirectRefs ++ directRef).distinct
+ def ref = mk[Symbol]("ref to", outOfScope, (s: Symbol) => s.nameString + " (" + s.debugFlagString + ")")
+
+ val pairs = front ++ tpes ++ encls ++ (ref :: Nil)
+ val width = pairs.map(_._2.length).max
+ val fmt = "%-" + width + "s"
+ val lines = pairs map {
+ case (s: Symbol, msg) => fmt.format(msg) + " in " + ownersString(s)
+ case (x, msg) => fmt.format(msg)
+ }
+ lines.mkString("Out of scope symbol reference {\n", "\n", "\n}")
}
- for {
- sym <- referencedSymbols
- // Accessors are known to steal the type of the underlying field without cloning existential symbols at the new owner.
- // This happens in Namer#accessorTypeCompleter. We just look the other way here.
- if !tree.symbol.isAccessor
- if (sym.isTypeParameter || sym.isLocal) && !(tree.symbol hasTransOwner sym.owner)
- } errorFn(s"The symbol, tpe or info of tree `(${tree}) : ${info}` refers to a out-of-scope symbol, ${sym.fullLocationString}. tree.symbol.ownerChain: ${tree.symbol.ownerChain.mkString(", ")}")
- }
- private def checkReturnReferencesDirectlyEnclosingDef(tree: Tree) {
- tree match {
- case _: Return =>
- path.collectFirst {
- case dd: DefDef => dd
- } match {
- case None => errorFn(s"Return node ($tree) must be enclosed in a DefDef")
- case Some(dd) =>
- if (tree.symbol != dd.symbol) errorFn(s"Return symbol (${tree.symbol}} does not reference directly enclosing DefDef (${dd.symbol})")
+ referencedSymbols foreach (sym =>
+ if (reportedAlready((tree, sym))) {
+ def what = tree match {
+ case tt: TypeTree => s"TypeTree(${tt.tpe})"
+ case _ => tree.shortClass + "(" + tree.symbol.nameString + ")"
}
- case _ =>
- }
+ }
+ else {
+ errorFn("\n" + mkErrorMsg(sym))
+ reportedAlready += ((tree, sym))
+ }
+ )
+ }
+
+ private def checkReturnReferencesDirectlyEnclosingDef(tree: Tree): Unit = tree match {
+ case _: Return =>
+ path collectFirst { case dd: DefDef => dd } match {
+ case None => errorFn(s"Return node ($tree) must be enclosed in a DefDef")
+ case Some(dd) if tree.symbol != dd.symbol => errorFn(s"Return symbol (${tree.symbol}} does not reference directly enclosing DefDef (${dd.symbol})")
+ case _ =>
+ }
+ case _ =>
}
}
object postcheck extends Traverser {
- override def traverse(tree: Tree) {
- tree match {
- case EmptyTree | TypeTree() => ()
- case _ =>
- tpeOfTree get tree foreach { oldtpe =>
- if (oldtpe =:= tree.tpe) ()
- else typesDiffer(tree, oldtpe, tree.tpe)
-
- tree setType oldtpe
- super.traverse(tree)
- }
- }
+ override def traverse(tree: Tree): Unit = tree match {
+ case EmptyTree | TypeTree() => ()
+ case _ =>
+ tpeOfTree get tree foreach { oldtpe =>
+ if (tree.tpe eq null)
+ errorFn(s"tree.tpe=null for " + tree.shortClass + " (symbol: " + classString(tree.symbol) + " " + signature(tree.symbol) + "), last seen tpe was " + oldtpe)
+ else if (oldtpe =:= tree.tpe)
+ ()
+ else
+ typesDiffer(tree, oldtpe, tree.tpe)
+
+ super.traverse(tree setType oldtpe)
+ }
}
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
index 13fa2a947d..695a1e2e24 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
@@ -350,11 +350,14 @@ trait TypeDiagnostics {
val strings = mutable.Map[String, Set[TypeDiag]]() withDefaultValue Set()
val names = mutable.Map[Name, Set[TypeDiag]]() withDefaultValue Set()
- def record(t: Type, sym: Symbol) = {
- val diag = TypeDiag(t, sym)
+ val localsSet = locals.toSet
- strings("" + t) += diag
- names(sym.name) += diag
+ def record(t: Type, sym: Symbol) = {
+ if (!localsSet(sym)) {
+ val diag = TypeDiag(t, sym)
+ strings("" + t) += diag
+ names(sym.name) += diag
+ }
}
for (tpe <- types ; t <- tpe) {
t match {
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index dd16b5be85..629513ada3 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -2436,7 +2436,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
// TODO: add fallback __match sentinel to predef
val matchStrategy: Tree =
- if (!(newPatternMatching && settings.Xexperimental && context.isNameInScope(vpmName._match))) null // fast path, avoiding the next line if there's no __match to be seen
+ if (!(settings.Xexperimental && context.isNameInScope(vpmName._match))) null // fast path, avoiding the next line if there's no __match to be seen
else newTyper(context.makeImplicit(reportAmbiguousErrors = false)).silent(_.typed(Ident(vpmName._match)), reportAmbiguousErrors = false) orElse (_ => null)
if (matchStrategy ne null) // virtualize
@@ -2713,7 +2713,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
fun.body match {
// translate `x => x match { <cases> }` : PartialFunction to
// `new PartialFunction { def applyOrElse(x, default) = x match { <cases> } def isDefinedAt(x) = ... }`
- case Match(sel, cases) if (sel ne EmptyTree) && newPatternMatching && (pt.typeSymbol == PartialFunctionClass) =>
+ case Match(sel, cases) if (sel ne EmptyTree) && (pt.typeSymbol == PartialFunctionClass) =>
// go to outer context -- must discard the context that was created for the Function since we're discarding the function
// thus, its symbol, which serves as the current context.owner, is not the right owner
// you won't know you're using the wrong owner until lambda lift crashes (unless you know better than to use the wrong owner)
@@ -3789,7 +3789,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
// we need symbol-ful originals for reification
// hence we go the extra mile to hand-craft tis guy
val original = arg1 match {
- case tt @ TypeTree() => Annotated(ann, tt.original)
+ case tt @ TypeTree() if tt.original != null => Annotated(ann, tt.original)
// this clause is needed to correctly compile stuff like "new C @D" or "@(inline @getter)"
case _ => Annotated(ann, arg1)
}
@@ -3997,7 +3997,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
val selector = tree.selector
val cases = tree.cases
if (selector == EmptyTree) {
- if (newPatternMatching && (pt.typeSymbol == PartialFunctionClass))
+ if (pt.typeSymbol == PartialFunctionClass)
synthesizePartialFunction(newTermName(context.unit.fresh.newName("x")), tree.pos, tree, mode, pt)
else {
val arity = if (isFunctionType(pt)) pt.dealiasWiden.typeArgs.length - 1 else 1
diff --git a/src/compiler/scala/tools/nsc/util/ClassPath.scala b/src/compiler/scala/tools/nsc/util/ClassPath.scala
index 7f9b81e1ec..906a575d90 100644
--- a/src/compiler/scala/tools/nsc/util/ClassPath.scala
+++ b/src/compiler/scala/tools/nsc/util/ClassPath.scala
@@ -16,6 +16,7 @@ import File.pathSeparator
import scala.collection.convert.WrapAsScala.enumerationAsScalaIterator
import java.net.MalformedURLException
import java.util.regex.PatternSyntaxException
+import scala.reflect.runtime.ReflectionUtils
/** <p>
* This module provides star expansion of '-classpath' option arguments, behaves the same as
@@ -80,7 +81,7 @@ object ClassPath {
}
/** A useful name filter. */
- def isTraitImplementation(name: String) = name endsWith "$class.class"
+ def isTraitImplementation(name: String) = ReflectionUtils.isTraitImplementation(name)
def specToURL(spec: String): Option[URL] =
try Some(new URL(spec))
@@ -139,7 +140,7 @@ object ClassPath {
}
object DefaultJavaContext extends JavaContext {
- override def isValidName(name: String) = !isTraitImplementation(name)
+ override def isValidName(name: String) = !ReflectionUtils.scalacShouldntLoadClassfile(name)
}
private def endsClass(s: String) = s.length > 6 && s.substring(s.length - 6) == ".class"
diff --git a/src/compiler/scala/tools/nsc/util/StackTracing.scala b/src/compiler/scala/tools/nsc/util/StackTracing.scala
new file mode 100644
index 0000000000..fa4fe29f28
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/util/StackTracing.scala
@@ -0,0 +1,76 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ */
+
+package scala.tools.nsc.util
+
+private[util] trait StackTracing extends Any {
+
+ /** Format a stack trace, returning the prefix consisting of frames that satisfy
+ * a given predicate.
+ * The format is similar to the typical case described in the JavaDoc
+ * for [[java.lang.Throwable#printStackTrace]].
+ * If a stack trace is truncated, it will be followed by a line of the form
+ * `... 3 elided`, by analogy to the lines `... 3 more` which indicate
+ * shared stack trace segments.
+ * @param e the exception
+ * @param p the predicate to select the prefix
+ */
+ def stackTracePrefixString(e: Throwable)(p: StackTraceElement => Boolean): String = {
+ import collection.mutable.{ ArrayBuffer, ListBuffer }
+ import compat.Platform.EOL
+ import util.Properties.isJavaAtLeast
+
+ val sb = ListBuffer.empty[String]
+
+ type TraceRelation = String
+ val Self = new TraceRelation("")
+ val CausedBy = new TraceRelation("Caused by: ")
+ val Suppressed = new TraceRelation("Suppressed: ")
+
+ val suppressable = isJavaAtLeast("1.7")
+
+ def clazz(e: Throwable) = e.getClass.getName
+ def because(e: Throwable): String = e.getCause match { case null => null ; case c => header(c) }
+ def msg(e: Throwable): String = e.getMessage match { case null => because(e) ; case s => s }
+ def txt(e: Throwable): String = msg(e) match { case null => "" ; case s => s": $s" }
+ def header(e: Throwable): String = s"${clazz(e)}${txt(e)}"
+
+ val indent = "\u0020\u0020"
+
+ val seen = new ArrayBuffer[Throwable](16)
+ def unseen(t: Throwable) = {
+ def inSeen = seen exists (_ eq t)
+ val interesting = (t != null) && !inSeen
+ if (interesting) seen += t
+ interesting
+ }
+
+ def print(e: Throwable, r: TraceRelation, share: Array[StackTraceElement], indents: Int): Unit = if (unseen(e)) {
+ val trace = e.getStackTrace
+ val frames = (
+ if (share.nonEmpty) {
+ val spare = share.reverseIterator
+ val trimmed = trace.reverse dropWhile (spare.hasNext && spare.next == _)
+ trimmed.reverse
+ } else trace
+ )
+ val prefix = frames takeWhile p
+ val margin = indent * indents
+ val indented = margin + indent
+ sb append s"${margin}${r}${header(e)}"
+ prefix foreach (f => sb append s"${indented}at $f")
+ if (frames.size < trace.size) sb append s"$indented... ${trace.size - frames.size} more"
+ if (r == Self && prefix.size < frames.size) sb append s"$indented... ${frames.size - prefix.size} elided"
+ print(e.getCause, CausedBy, trace, indents)
+ if (suppressable) {
+ import scala.language.reflectiveCalls
+ type Suppressing = { def getSuppressed(): Array[Throwable] }
+ for (s <- e.asInstanceOf[Suppressing].getSuppressed) print(s, Suppressed, frames, indents + 1)
+ }
+ }
+ print(e, Self, share = Array.empty, indents = 0)
+
+ sb mkString EOL
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/util/package.scala b/src/compiler/scala/tools/nsc/util/package.scala
index ea3c9d8dde..72a4bbf5c0 100644
--- a/src/compiler/scala/tools/nsc/util/package.scala
+++ b/src/compiler/scala/tools/nsc/util/package.scala
@@ -8,7 +8,6 @@ package tools
package nsc
import java.io.{ OutputStream, PrintStream, ByteArrayOutputStream, PrintWriter, StringWriter }
-import scala.compat.Platform.EOL
package object util {
@@ -79,12 +78,17 @@ package object util {
s"$clazz$msg @ $frame"
}
- def stackTracePrefixString(ex: Throwable)(p: StackTraceElement => Boolean): String = {
- val frames = ex.getStackTrace takeWhile p map (" at " + _)
- val msg = ex.getMessage match { case null => "" ; case s => s": $s" }
- val clazz = ex.getClass.getName
-
- s"$clazz$msg" +: frames mkString EOL
+ implicit class StackTraceOps(val e: Throwable) extends AnyVal with StackTracing {
+ /** Format the stack trace, returning the prefix consisting of frames that satisfy
+ * a given predicate.
+ * The format is similar to the typical case described in the JavaDoc
+ * for [[java.lang.Throwable#printStackTrace]].
+ * If a stack trace is truncated, it will be followed by a line of the form
+ * `... 3 elided`, by analogy to the lines `... 3 more` which indicate
+ * shared stack trace segments.
+ * @param p the predicate to select the prefix
+ */
+ def stackTracePrefixString(p: StackTraceElement => Boolean): String = stackTracePrefixString(e)(p)
}
lazy val trace = new SimpleTracer(System.out)
diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
index 8d2f200e99..57ebe1b30d 100644
--- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
+++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
@@ -12,6 +12,7 @@ import java.lang.{Class => jClass}
import scala.compat.Platform.EOL
import scala.reflect.NameTransformer
import scala.reflect.api.JavaUniverse
+import scala.reflect.io.NoAbstractFile
abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
@@ -136,7 +137,9 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
val wrapper2 = if (!withMacrosDisabled) (currentTyper.context.withMacrosEnabled[Tree] _) else (currentTyper.context.withMacrosDisabled[Tree] _)
def wrapper (tree: => Tree) = wrapper1(wrapper2(tree))
- phase = (new Run).typerPhase // need to set a phase to something <= typerPhase, otherwise implicits in typedSelect will be disabled
+ val run = new Run
+ run.symSource(ownerClass) = NoAbstractFile // need to set file to something different from null, so that currentRun.defines works
+ phase = run.typerPhase // need to set a phase to something <= typerPhase, otherwise implicits in typedSelect will be disabled
currentTyper.context.setReportErrors() // need to manually set context mode, otherwise typer.silent will throw exceptions
reporter.reset()
diff --git a/src/compiler/scala/tools/reflect/WrappedProperties.scala b/src/compiler/scala/tools/reflect/WrappedProperties.scala
index 20567719be..523287fc66 100644
--- a/src/compiler/scala/tools/reflect/WrappedProperties.scala
+++ b/src/compiler/scala/tools/reflect/WrappedProperties.scala
@@ -27,9 +27,13 @@ trait WrappedProperties extends PropertiesTrait {
override def envOrNone(name: String) = wrap(super.envOrNone(name)).flatten
override def envOrSome(name: String, alt: Option[String]) = wrap(super.envOrNone(name)).flatten orElse alt
- def systemProperties: Iterator[(String, String)] = {
+ def systemProperties: List[(String, String)] = {
import scala.collection.JavaConverters._
- wrap(System.getProperties.asScala.iterator) getOrElse Iterator.empty
+ wrap {
+ val props = System.getProperties
+ // SI-7269 Be careful to avoid `ConcurrentModificationException` if another thread modifies the properties map
+ props.stringPropertyNames().asScala.toList.map(k => (k, props.get(k).asInstanceOf[String]))
+ } getOrElse Nil
}
}
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala b/src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala
index 18a806e5ff..19888fa8d2 100644
--- a/src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala
@@ -50,6 +50,10 @@ trait Parsers { self: Quasiquotes =>
def entryPoint: QuasiquoteParser => Tree
class QuasiquoteParser(source0: SourceFile) extends SourceFileParser(source0) {
+ def isHole: Boolean = isIdent && isHole(in.name)
+
+ def isHole(name: Name): Boolean = holeMap.contains(name)
+
override val treeBuilder = new ParserTreeBuilder {
// q"(..$xs)"
override def makeTupleTerm(trees: List[Tree], flattenUnary: Boolean): Tree =
@@ -61,9 +65,13 @@ trait Parsers { self: Quasiquotes =>
// q"{ $x }"
override def makeBlock(stats: List[Tree]): Tree = stats match {
- case (head @ Ident(name)) :: Nil if holeMap.contains(name) => Block(Nil, head)
+ case (head @ Ident(name)) :: Nil if isHole(name) => Block(Nil, head)
case _ => super.makeBlock(stats)
}
+
+ // tq"$a => $b"
+ override def makeFunctionTypeTree(argtpes: List[Tree], restpe: Tree): Tree =
+ AppliedTypeTree(Ident(tpnme.QUASIQUOTE_FUNCTION), argtpes :+ restpe)
}
import treeBuilder.{global => _, _}
@@ -79,7 +87,10 @@ trait Parsers { self: Quasiquotes =>
} else
super.caseClause()
- def isHole: Boolean = isIdent && holeMap.contains(in.name)
+ override def caseBlock(): Tree = super.caseBlock() match {
+ case Block(Nil, expr) => expr
+ case other => other
+ }
override def isAnnotation: Boolean = super.isAnnotation || (isHole && lookingAhead { isAnnotation })
@@ -105,13 +116,30 @@ trait Parsers { self: Quasiquotes =>
case AT =>
in.nextToken()
annot :: readAnnots(annot)
- case _ if isHole && lookingAhead { in.token == AT || isModifier || isDefIntro || isIdent} =>
+ case _ if isHole && lookingAhead { isAnnotation || isModifier || isDefIntro || isIdent || isStatSep || in.token == LPAREN } =>
val ann = Apply(Select(New(Ident(tpnme.QUASIQUOTE_MODS)), nme.CONSTRUCTOR), List(Literal(Constant(in.name.toString))))
in.nextToken()
ann :: readAnnots(annot)
case _ =>
Nil
}
+
+ override def refineStat(): List[Tree] =
+ if (isHole && !isDclIntro) {
+ val result = ValDef(NoMods, in.name, Ident(tpnme.QUASIQUOTE_REFINE_STAT), EmptyTree) :: Nil
+ in.nextToken()
+ result
+ } else super.refineStat()
+
+ override def ensureEarlyDef(tree: Tree) = tree match {
+ case Ident(name: TermName) if isHole(name) => ValDef(NoMods | Flag.PRESUPER, name, Ident(tpnme.QUASIQUOTE_EARLY_DEF), EmptyTree)
+ case _ => super.ensureEarlyDef(tree)
+ }
+
+ override def isTypedParam(tree: Tree) = super.isTypedParam(tree) || (tree match {
+ case Ident(name) if isHole(name) => true
+ case _ => false
+ })
}
}
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala b/src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala
index b3ac1e293a..e20d98c0f1 100644
--- a/src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala
@@ -92,10 +92,9 @@ trait Placeholders { self: Quasiquotes =>
}
}
- object AnnotPlaceholder {
- def unapply(tree: Tree): Option[(Tree, Location, Cardinality, List[Tree])] = tree match {
- case Apply(Select(New(Placeholder(tree, loc, card)), nme.CONSTRUCTOR), args) => Some(tree, loc, card, args)
- case _ => None
+ object AnnotPlaceholder extends HolePlaceholder {
+ def matching = {
+ case Apply(Select(New(Ident(name)), nme.CONSTRUCTOR), Nil) => name
}
}
@@ -113,6 +112,13 @@ trait Placeholders { self: Quasiquotes =>
}
}
+ object FunctionTypePlaceholder {
+ def unapply(tree: Tree): Option[(List[Tree], Tree)] = tree match {
+ case AppliedTypeTree(Ident(tpnme.QUASIQUOTE_FUNCTION), args :+ res) => Some((args, res))
+ case _ => None
+ }
+ }
+
object SymbolPlaceholder {
def unapply(scrutinee: Any): Option[Tree] = scrutinee match {
case Placeholder(tree, SymbolLocation, _) => Some(tree)
@@ -127,9 +133,16 @@ trait Placeholders { self: Quasiquotes =>
}
}
- object ClassPlaceholder {
- def unapply(tree: Tree): Option[Tree] = tree match {
- case ClassDef(_, _, _, _) => Some(tree)
+ object RefineStatPlaceholder {
+ def unapply(tree: Tree): Option[(Tree, Location, Cardinality)] = tree match {
+ case ValDef(_, Placeholder(tree, location, card), Ident(tpnme.QUASIQUOTE_REFINE_STAT), _) => Some((tree, location, card))
+ case _ => None
+ }
+ }
+
+ object EarlyDefPlaceholder {
+ def unapply(tree: Tree): Option[(Tree, Location, Cardinality)] = tree match {
+ case ValDef(_, Placeholder(tree, location, card), Ident(tpnme.QUASIQUOTE_EARLY_DEF), _) => Some((tree, location, card))
case _ => None
}
}
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala b/src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala
index ee99a5e280..1305e25240 100644
--- a/src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala
@@ -41,11 +41,11 @@ abstract class Quasiquotes extends Parsers
lazy val universeTypes = new definitions.UniverseDependentTypes(universe)
def expandQuasiquote = {
- debug(s"\ncode to parse=\n$code\n")
+ debug(s"\ncode to parse:\n$code\n")
val tree = parse(code)
- debug(s"parsed tree\n=${tree}\n=${showRaw(tree)}\n")
+ debug(s"parsed:\n${showRaw(tree)}\n$tree\n")
val reified = reify(tree)
- debug(s"reified tree\n=${reified}\n=${showRaw(reified)}\n")
+ debug(s"reified tree:\n$reified\n")
reified
}
}
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala b/src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala
index 900237b00d..af4e34536c 100644
--- a/src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala
@@ -7,7 +7,10 @@ import scala.reflect.internal.Flags._
trait Reifiers { self: Quasiquotes =>
import global._
- import global.build.SyntacticClassDef
+ import global.build.{SyntacticClassDef, SyntacticTraitDef, SyntacticModuleDef,
+ SyntacticDefDef, SyntacticValDef, SyntacticVarDef,
+ SyntacticBlock, SyntacticApplied, SyntacticTypeApplied,
+ SyntacticFunction, SyntacticNew}
import global.treeInfo._
import global.definitions._
import Cardinality._
@@ -35,13 +38,9 @@ trait Reifiers { self: Quasiquotes =>
reified
}
- override def reifyTree(tree: Tree): Tree = {
- val reified =
- reifyTreePlaceholder(tree) orElse
- reifyTreeSyntactically(tree)
- //println(s"reified ${showRaw(tree)} as $reified")
- reified
- }
+ override def reifyTree(tree: Tree): Tree =
+ reifyTreePlaceholder(tree) orElse
+ reifyTreeSyntactically(tree)
def reifyTreePlaceholder(tree: Tree): Tree = tree match {
case Placeholder(tree, TreeLocation(_), _) if isReifyingExpressions => tree
@@ -49,11 +48,49 @@ trait Reifiers { self: Quasiquotes =>
case Placeholder(tree, _, card @ Dot()) => c.abort(tree.pos, s"Can't $action with $card here")
case TuplePlaceholder(args) => reifyTuple(args)
case TupleTypePlaceholder(args) => reifyTupleType(args)
+ case FunctionTypePlaceholder(argtpes, restpe) => reifyFunctionType(argtpes, restpe)
case CasePlaceholder(tree, location, _) => reifyCase(tree, location)
- case ClassPlaceholder(tree) => reifyClass(tree)
+ case RefineStatPlaceholder(tree, _, _) => reifyRefineStat(tree)
+ case EarlyDefPlaceholder(tree, _, _) => reifyEarlyDef(tree)
case _ => EmptyTree
}
+ override def reifyTreeSyntactically(tree: Tree) = tree match {
+ case SyntacticTraitDef(mods, name, tparams, earlyDefs, parents, selfdef, body) =>
+ reifyBuildCall(nme.SyntacticTraitDef, mods, name, tparams, earlyDefs, parents, selfdef, body)
+ case SyntacticClassDef(mods, name, tparams, constrmods, vparamss, earlyDefs, parents, selfdef, body) =>
+ reifyBuildCall(nme.SyntacticClassDef, mods, name, tparams, constrmods, vparamss,
+ earlyDefs, parents, selfdef, body)
+ case SyntacticModuleDef(mods, name, earlyDefs, parents, selfdef, body) =>
+ reifyBuildCall(nme.SyntacticModuleDef, mods, name, earlyDefs, parents, selfdef, body)
+ case SyntacticNew(earlyDefs, parents, selfdef, body) =>
+ reifyBuildCall(nme.SyntacticNew, earlyDefs, parents, selfdef, body)
+ case SyntacticDefDef(mods, name, tparams, vparamss, tpt, rhs) =>
+ reifyBuildCall(nme.SyntacticDefDef, mods, name, tparams, vparamss, tpt, rhs)
+ case SyntacticValDef(mods, name, tpt, rhs) =>
+ reifyBuildCall(nme.SyntacticValDef, mods, name, tpt, rhs)
+ case SyntacticVarDef(mods, name, tpt, rhs) =>
+ reifyBuildCall(nme.SyntacticVarDef, mods, name, tpt, rhs)
+ case SyntacticApplied(fun, argss) if argss.length > 1 =>
+ reifyBuildCall(nme.SyntacticApplied, fun, argss)
+ case SyntacticApplied(fun, argss @ (_ :+ (_ :+ Placeholder(_, _, DotDotDot)))) =>
+ reifyBuildCall(nme.SyntacticApplied, fun, argss)
+ case SyntacticTypeApplied(fun, targs) if targs.nonEmpty =>
+ reifyBuildCall(nme.SyntacticTypeApplied, fun, targs)
+ case SyntacticFunction(args, body) =>
+ reifyBuildCall(nme.SyntacticFunction, args, body)
+ case Block(stats, last) =>
+ reifyBuildCall(nme.SyntacticBlock, stats :+ last)
+ // parser emits trees with scala package symbol to ensure
+ // that some names hygienically point to various scala package
+ // members; we need to preserve this symbol to preserve
+ // correctness of the trees produced by quasiquotes
+ case Select(id @ Ident(nme.scala_), name) if id.symbol == ScalaPackage =>
+ reifyBuildCall(nme.ScalaDot, name)
+ case _ =>
+ super.reifyTreeSyntactically(tree)
+ }
+
override def reifyName(name: Name): Tree = name match {
case Placeholder(tree, location, _) =>
if (holesHaveTypes && !(location.tpe <:< nameType)) c.abort(tree.pos, s"$nameType expected but ${location.tpe} found")
@@ -70,26 +107,30 @@ trait Reifiers { self: Quasiquotes =>
def reifyTuple(args: List[Tree]) = args match {
case Nil => reify(Literal(Constant(())))
case List(hole @ Placeholder(_, _, NoDot)) => reify(hole)
- case List(Placeholder(_, _, _)) => reifyBuildCall(nme.TupleN, args)
+ case List(Placeholder(_, _, _)) => reifyBuildCall(nme.SyntacticTuple, args)
// in a case we only have one element tuple without
// any cardinality annotations this means that this is
// just an expression wrapped in parentheses
case List(other) => reify(other)
- case _ => reifyBuildCall(nme.TupleN, args)
+ case _ => reifyBuildCall(nme.SyntacticTuple, args)
}
def reifyTupleType(args: List[Tree]) = args match {
case Nil => reify(Select(Ident(nme.scala_), tpnme.Unit))
case List(hole @ Placeholder(_, _, NoDot)) => reify(hole)
- case List(Placeholder(_, _, _)) => reifyBuildCall(nme.TupleTypeN, args)
+ case List(Placeholder(_, _, _)) => reifyBuildCall(nme.SyntacticTupleType, args)
case List(other) => reify(other)
- case _ => reifyBuildCall(nme.TupleTypeN, args)
+ case _ => reifyBuildCall(nme.SyntacticTupleType, args)
}
- def reifyClass(tree: Tree) = {
- val SyntacticClassDef(mods, name, tparams, constrmods, argss, parents, selfval, body) = tree
- reifyBuildCall(nme.SyntacticClassDef, mods, name, tparams, constrmods, argss, parents, selfval, body)
- }
+ def reifyFunctionType(argtpes: List[Tree], restpe: Tree) =
+ reifyBuildCall(nme.SyntacticFunctionType, argtpes, restpe)
+
+ def reifyRefineStat(tree: Tree) = tree
+
+ def reifyEarlyDef(tree: Tree) = tree
+
+ def reifyAnnotation(tree: Tree) = tree
/** Splits list into a list of groups where subsequent elements are considered
* similar by the corresponding function.
@@ -143,15 +184,33 @@ trait Reifiers { self: Quasiquotes =>
override def reifyList(xs: List[Any]): Tree = reifyMultiCardinalityList(xs) {
case Placeholder(tree, _, DotDot) => tree
case CasePlaceholder(tree, _, DotDot) => tree
+ case RefineStatPlaceholder(tree, _, DotDot) => reifyRefineStat(tree)
+ case EarlyDefPlaceholder(tree, _, DotDot) => reifyEarlyDef(tree)
case List(Placeholder(tree, _, DotDotDot)) => tree
} {
reify(_)
}
- def reifyAnnotList(annots: List[Tree]): Tree
+ def reifyAnnotList(annots: List[Tree]): Tree = reifyMultiCardinalityList(annots) {
+ case AnnotPlaceholder(tree, _, DotDot) => reifyAnnotation(tree)
+ } {
+ case AnnotPlaceholder(tree, UnknownLocation | TreeLocation(_), NoDot) => reifyAnnotation(tree)
+ case other => reify(other)
+ }
- def ensureNoExplicitFlags(m: Modifiers, pos: Position) =
- if ((m.flags & ExplicitFlags) != 0L) c.abort(pos, s"Can't $action modifiers together with flags, consider merging flags into modifiers")
+ // These are explicit flags except those that are used
+ // to overload the same tree for two different concepts:
+ // - MUTABLE that is used to override ValDef for vars
+ // - TRAIT that is used to override ClassDef for traits
+ val nonoverloadedExplicitFlags = ExplicitFlags & ~MUTABLE & ~TRAIT
+
+ def ensureNoExplicitFlags(m: Modifiers, pos: Position) = {
+ // Traits automatically have ABSTRACT flag assigned to
+ // them so in that case it's not an explicit flag
+ val flags = if (m.isTrait) m.flags & ~ABSTRACT else m.flags
+ if ((flags & nonoverloadedExplicitFlags) != 0L)
+ c.abort(pos, s"Can't $action modifiers together with flags, consider merging flags into modifiers")
+ }
override def mirrorSelect(name: String): Tree =
Select(universe, TermName(name))
@@ -167,19 +226,10 @@ trait Reifiers { self: Quasiquotes =>
def isReifyingExpressions = true
override def reifyTreeSyntactically(tree: Tree): Tree = tree match {
- case Block(stats, p @ Placeholder(_, _, _)) => reifyBuildCall(nme.Block, stats :+ p)
- case Apply(f, List(Placeholder(argss, _, DotDotDot))) => reifyCallWithArgss(f, argss)
- case RefTree(qual, SymbolPlaceholder(tree)) => mirrorBuildCall(nme.RefTree, reify(qual), tree)
- case _ => super.reifyTreeSyntactically(tree)
- }
-
- def reifyCallWithArgss(f: Tree, argss: Tree) = {
- val f1 = reifyTree(f)
- val foldLeftF1 = Apply(TypeApply(Select(argss, nme.foldLeft), List(Select(u, tpnme.Tree))), List(f1))
- val uDotApply = Function(
- List(gen.mkSyntheticParam(nme.x_1), gen.mkSyntheticParam(nme.x_2)),
- Apply(Select(u, nme.Apply), List(Ident(nme.x_1), Ident(nme.x_2))))
- Apply(foldLeftF1, List(uDotApply))
+ case RefTree(qual, SymbolPlaceholder(tree)) =>
+ mirrorBuildCall(nme.RefTree, reify(qual), tree)
+ case _ =>
+ super.reifyTreeSyntactically(tree)
}
override def reifyMultiCardinalityList[T](xs: List[T])(fill: PartialFunction[T, Tree])(fallback: T => Tree): Tree = xs match {
@@ -193,61 +243,47 @@ trait Reifiers { self: Quasiquotes =>
tail.foldLeft[Tree](reifyGroup(head)) { (tree, lst) => Apply(Select(tree, nme.PLUSPLUS), List(reifyGroup(lst))) }
}
- override def reifyAnnotList(annots: List[Tree]): Tree = reifyMultiCardinalityList(annots) {
- case AnnotPlaceholder(tree, _, DotDot, args) =>
- val x: TermName = c.freshName()
- val xToAnnotationCtor = Function(
- List(ValDef(Modifiers(PARAM), x, TypeTree(), EmptyTree)),
- mirrorBuildCall(nme.mkAnnotationCtor, Ident(x), reify(args)))
- Apply(Select(tree, nme.map), List(xToAnnotationCtor))
- } {
- case AnnotPlaceholder(tree, _: TreeLocation, _, args) =>
- mirrorBuildCall(nme.mkAnnotationCtor, tree, reify(args))
- case other => reify(other)
- }
-
- override def reifyModifiers(m: Modifiers) = {
- val (modsPlaceholders, annots) = m.annotations.partition {
- case ModsPlaceholder(_, _, _) => true
- case _ => false
- }
- val (mods, flags) = modsPlaceholders.map {
- case ModsPlaceholder(tree, location, card) => (tree, location)
- }.partition { case (tree, location) =>
- location match {
- case ModsLocation => true
- case FlagsLocation => false
- case _ => c.abort(tree.pos, s"$flagsType or $modsType expected but ${tree.tpe} found")
+ override def reifyModifiers(m: Modifiers) =
+ if (m == NoMods) super.reifyModifiers(m)
+ else {
+ val (modsPlaceholders, annots) = m.annotations.partition {
+ case ModsPlaceholder(_, _, _) => true
+ case _ => false
+ }
+ val (mods, flags) = modsPlaceholders.map {
+ case ModsPlaceholder(tree, location, card) => (tree, location)
+ }.partition { case (tree, location) =>
+ location match {
+ case ModsLocation => true
+ case FlagsLocation => false
+ case _ => c.abort(tree.pos, s"$flagsType or $modsType expected but ${tree.tpe} found")
+ }
+ }
+ mods match {
+ case (tree, _) :: Nil =>
+ if (flags.nonEmpty) c.abort(flags(0)._1.pos, "Can't splice flags together with modifiers, consider merging flags into modifiers")
+ if (annots.nonEmpty) c.abort(tree.pos, "Can't splice modifiers together with annotations, consider merging annotations into modifiers")
+ ensureNoExplicitFlags(m, tree.pos)
+ tree
+ case _ :: (second, _) :: Nil =>
+ c.abort(second.pos, "Can't splice multiple modifiers, consider merging them into a single modifiers instance")
+ case _ =>
+ val baseFlags = reifyFlags(m.flags)
+ val reifiedFlags = flags.foldLeft[Tree](baseFlags) { case (flag, (tree, _)) => Apply(Select(flag, nme.OR), List(tree)) }
+ mirrorFactoryCall(nme.Modifiers, reifiedFlags, reify(m.privateWithin), reifyAnnotList(annots))
}
}
- mods match {
- case (tree, _) :: Nil =>
- if (flags.nonEmpty) c.abort(flags(0)._1.pos, "Can't splice flags together with modifiers, consider merging flags into modifiers")
- if (annots.nonEmpty) c.abort(tree.pos, "Can't splice modifiers together with annotations, consider merging annotations into modifiers")
- ensureNoExplicitFlags(m, tree.pos)
- tree
- case _ :: (second, _) :: Nil =>
- c.abort(second.pos, "Can't splice multiple modifiers, consider merging them into a single modifiers instance")
- case _ =>
- val baseFlags = reifyBuildCall(nme.flagsFromBits, m.flags)
- val reifiedFlags = flags.foldLeft[Tree](baseFlags) { case (flag, (tree, _)) => Apply(Select(flag, nme.OR), List(tree)) }
- mirrorFactoryCall(nme.Modifiers, reifiedFlags, reify(m.privateWithin), reifyAnnotList(annots))
- }
- }
+
+ override def reifyRefineStat(tree: Tree) = mirrorBuildCall(nme.mkRefineStat, tree)
+
+ override def reifyEarlyDef(tree: Tree) = mirrorBuildCall(nme.mkEarlyDef, tree)
+
+ override def reifyAnnotation(tree: Tree) = mirrorBuildCall(nme.mkAnnotation, tree)
}
class UnapplyReifier extends Reifier {
def isReifyingExpressions = false
- override def reifyTreeSyntactically(tree: Tree): Tree = tree match {
- case treeInfo.Applied(fun, Nil, argss) if fun != tree && !tree.isInstanceOf[AppliedTypeTree] =>
- reifyBuildCall(nme.Applied, fun, argss)
- case treeInfo.Applied(fun, targs, argss) if fun != tree & !tree.isInstanceOf[AppliedTypeTree] =>
- mirrorBuildCall(nme.Applied, reifyBuildCall(nme.TypeApplied, fun, targs), reifyList(argss))
- case _ =>
- super.reifyTreeSyntactically(tree)
- }
-
override def scalaFactoryCall(name: String, args: Tree*): Tree =
call("scala." + name, args: _*)
@@ -261,30 +297,20 @@ trait Reifiers { self: Quasiquotes =>
mkList(xs.map(fallback))
}
- override def reifyAnnotList(annots: List[Tree]): Tree = reifyMultiCardinalityList(annots) {
- case AnnotPlaceholder(tree, _, DotDot, Nil) => tree
- } {
- case AnnotPlaceholder(tree, _, NoDot, Nil) => tree
- case AnnotPlaceholder(tree, _, NoDot, args) =>
- val selectCONSTRUCTOR = Apply(Select(u, nme.Select), List(Apply(Select(u, nme.New), List(tree)), Select(Select(u, nme.nmeNme), nme.nmeCONSTRUCTOR)))
- Apply(Select(u, nme.Apply), List(selectCONSTRUCTOR, reify(args)))
- case other =>
- reify(other)
- }
-
- override def reifyModifiers(m: Modifiers) = {
- val mods = m.annotations.collect { case ModsPlaceholder(tree, _, _) => tree }
- mods match {
- case tree :: Nil =>
- if (m.annotations.length != 1) c.abort(tree.pos, "Can't extract modifiers together with annotations, consider extracting just modifiers")
- ensureNoExplicitFlags(m, tree.pos)
- tree
- case _ :: second :: rest =>
- c.abort(second.pos, "Can't extract multiple modifiers together, consider extracting a single modifiers instance")
- case Nil =>
- mirrorFactoryCall(nme.Modifiers, reifyBuildCall(nme.FlagsAsBits, m.flags),
- reify(m.privateWithin), reifyAnnotList(m.annotations))
+ override def reifyModifiers(m: Modifiers) =
+ if (m == NoMods) super.reifyModifiers(m)
+ else {
+ val mods = m.annotations.collect { case ModsPlaceholder(tree, _, _) => tree }
+ mods match {
+ case tree :: Nil =>
+ if (m.annotations.length != 1) c.abort(tree.pos, "Can't extract modifiers together with annotations, consider extracting just modifiers")
+ ensureNoExplicitFlags(m, tree.pos)
+ tree
+ case _ :: second :: rest =>
+ c.abort(second.pos, "Can't extract multiple modifiers together, consider extracting a single modifiers instance")
+ case Nil =>
+ mirrorFactoryCall(nme.Modifiers, reifyFlags(m.flags), reify(m.privateWithin), reifyAnnotList(m.annotations))
+ }
}
- }
}
-} \ No newline at end of file
+}
diff --git a/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala
index 908ffb3713..a2c2ebc3e3 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala
@@ -330,8 +330,6 @@ abstract class SelectiveCPSTransform extends PluginComponent with
}
}
- def mkBlock(stms: List[Tree], expr: Tree) = if (stms.nonEmpty) Block(stms, expr) else expr
-
try {
if (specialCaseTrivial) {
debuglog("will optimize possible tail call: " + bodyExpr)
@@ -350,9 +348,9 @@ abstract class SelectiveCPSTransform extends PluginComponent with
val argSym = currentOwner.newValue(vd.symbol.name.toTermName).setInfo(tpe)
val argDef = localTyper.typed(ValDef(argSym, Select(ctxRef, ctxRef.tpe.member(cpsNames.getTrivialValue))))
val switchExpr = localTyper.typedPos(vd.symbol.pos) {
- val body2 = mkBlock(bodyStms, bodyExpr).duplicate // dup before typing!
+ val body2 = gen.mkBlock(bodyStms :+ bodyExpr).duplicate // dup before typing!
If(Select(ctxRef, ctxSym.tpe.member(cpsNames.isTrivial)),
- applyTrivial(argSym, mkBlock(argDef::bodyStms, bodyExpr)),
+ applyTrivial(argSym, gen.mkBlock((argDef :: bodyStms) :+ bodyExpr)),
applyCombinatorFun(ctxRef, body2))
}
(List(ctxDef), switchExpr)
@@ -360,7 +358,7 @@ abstract class SelectiveCPSTransform extends PluginComponent with
// ctx.flatMap { <lhs> => ... }
// or
// ctx.map { <lhs> => ... }
- (Nil, applyCombinatorFun(rhs1, mkBlock(bodyStms, bodyExpr)))
+ (Nil, applyCombinatorFun(rhs1, gen.mkBlock(bodyStms :+ bodyExpr)))
}
} catch {
case ex:TypeError =>
diff --git a/src/interactive/scala/tools/nsc/interactive/Global.scala b/src/interactive/scala/tools/nsc/interactive/Global.scala
index 492f0f4fb4..bc6df9eb25 100644
--- a/src/interactive/scala/tools/nsc/interactive/Global.scala
+++ b/src/interactive/scala/tools/nsc/interactive/Global.scala
@@ -32,7 +32,6 @@ trait InteractiveAnalyzer extends Analyzer {
override def newTyper(context: Context): InteractiveTyper = new Typer(context) with InteractiveTyper
override def newNamer(context: Context): InteractiveNamer = new Namer(context) with InteractiveNamer
- override protected def newPatternMatching = false
trait InteractiveTyper extends Typer {
override def canAdaptConstantTypeToLiteral = false
@@ -140,6 +139,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
abort("originalOwner is not kept in presentation compiler runs.")
override def forInteractive = true
+ override protected def synchronizeNames = true
override def newAsSeenFromMap(pre: Type, clazz: Symbol): AsSeenFromMap =
new InteractiveAsSeenFromMap(pre, clazz)
diff --git a/src/library/scala/collection/immutable/Map.scala b/src/library/scala/collection/immutable/Map.scala
index a3f5c85961..72a7f2a030 100644
--- a/src/library/scala/collection/immutable/Map.scala
+++ b/src/library/scala/collection/immutable/Map.scala
@@ -52,7 +52,7 @@ trait Map[A, +B] extends Iterable[(A, B)]
*
* Invoking transformer methods (e.g. `map`) will not preserve the default value.
*
- * @param d the function mapping keys to values, used for non-present keys
+ * @param d default value used for non-present keys
* @return a wrapper of the map with a default value
*/
def withDefaultValue[B1 >: B](d: B1): immutable.Map[A, B1] = new Map.WithDefault[A, B1](this, x => d)
diff --git a/src/library/scala/collection/mutable/Map.scala b/src/library/scala/collection/mutable/Map.scala
index 7280aaec25..01f6f725ab 100644
--- a/src/library/scala/collection/mutable/Map.scala
+++ b/src/library/scala/collection/mutable/Map.scala
@@ -43,7 +43,7 @@ trait Map[A, B]
*
* Invoking transformer methods (e.g. `map`) will not preserve the default value.
*
- * @param d the function mapping keys to values, used for non-present keys
+ * @param d default value used for non-present keys
* @return a wrapper of the map with a default value
*/
def withDefaultValue(d: B): mutable.Map[A, B] = new Map.WithDefault[A, B](this, x => d)
diff --git a/src/library/scala/collection/parallel/immutable/ParMap.scala b/src/library/scala/collection/parallel/immutable/ParMap.scala
index 4d99006d0f..2956c2a883 100644
--- a/src/library/scala/collection/parallel/immutable/ParMap.scala
+++ b/src/library/scala/collection/parallel/immutable/ParMap.scala
@@ -63,7 +63,7 @@ self =>
*
* Invoking transformer methods (e.g. `map`) will not preserve the default value.
*
- * @param d the function mapping keys to values, used for non-present keys
+ * @param d default value used for non-present keys
* @return a wrapper of the map with a default value
*/
def withDefaultValue[U >: V](d: U): scala.collection.parallel.immutable.ParMap[K, U] = new ParMap.WithDefault[K, U](this, x => d)
diff --git a/src/library/scala/collection/parallel/mutable/ParMap.scala b/src/library/scala/collection/parallel/mutable/ParMap.scala
index e43e72e559..8110f9dc0a 100644
--- a/src/library/scala/collection/parallel/mutable/ParMap.scala
+++ b/src/library/scala/collection/parallel/mutable/ParMap.scala
@@ -55,7 +55,7 @@ extends GenMap[K, V]
*
* Invoking transformer methods (e.g. `map`) will not preserve the default value.
*
- * @param d the function mapping keys to values, used for non-present keys
+ * @param d default value used for non-present keys
* @return a wrapper of the map with a default value
*/
def withDefaultValue(d: V): scala.collection.parallel.mutable.ParMap[K, V] = new ParMap.WithDefault[K, V](this, x => d)
diff --git a/src/library/scala/util/control/TailCalls.scala b/src/library/scala/util/control/TailCalls.scala
index ba3044c718..c3e7d98073 100644
--- a/src/library/scala/util/control/TailCalls.scala
+++ b/src/library/scala/util/control/TailCalls.scala
@@ -13,7 +13,11 @@ package util.control
* Tail calling methods have to return their result using `done` or call the
* next method using `tailcall`. Both return a `TailRec` object. The result
* of evaluating a tailcalling function can be retrieved from a `Tailrec`
- * value using method `result`. Here's a usage example:
+ * value using method `result`.
+ * Implemented as described in "Stackless Scala with Free Monads"
+ * http://blog.higher-order.com/assets/trampolines.pdf
+ *
+ * Here's a usage example:
* {{{
* import scala.util.control.TailCalls._
*
@@ -24,6 +28,14 @@ package util.control
* if (xs.isEmpty) done(false) else tailcall(isEven(xs.tail))
*
* isEven((1 to 100000).toList).result
+ *
+ * def fib(n: Int): TailRec[Int] =
+ * if (n < 2) done(n) else for {
+ * x <- tailcall(fib(n - 1))
+ * y <- tailcall(fib(n - 2))
+ * } yield (x + y)
+ *
+ * fib(40).result
* }}}
*/
object TailCalls {
@@ -31,14 +43,43 @@ object TailCalls {
/** This class represents a tailcalling computation
*/
abstract class TailRec[+A] {
+
+ /** Continue the computation with `f`. */
+ final def map[B](f: A => B): TailRec[B] =
+ flatMap(a => Call(() => Done(f(a))))
+
+ /** Continue the computation with `f` and merge the trampolining
+ * of this computation with that of `f`. */
+ final def flatMap[B](f: A => TailRec[B]): TailRec[B] =
+ this match {
+ case Done(a) => Call(() => f(a))
+ case c@Call(_) => Cont(c, f)
+ // Take advantage of the monad associative law to optimize the size of the required stack
+ case Cont(s, g) => Cont(s, (x:Any) => g(x).flatMap(f))
+ }
+
+ /** Returns either the next step of the tailcalling computation,
+ * or the result if there are no more steps. */
+ @annotation.tailrec final def resume: Either[() => TailRec[A], A] = this match {
+ case Done(a) => Right(a)
+ case Call(k) => Left(k)
+ case Cont(a, f) => a match {
+ case Done(v) => f(v).resume
+ case Call(k) => Left(() => k().flatMap(f))
+ case Cont(b, g) => b.flatMap(x => g(x) flatMap f).resume
+ }
+ }
+
/** Returns the result of the tailcalling computation.
*/
- def result: A = {
- def loop(body: TailRec[A]): A = body match {
- case Call(rest) => loop(rest())
- case Done(result) => result
+ @annotation.tailrec final def result: A = this match {
+ case Done(a) => a
+ case Call(t) => t().result
+ case Cont(a, f) => a match {
+ case Done(v) => f(v).result
+ case Call(t) => t().flatMap(f).result
+ case Cont(b, g) => b.flatMap(x => g(x) flatMap f).result
}
- loop(this)
}
}
@@ -47,19 +88,23 @@ object TailCalls {
/** Internal class representing the final result returned from a tailcalling
* computation */
- protected case class Done[A](override val result: A) extends TailRec[A]
+ protected case class Done[A](value: A) extends TailRec[A]
+
+ /** Internal class representing a continuation with function A => TailRec[B].
+ * It is needed for the flatMap to be implemented. */
+ protected case class Cont[A, B](a: TailRec[A], f: A => TailRec[B]) extends TailRec[B]
/** Performs a tailcall
* @param rest the expression to be evaluated in the tailcall
* @return a `TailRec` object representing the expression `rest`
*/
- def tailcall[A](rest: => TailRec[A]): TailRec[A] = new Call(() => rest)
+ def tailcall[A](rest: => TailRec[A]): TailRec[A] = Call(() => rest)
/** Used to return final result from tailcalling computation
* @param `result` the result value
* @return a `TailRec` object representing a computation which immediately
* returns `result`
*/
- def done[A](result: A): TailRec[A] = new Done(result)
+ def done[A](result: A): TailRec[A] = Done(result)
}
diff --git a/src/parser-combinators/scala/util/parsing/combinator/ImplicitConversions.scala b/src/parser-combinators/scala/util/parsing/combinator/ImplicitConversions.scala
deleted file mode 100644
index 0683ea927d..0000000000
--- a/src/parser-combinators/scala/util/parsing/combinator/ImplicitConversions.scala
+++ /dev/null
@@ -1,43 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala
-package util.parsing.combinator
-
-import scala.language.implicitConversions
-
-/** This object contains implicit conversions that come in handy when using the `^^` combinator.
- *
- * Refer to [[scala.util.parsing.combinator.Parsers]] to construct an AST from the concrete syntax.
- *
- * The reason for this is that the sequential composition combinator (`~`) combines its constituents
- * into a ~. When several `~`s are combined, this results in nested `~`s (to the left).
- * The `flatten*` coercions makes it easy to apply an `n`-argument function to a nested `~` of
- * depth `n-1`
- *
- * The `headOptionTailToFunList` converts a function that takes a `List[A]` to a function that
- * accepts a `~[A, Option[List[A]]]` (this happens when parsing something of the following
- * shape: `p ~ opt("." ~ repsep(p, "."))` -- where `p` is a parser that yields an `A`).
- *
- * @author Martin Odersky
- * @author Iulian Dragos
- * @author Adriaan Moors
- */
-trait ImplicitConversions { self: Parsers =>
- implicit def flatten2[A, B, C] (f: (A, B) => C) =
- (p: ~[A, B]) => p match {case a ~ b => f(a, b)}
- implicit def flatten3[A, B, C, D] (f: (A, B, C) => D) =
- (p: ~[~[A, B], C]) => p match {case a ~ b ~ c => f(a, b, c)}
- implicit def flatten4[A, B, C, D, E] (f: (A, B, C, D) => E) =
- (p: ~[~[~[A, B], C], D]) => p match {case a ~ b ~ c ~ d => f(a, b, c, d)}
- implicit def flatten5[A, B, C, D, E, F](f: (A, B, C, D, E) => F) =
- (p: ~[~[~[~[A, B], C], D], E]) => p match {case a ~ b ~ c ~ d ~ e=> f(a, b, c, d, e)}
- implicit def headOptionTailToFunList[A, T] (f: List[A] => T)=
- (p: ~[A, Option[List[A]]]) => f(p._1 :: (p._2 match { case Some(xs) => xs case None => Nil}))
-}
diff --git a/src/parser-combinators/scala/util/parsing/combinator/JavaTokenParsers.scala b/src/parser-combinators/scala/util/parsing/combinator/JavaTokenParsers.scala
deleted file mode 100644
index 01288a182e..0000000000
--- a/src/parser-combinators/scala/util/parsing/combinator/JavaTokenParsers.scala
+++ /dev/null
@@ -1,62 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala
-package util.parsing.combinator
-
-import scala.annotation.migration
-
-/** `JavaTokenParsers` differs from [[scala.util.parsing.combinator.RegexParsers]]
- * by adding the following definitions:
- *
- * - `ident`
- * - `wholeNumber`
- * - `decimalNumber`
- * - `stringLiteral`
- * - `floatingPointNumber`
- */
-trait JavaTokenParsers extends RegexParsers {
- /** Anything that is a valid Java identifier, according to
- * <a href="http://docs.oracle.com/javase/specs/jls/se7/html/jls-3.html#jls-3.8">The Java Language Spec</a>.
- * Generally, this means a letter, followed by zero or more letters or numbers.
- */
- def ident: Parser[String] =
- """\p{javaJavaIdentifierStart}\p{javaJavaIdentifierPart}*""".r
- /** An integer, without sign or with a negative sign. */
- def wholeNumber: Parser[String] =
- """-?\d+""".r
- /** Number following one of these rules:
- *
- * - An integer. For example: `13`
- * - An integer followed by a decimal point. For example: `3.`
- * - An integer followed by a decimal point and fractional part. For example: `3.14`
- * - A decimal point followed by a fractional part. For example: `.1`
- */
- def decimalNumber: Parser[String] =
- """(\d+(\.\d*)?|\d*\.\d+)""".r
- /** Double quotes (`"`) enclosing a sequence of:
- *
- * - Any character except double quotes, control characters or backslash (`\`)
- * - A backslash followed by another backslash, a single or double quote, or one
- * of the letters `b`, `f`, `n`, `r` or `t`
- * - `\` followed by `u` followed by four hexadecimal digits
- */
- @migration("`stringLiteral` allows escaping single and double quotes, but not forward slashes any longer.", "2.10.0")
- def stringLiteral: Parser[String] =
- ("\""+"""([^"\p{Cntrl}\\]|\\[\\'"bfnrt]|\\u[a-fA-F0-9]{4})*"""+"\"").r
- /** A number following the rules of `decimalNumber`, with the following
- * optional additions:
- *
- * - Preceded by a negative sign
- * - Followed by `e` or `E` and an optionally signed integer
- * - Followed by `f`, `f`, `d` or `D` (after the above rule, if both are used)
- */
- def floatingPointNumber: Parser[String] =
- """-?(\d+(\.\d*)?|\d*\.\d+)([eE][+-]?\d+)?[fFdD]?""".r
-}
diff --git a/src/parser-combinators/scala/util/parsing/combinator/PackratParsers.scala b/src/parser-combinators/scala/util/parsing/combinator/PackratParsers.scala
deleted file mode 100644
index a11dd18e62..0000000000
--- a/src/parser-combinators/scala/util/parsing/combinator/PackratParsers.scala
+++ /dev/null
@@ -1,312 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package util.parsing.combinator
-
-import scala.util.parsing.input.{ Reader, Position }
-import scala.collection.mutable
-import scala.language.implicitConversions
-
-/**
- * `PackratParsers` is a component that extends the parser combinators
- * provided by [[scala.util.parsing.combinator.Parsers]] with a memoization
- * facility (''Packrat Parsing'').
- *
- * Packrat Parsing is a technique for implementing backtracking,
- * recursive-descent parsers, with the advantage that it guarantees
- * unlimited lookahead and a linear parse time. Using this technique,
- * left recursive grammars can also be accepted.
- *
- * Using `PackratParsers` is very similar to using `Parsers`:
- * - any class/trait that extends `Parsers` (directly or through a subclass)
- * can mix in `PackratParsers`.
- * Example: `'''object''' MyGrammar '''extends''' StandardTokenParsers '''with''' PackratParsers`
- * - each grammar production previously declared as a `def` without formal
- * parameters becomes a `lazy val`, and its type is changed from
- * `Parser[Elem]` to `PackratParser[Elem]`.
- * So, for example, `'''def''' production: Parser[Int] = {...}`
- * becomes `'''lazy val''' production: PackratParser[Int] = {...}`
- * - Important: using `PackratParser`s is not an ''all or nothing'' decision.
- * They can be free mixed with regular `Parser`s in a single grammar.
- *
- * Cached parse results are attached to the ''input'', not the grammar.
- * Therefore, `PackratsParser`s require a `PackratReader` as input, which
- * adds memoization to an underlying `Reader`. Programmers can create
- * `PackratReader` objects either manually, as in
- * `production('''new''' PackratReader('''new''' lexical.Scanner("input")))`,
- * but the common way should be to rely on the combinator `phrase` to wrap
- * a given input with a `PackratReader` if the input is not one itself.
- *
- * @see Bryan Ford: "Packrat Parsing: Simple, Powerful, Lazy, Linear Time." ICFP'02
- * @see Alessandro Warth, James R. Douglass, Todd Millstein: "Packrat Parsers Can Support Left Recursion." PEPM'08
- *
- * @since 2.8
- * @author Manohar Jonnalagedda
- * @author Tiark Rompf
- */
-
-trait PackratParsers extends Parsers {
-
- //type Input = PackratReader[Elem]
-
- /**
- * A specialized `Reader` class that wraps an underlying `Reader`
- * and provides memoization of parse results.
- */
- class PackratReader[+T](underlying: Reader[T]) extends Reader[T] { outer =>
-
- /*
- * caching of intermediate parse results and information about recursion
- */
- private[PackratParsers] val cache = mutable.HashMap.empty[(Parser[_], Position), MemoEntry[_]]
-
- private[PackratParsers] def getFromCache[T](p: Parser[T]): Option[MemoEntry[T]] = {
- cache.get((p, pos)).asInstanceOf[Option[MemoEntry[T]]]
- }
-
- private[PackratParsers] def updateCacheAndGet[T](p: Parser[T], w: MemoEntry[T]): MemoEntry[T] = {
- cache.put((p, pos),w)
- w
- }
-
- /* a cache for storing parser heads: allows to know which parser is involved
- in a recursion*/
- private[PackratParsers] val recursionHeads: mutable.HashMap[Position, Head] = mutable.HashMap.empty
-
- //a stack that keeps a list of all involved rules
- private[PackratParsers] var lrStack: List[LR] = Nil
-
- override def source: java.lang.CharSequence = underlying.source
- override def offset: Int = underlying.offset
-
- def first: T = underlying.first
- def rest: Reader[T] = new PackratReader(underlying.rest) {
- override private[PackratParsers] val cache = outer.cache
- override private[PackratParsers] val recursionHeads = outer.recursionHeads
- lrStack = outer.lrStack
- }
-
- def pos: Position = underlying.pos
- def atEnd: Boolean = underlying.atEnd
- }
-
- /**
- * A parser generator delimiting whole phrases (i.e. programs).
- *
- * Overridden to make sure any input passed to the argument parser
- * is wrapped in a `PackratReader`.
- */
- override def phrase[T](p: Parser[T]) = {
- val q = super.phrase(p)
- new PackratParser[T] {
- def apply(in: Input) = in match {
- case in: PackratReader[_] => q(in)
- case in => q(new PackratReader(in))
- }
- }
- }
-
- private def getPosFromResult(r: ParseResult[_]): Position = r.next.pos
-
- // auxiliary data structures
-
- private case class MemoEntry[+T](var r: Either[LR,ParseResult[_]]){
- def getResult: ParseResult[T] = r match {
- case Left(LR(res,_,_)) => res.asInstanceOf[ParseResult[T]]
- case Right(res) => res.asInstanceOf[ParseResult[T]]
- }
- }
-
- private case class LR(var seed: ParseResult[_], var rule: Parser[_], var head: Option[Head]){
- def getPos: Position = getPosFromResult(seed)
- }
-
- private case class Head(var headParser: Parser[_], var involvedSet: List[Parser[_]], var evalSet: List[Parser[_]]){
- def getHead = headParser
- }
-
- /**
- * The root class of packrat parsers.
- */
- abstract class PackratParser[+T] extends super.Parser[T]
-
- /**
- * Implicitly convert a parser to a packrat parser.
- * The conversion is triggered by giving the appropriate target type:
- * {{{
- * val myParser: PackratParser[MyResult] = aParser
- * }}} */
- implicit def parser2packrat[T](p: => super.Parser[T]): PackratParser[T] = {
- lazy val q = p
- memo(super.Parser {in => q(in)})
- }
-
- /*
- * An unspecified function that is called when a packrat reader is applied.
- * It verifies whether we are in the process of growing a parse or not.
- * In the former case, it makes sure that rules involved in the recursion are evaluated.
- * It also prevents non-involved rules from getting evaluated further
- */
- private def recall(p: super.Parser[_], in: PackratReader[Elem]): Option[MemoEntry[_]] = {
- val cached = in.getFromCache(p)
- val head = in.recursionHeads.get(in.pos)
-
- head match {
- case None => /*no heads*/ cached
- case Some(h@Head(hp, involved, evalSet)) => {
- //heads found
- if(cached == None && !(hp::involved contains p)) {
- //Nothing in the cache, and p is not involved
- return Some(MemoEntry(Right(Failure("dummy ",in))))
- }
- if(evalSet contains p){
- //something in cache, and p is in the evalSet
- //remove the rule from the evalSet of the Head
- h.evalSet = h.evalSet.filterNot(_==p)
- val tempRes = p(in)
- //we know that cached has an entry here
- val tempEntry: MemoEntry[_] = cached.get // match {case Some(x: MemoEntry[_]) => x}
- //cache is modified
- tempEntry.r = Right(tempRes)
- }
- cached
- }
- }
- }
-
- /*
- * setting up the left-recursion. We have the LR for the rule head
- * we modify the involvedSets of all LRs in the stack, till we see
- * the current parser again
- */
- private def setupLR(p: Parser[_], in: PackratReader[_], recDetect: LR): Unit = {
- if(recDetect.head == None) recDetect.head = Some(Head(p, Nil, Nil))
-
- in.lrStack.takeWhile(_.rule != p).foreach {x =>
- x.head = recDetect.head
- recDetect.head.map(h => h.involvedSet = x.rule::h.involvedSet)
- }
- }
-
- /*
- * growing, if needed the recursion
- * check whether the parser we are growing is the head of the rule.
- * Not => no grow
- */
-
- /*
- * Once the result of the recall function is known, if it is nil, then we need to store a dummy
-failure into the cache (much like in the previous listings) and compute the future parse. If it
-is not, however, this means we have detected a recursion, and we use the setupLR function
-to update each parser involved in the recursion.
- */
-
- private def lrAnswer[T](p: Parser[T], in: PackratReader[Elem], growable: LR): ParseResult[T] = growable match {
- //growable will always be having a head, we can't enter lrAnswer otherwise
- case LR(seed ,rule, Some(head)) =>
- if(head.getHead != p) /*not head rule, so not growing*/ seed.asInstanceOf[ParseResult[T]]
- else {
- in.updateCacheAndGet(p, MemoEntry(Right[LR, ParseResult[T]](seed.asInstanceOf[ParseResult[T]])))
- seed match {
- case f@Failure(_,_) => f
- case e@Error(_,_) => e
- case s@Success(_,_) => /*growing*/ grow(p, in, head)
- }
- }
- case _=> throw new Exception("lrAnswer with no head !!")
- }
-
- //p here should be strict (cannot be non-strict) !!
- //failing left-recursive grammars: This is done by simply storing a failure if nothing is found
-
- /**
- * Explicitly convert a given parser to a memoizing packrat parser.
- * In most cases, client code should avoid calling `memo` directly
- * and rely on implicit conversion instead.
- */
- def memo[T](p: super.Parser[T]): PackratParser[T] = {
- new PackratParser[T] {
- def apply(in: Input) = {
- /*
- * transformed reader
- */
- val inMem = in.asInstanceOf[PackratReader[Elem]]
-
- //look in the global cache if in a recursion
- val m = recall(p, inMem)
- m match {
- //nothing has been done due to recall
- case None =>
- val base = LR(Failure("Base Failure",in), p, None)
- inMem.lrStack = base::inMem.lrStack
- //cache base result
- inMem.updateCacheAndGet(p,MemoEntry(Left(base)))
- //parse the input
- val tempRes = p(in)
- //the base variable has passed equality tests with the cache
- inMem.lrStack = inMem.lrStack.tail
- //check whether base has changed, if yes, we will have a head
- base.head match {
- case None =>
- /*simple result*/
- inMem.updateCacheAndGet(p,MemoEntry(Right(tempRes)))
- tempRes
- case s@Some(_) =>
- /*non simple result*/
- base.seed = tempRes
- //the base variable has passed equality tests with the cache
- val res = lrAnswer(p, inMem, base)
- res
- }
-
- case Some(mEntry) => {
- //entry found in cache
- mEntry match {
- case MemoEntry(Left(recDetect)) => {
- setupLR(p, inMem, recDetect)
- //all setupLR does is change the heads of the recursions, so the seed will stay the same
- recDetect match {case LR(seed, _, _) => seed.asInstanceOf[ParseResult[T]]}
- }
- case MemoEntry(Right(res: ParseResult[_])) => res.asInstanceOf[ParseResult[T]]
- }
- }
- }
- }
- }
- }
-
- private def grow[T](p: super.Parser[T], rest: PackratReader[Elem], head: Head): ParseResult[T] = {
- //store the head into the recursionHeads
- rest.recursionHeads.put(rest.pos, head /*match {case Head(hp,involved,_) => Head(hp,involved,involved)}*/)
- val oldRes: ParseResult[T] = rest.getFromCache(p).get match {
- case MemoEntry(Right(x)) => x.asInstanceOf[ParseResult[T]]
- case _ => throw new Exception("impossible match")
- }
-
- //resetting the evalSet of the head of the recursion at each beginning of growth
- head.evalSet = head.involvedSet
- val tempRes = p(rest); tempRes match {
- case s@Success(_,_) =>
- if(getPosFromResult(oldRes) < getPosFromResult(tempRes)) {
- rest.updateCacheAndGet(p, MemoEntry(Right(s)))
- grow(p, rest, head)
- } else {
- //we're done with growing, we can remove data from recursion head
- rest.recursionHeads -= rest.pos
- rest.getFromCache(p).get match {
- case MemoEntry(Right(x: ParseResult[_])) => x.asInstanceOf[ParseResult[T]]
- case _ => throw new Exception("impossible match")
- }
- }
- case f =>
- rest.recursionHeads -= rest.pos
- /*rest.updateCacheAndGet(p, MemoEntry(Right(f)));*/oldRes
- }
- }
-}
diff --git a/src/parser-combinators/scala/util/parsing/combinator/Parsers.scala b/src/parser-combinators/scala/util/parsing/combinator/Parsers.scala
deleted file mode 100644
index 16754646fd..0000000000
--- a/src/parser-combinators/scala/util/parsing/combinator/Parsers.scala
+++ /dev/null
@@ -1,919 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package util.parsing.combinator
-
-import scala.util.parsing.input._
-import scala.collection.mutable.ListBuffer
-import scala.annotation.tailrec
-import scala.annotation.migration
-import scala.language.implicitConversions
-import scala.util.DynamicVariable
-
-// TODO: better error handling (labelling like parsec's <?>)
-
-/** `Parsers` is a component that ''provides'' generic parser combinators.
- *
- * There are two abstract members that must be defined in order to
- * produce parsers: the type `Elem` and
- * [[scala.util.parsing.combinator.Parsers.Parser]]. There are helper
- * methods that produce concrete `Parser` implementations -- see ''primitive
- * parser'' below.
- *
- * A `Parsers` may define multiple `Parser` instances, which are combined
- * to produced the desired parser.
- *
- * The type of the elements these parsers should parse must be defined
- * by declaring `Elem`
- * (each parser is polymorphic in the type of result it produces).
- *
- * There are two aspects to the result of a parser:
- * 1. success or failure
- * 1. the result.
- *
- * A [[scala.util.parsing.combinator.Parsers.Parser]] produces both kinds of information,
- * by returning a [[scala.util.parsing.combinator.Parsers.ParseResult]] when its `apply`
- * method is called on an input.
- *
- * The term ''parser combinator'' refers to the fact that these parsers
- * are constructed from primitive parsers and composition operators, such
- * as sequencing, alternation, optionality, repetition, lifting, and so on. For example,
- * given `p1` and `p2` of type [[scala.util.parsing.combinator.Parsers.Parser]]:
- *
- * {{{
- * p1 ~ p2 // sequencing: must match p1 followed by p2
- * p1 | p2 // alternation: must match either p1 or p2, with preference given to p1
- * p1.? // optionality: may match p1 or not
- * p1.* // repetition: matches any number of repetitions of p1
- * }}}
- *
- * These combinators are provided as methods on [[scala.util.parsing.combinator.Parsers.Parser]],
- * or as methods taking one or more `Parsers` and returning a `Parser` provided in
- * this class.
- *
- * A ''primitive parser'' is a parser that accepts or rejects a single
- * piece of input, based on a certain criterion, such as whether the
- * input...
- * - is equal to some given object (see method `accept`),
- * - satisfies a certain predicate (see method `acceptIf`),
- * - is in the domain of a given partial function (see method `acceptMatch`)
- * - or other conditions, by using one of the other methods available, or subclassing `Parser`
- *
- * Even more primitive parsers always produce the same result, irrespective of the input. See
- * methods `success`, `err` and `failure` as examples.
- *
- * @see [[scala.util.parsing.combinator.RegexParsers]] and other known subclasses for practical examples.
- *
- * @author Martin Odersky
- * @author Iulian Dragos
- * @author Adriaan Moors
- */
-trait Parsers {
- /** the type of input elements the provided parsers consume (When consuming
- * invidual characters, a parser is typically called a ''scanner'', which
- * produces ''tokens'' that are consumed by what is normally called a ''parser''.
- * Nonetheless, the same principles apply, regardless of the input type.) */
- type Elem
-
- /** The parser input is an abstract reader of input elements, i.e. the type
- * of input the parsers in this component expect. */
- type Input = Reader[Elem]
-
- /** A base class for parser results. A result is either successful or not
- * (failure may be fatal, i.e., an Error, or not, i.e., a Failure). On
- * success, provides a result of type `T` which consists of some result
- * (and the rest of the input). */
- sealed abstract class ParseResult[+T] {
- /** Functional composition of ParseResults.
- *
- * @param f the function to be lifted over this result
- * @return `f` applied to the result of this `ParseResult`, packaged up as a new `ParseResult`
- */
- def map[U](f: T => U): ParseResult[U]
-
- /** Partial functional composition of ParseResults.
- *
- * @param f the partial function to be lifted over this result
- * @param error a function that takes the same argument as `f` and
- * produces an error message to explain why `f` wasn't applicable
- * (it is called when this is the case)
- * @return if `f` f is defined at the result in this `ParseResult`, `f`
- * applied to the result of this `ParseResult`, packaged up as
- * a new `ParseResult`. If `f` is not defined, `Failure`.
- */
- def mapPartial[U](f: PartialFunction[T, U], error: T => String): ParseResult[U]
-
- def flatMapWithNext[U](f: T => Input => ParseResult[U]): ParseResult[U]
-
- def filterWithError(p: T => Boolean, error: T => String, position: Input): ParseResult[T]
-
- def append[U >: T](a: => ParseResult[U]): ParseResult[U]
-
- def isEmpty = !successful
-
- /** Returns the embedded result. */
- def get: T
-
- def getOrElse[B >: T](default: => B): B =
- if (isEmpty) default else this.get
-
- val next: Input
-
- val successful: Boolean
- }
-
- /** The success case of `ParseResult`: contains the result and the remaining input.
- *
- * @param result The parser's output
- * @param next The parser's remaining input
- */
- case class Success[+T](result: T, override val next: Input) extends ParseResult[T] {
- def map[U](f: T => U) = Success(f(result), next)
- def mapPartial[U](f: PartialFunction[T, U], error: T => String): ParseResult[U]
- = if(f.isDefinedAt(result)) Success(f(result), next)
- else Failure(error(result), next)
-
- def flatMapWithNext[U](f: T => Input => ParseResult[U]): ParseResult[U]
- = f(result)(next)
-
- def filterWithError(p: T => Boolean, error: T => String, position: Input): ParseResult[T] =
- if (p(result)) this
- else Failure(error(result), position)
-
- def append[U >: T](a: => ParseResult[U]): ParseResult[U] = this
-
- def get: T = result
-
- /** The toString method of a Success. */
- override def toString = "["+next.pos+"] parsed: "+result
-
- val successful = true
- }
-
- private lazy val lastNoSuccessVar = new DynamicVariable[Option[NoSuccess]](None)
-
- /** A common super-class for unsuccessful parse results. */
- sealed abstract class NoSuccess(val msg: String, override val next: Input) extends ParseResult[Nothing] { // when we don't care about the difference between Failure and Error
- val successful = false
-
- if (lastNoSuccessVar.value forall (v => !(next.pos < v.next.pos)))
- lastNoSuccessVar.value = Some(this)
-
- def map[U](f: Nothing => U) = this
- def mapPartial[U](f: PartialFunction[Nothing, U], error: Nothing => String): ParseResult[U] = this
-
- def flatMapWithNext[U](f: Nothing => Input => ParseResult[U]): ParseResult[U]
- = this
-
- def filterWithError(p: Nothing => Boolean, error: Nothing => String, position: Input): ParseResult[Nothing] = this
-
- def get: Nothing = scala.sys.error("No result when parsing failed")
- }
- /** An extractor so `NoSuccess(msg, next)` can be used in matches. */
- object NoSuccess {
- def unapply[T](x: ParseResult[T]) = x match {
- case Failure(msg, next) => Some((msg, next))
- case Error(msg, next) => Some((msg, next))
- case _ => None
- }
- }
-
- /** The failure case of `ParseResult`: contains an error-message and the remaining input.
- * Parsing will back-track when a failure occurs.
- *
- * @param msg An error message string describing the failure.
- * @param next The parser's unconsumed input at the point where the failure occurred.
- */
- case class Failure(override val msg: String, override val next: Input) extends NoSuccess(msg, next) {
- /** The toString method of a Failure yields an error message. */
- override def toString = "["+next.pos+"] failure: "+msg+"\n\n"+next.pos.longString
-
- def append[U >: Nothing](a: => ParseResult[U]): ParseResult[U] = { val alt = a; alt match {
- case Success(_, _) => alt
- case ns: NoSuccess => if (alt.next.pos < next.pos) this else alt
- }}
- }
-
- /** The fatal failure case of ParseResult: contains an error-message and
- * the remaining input.
- * No back-tracking is done when a parser returns an `Error`.
- *
- * @param msg An error message string describing the error.
- * @param next The parser's unconsumed input at the point where the error occurred.
- */
- case class Error(override val msg: String, override val next: Input) extends NoSuccess(msg, next) {
- /** The toString method of an Error yields an error message. */
- override def toString = "["+next.pos+"] error: "+msg+"\n\n"+next.pos.longString
- def append[U >: Nothing](a: => ParseResult[U]): ParseResult[U] = this
- }
-
- def Parser[T](f: Input => ParseResult[T]): Parser[T]
- = new Parser[T]{ def apply(in: Input) = f(in) }
-
- def OnceParser[T](f: Input => ParseResult[T]): Parser[T] with OnceParser[T]
- = new Parser[T] with OnceParser[T] { def apply(in: Input) = f(in) }
-
- /** The root class of parsers.
- * Parsers are functions from the Input type to ParseResult.
- */
- abstract class Parser[+T] extends (Input => ParseResult[T]) {
- private var name: String = ""
- def named(n: String): this.type = {name=n; this}
- override def toString() = "Parser ("+ name +")"
-
- /** An unspecified method that defines the behaviour of this parser. */
- def apply(in: Input): ParseResult[T]
-
- def flatMap[U](f: T => Parser[U]): Parser[U]
- = Parser{ in => this(in) flatMapWithNext(f)}
-
- def map[U](f: T => U): Parser[U] //= flatMap{x => success(f(x))}
- = Parser{ in => this(in) map(f)}
-
- def filter(p: T => Boolean): Parser[T]
- = withFilter(p)
-
- def withFilter(p: T => Boolean): Parser[T]
- = Parser{ in => this(in) filterWithError(p, "Input doesn't match filter: "+_, in)}
-
- // no filter yet, dealing with zero is tricky!
-
- @migration("The call-by-name argument is evaluated at most once per constructed Parser object, instead of on every need that arises during parsing.", "2.9.0")
- def append[U >: T](p0: => Parser[U]): Parser[U] = { lazy val p = p0 // lazy argument
- Parser{ in => this(in) append p(in)}
- }
-
- // the operator formerly known as +++, ++, &, but now, behold the venerable ~
- // it's short, light (looks like whitespace), has few overloaded meaning (thanks to the recent change from ~ to unary_~)
- // and we love it! (or do we like `,` better?)
-
- /** A parser combinator for sequential composition.
- *
- * `p ~ q` succeeds if `p` succeeds and `q` succeeds on the input left over by `p`.
- *
- * @param q a parser that will be executed after `p` (this parser)
- * succeeds -- evaluated at most once, and only when necessary.
- * @return a `Parser` that -- on success -- returns a `~` (like a `Pair`,
- * but easier to pattern match on) that contains the result of `p` and
- * that of `q`. The resulting parser fails if either `p` or `q` fails.
- */
- @migration("The call-by-name argument is evaluated at most once per constructed Parser object, instead of on every need that arises during parsing.", "2.9.0")
- def ~ [U](q: => Parser[U]): Parser[~[T, U]] = { lazy val p = q // lazy argument
- (for(a <- this; b <- p) yield new ~(a,b)).named("~")
- }
-
- /** A parser combinator for sequential composition which keeps only the right result.
- *
- * `p ~> q` succeeds if `p` succeeds and `q` succeeds on the input left over by `p`.
- *
- * @param q a parser that will be executed after `p` (this parser)
- * succeeds -- evaluated at most once, and only when necessary.
- * @return a `Parser` that -- on success -- returns the result of `q`.
- */
- @migration("The call-by-name argument is evaluated at most once per constructed Parser object, instead of on every need that arises during parsing.", "2.9.0")
- def ~> [U](q: => Parser[U]): Parser[U] = { lazy val p = q // lazy argument
- (for(a <- this; b <- p) yield b).named("~>")
- }
-
- /** A parser combinator for sequential composition which keeps only the left result.
- *
- * `p <~ q` succeeds if `p` succeeds and `q` succeeds on the input
- * left over by `p`.
- *
- * @note <~ has lower operator precedence than ~ or ~>.
- *
- * @param q a parser that will be executed after `p` (this parser) succeeds -- evaluated at most once, and only when necessary
- * @return a `Parser` that -- on success -- returns the result of `p`.
- */
- @migration("The call-by-name argument is evaluated at most once per constructed Parser object, instead of on every need that arises during parsing.", "2.9.0")
- def <~ [U](q: => Parser[U]): Parser[T] = { lazy val p = q // lazy argument
- (for(a <- this; b <- p) yield a).named("<~")
- }
-
- /* not really useful: V cannot be inferred because Parser is covariant in first type parameter (V is always trivially Nothing)
- def ~~ [U, V](q: => Parser[U])(implicit combine: (T, U) => V): Parser[V] = new Parser[V] {
- def apply(in: Input) = seq(Parser.this, q)((x, y) => combine(x,y))(in)
- } */
-
- /** A parser combinator for non-back-tracking sequential composition.
- *
- * `p ~! q` succeeds if `p` succeeds and `q` succeeds on the input left over by `p`.
- * In case of failure, no back-tracking is performed (in an earlier parser produced by the `|` combinator).
- *
- * @param p a parser that will be executed after `p` (this parser) succeeds
- * @return a `Parser` that -- on success -- returns a `~` (like a Pair, but easier to pattern match on)
- * that contains the result of `p` and that of `q`.
- * The resulting parser fails if either `p` or `q` fails, this failure is fatal.
- */
- def ~! [U](p: => Parser[U]): Parser[~[T, U]]
- = OnceParser{ (for(a <- this; b <- commit(p)) yield new ~(a,b)).named("~!") }
-
- /** A parser combinator for alternative composition.
- *
- * `p | q` succeeds if `p` succeeds or `q` succeeds.
- * Note that `q` is only tried if `p`s failure is non-fatal (i.e., back-tracking is allowed).
- *
- * @param q a parser that will be executed if `p` (this parser) fails (and allows back-tracking)
- * @return a `Parser` that returns the result of the first parser to succeed (out of `p` and `q`)
- * The resulting parser succeeds if (and only if)
- * - `p` succeeds, ''or''
- * - if `p` fails allowing back-tracking and `q` succeeds.
- */
- def | [U >: T](q: => Parser[U]): Parser[U] = append(q).named("|")
-
- // TODO
- /** A parser combinator for alternative with longest match composition.
- *
- * `p ||| q` succeeds if `p` succeeds or `q` succeeds.
- * If `p` and `q` both succeed, the parser that consumed the most characters accepts.
- *
- * @param q0 a parser that accepts if p consumes less characters. -- evaluated at most once, and only when necessary
- * @return a `Parser` that returns the result of the parser consuming the most characters (out of `p` and `q`).
- */
- @migration("The call-by-name argument is evaluated at most once per constructed Parser object, instead of on every need that arises during parsing.", "2.9.0")
- def ||| [U >: T](q0: => Parser[U]): Parser[U] = new Parser[U] {
- lazy val q = q0 // lazy argument
- def apply(in: Input) = {
- val res1 = Parser.this(in)
- val res2 = q(in)
-
- (res1, res2) match {
- case (s1 @ Success(_, next1), s2 @ Success(_, next2)) => if (next2.pos < next1.pos) s1 else s2
- case (s1 @ Success(_, _), _) => s1
- case (_, s2 @ Success(_, _)) => s2
- case (e1 @ Error(_, _), _) => e1
- case (f1 @ Failure(_, next1), ns2 @ NoSuccess(_, next2)) => if (next2.pos < next1.pos) f1 else ns2
- }
- }
- override def toString = "|||"
- }
-
- /** A parser combinator for function application.
- *
- * `p ^^ f` succeeds if `p` succeeds; it returns `f` applied to the result of `p`.
- *
- * @param f a function that will be applied to this parser's result (see `map` in `ParseResult`).
- * @return a parser that has the same behaviour as the current parser, but whose result is
- * transformed by `f`.
- */
- def ^^ [U](f: T => U): Parser[U] = map(f).named(toString+"^^")
-
- /** A parser combinator that changes a successful result into the specified value.
- *
- * `p ^^^ v` succeeds if `p` succeeds; discards its result, and returns `v` instead.
- *
- * @param v The new result for the parser, evaluated at most once (if `p` succeeds), not evaluated at all if `p` fails.
- * @return a parser that has the same behaviour as the current parser, but whose successful result is `v`
- */
- @migration("The call-by-name argument is evaluated at most once per constructed Parser object, instead of on every need that arises during parsing.", "2.9.0")
- def ^^^ [U](v: => U): Parser[U] = new Parser[U] {
- lazy val v0 = v // lazy argument
- def apply(in: Input) = Parser.this(in) map (x => v0)
- }.named(toString+"^^^")
-
- /** A parser combinator for partial function application.
- *
- * `p ^? (f, error)` succeeds if `p` succeeds AND `f` is defined at the result of `p`;
- * in that case, it returns `f` applied to the result of `p`. If `f` is not applicable,
- * error(the result of `p`) should explain why.
- *
- * @param f a partial function that will be applied to this parser's result
- * (see `mapPartial` in `ParseResult`).
- * @param error a function that takes the same argument as `f` and produces an error message
- * to explain why `f` wasn't applicable
- * @return a parser that succeeds if the current parser succeeds <i>and</i> `f` is applicable
- * to the result. If so, the result will be transformed by `f`.
- */
- def ^? [U](f: PartialFunction[T, U], error: T => String): Parser[U] = Parser{ in =>
- this(in).mapPartial(f, error)}.named(toString+"^?")
-
- /** A parser combinator for partial function application.
- *
- * `p ^? f` succeeds if `p` succeeds AND `f` is defined at the result of `p`;
- * in that case, it returns `f` applied to the result of `p`.
- *
- * @param f a partial function that will be applied to this parser's result
- * (see `mapPartial` in `ParseResult`).
- * @return a parser that succeeds if the current parser succeeds <i>and</i> `f` is applicable
- * to the result. If so, the result will be transformed by `f`.
- */
- def ^? [U](f: PartialFunction[T, U]): Parser[U] = ^?(f, r => "Constructor function not defined at "+r)
-
- /** A parser combinator that parameterizes a subsequent parser with the
- * result of this one.
- *
- * Use this combinator when a parser depends on the result of a previous
- * parser. `p` should be a function that takes the result from the first
- * parser and returns the second parser.
- *
- * `p into fq` (with `fq` typically `{x => q}`) first applies `p`, and
- * then, if `p` successfully returned result `r`, applies `fq(r)` to the
- * rest of the input.
- *
- * ''From: G. Hutton. Higher-order functions for parsing. J. Funct. Program., 2(3):323--343, 1992.''
- *
- * @example {{{
- * def perlRE = "m" ~> (".".r into (separator => """[^%s]*""".format(separator).r <~ separator))
- * }}}
- *
- * @param fq a function that, given the result from this parser, returns
- * the second parser to be applied
- * @return a parser that succeeds if this parser succeeds (with result `x`)
- * and if then `fq(x)` succeeds
- */
- def into[U](fq: T => Parser[U]): Parser[U] = flatMap(fq)
-
- // shortcuts for combinators:
-
- /** Returns `into(fq)`. */
- def >>[U](fq: T => Parser[U])=into(fq)
-
- /** Returns a parser that repeatedly parses what this parser parses.
- *
- * @return rep(this)
- */
- def * = rep(this)
-
- /** Returns a parser that repeatedly parses what this parser parses,
- * interleaved with the `sep` parser. The `sep` parser specifies how
- * the results parsed by this parser should be combined.
- *
- * @return chainl1(this, sep)
- */
- def *[U >: T](sep: => Parser[(U, U) => U]) = chainl1(this, sep)
-
- // TODO: improve precedence? a ~ b*(",") = a ~ (b*(",")) should be true
-
- /** Returns a parser that repeatedly (at least once) parses what this parser parses.
- *
- * @return rep1(this)
- */
- def + = rep1(this)
-
- /** Returns a parser that optionally parses what this parser parses.
- *
- * @return opt(this)
- */
- def ? = opt(this)
-
- /** Changes the failure message produced by a parser.
- *
- * This doesn't change the behavior of a parser on
- * success or error, just on failure. The semantics are
- * slightly different than those obtained by doing `| failure(msg)`,
- * in that the message produced by this method will always
- * replace the message produced, which is not guaranteed
- * by that idiom.
- *
- * For example, parser `p` below will always produce the
- * designated failure message, while `q` will not produce
- * it if `sign` is parsed but `number` is not.
- *
- * {{{
- * def p = sign.? ~ number withFailureMessage "Number expected!"
- * def q = sign.? ~ number | failure("Number expected!")
- * }}}
- *
- * @param msg The message that will replace the default failure message.
- * @return A parser with the same properties and different failure message.
- */
- def withFailureMessage(msg: String) = Parser{ in =>
- this(in) match {
- case Failure(_, next) => Failure(msg, next)
- case other => other
- }
- }
-
- /** Changes the error message produced by a parser.
- *
- * This doesn't change the behavior of a parser on
- * success or failure, just on error. The semantics are
- * slightly different than those obtained by doing `| error(msg)`,
- * in that the message produced by this method will always
- * replace the message produced, which is not guaranteed
- * by that idiom.
- *
- * For example, parser `p` below will always produce the
- * designated error message, while `q` will not produce
- * it if `sign` is parsed but `number` is not.
- *
- * {{{
- * def p = sign.? ~ number withErrorMessage "Number expected!"
- * def q = sign.? ~ number | error("Number expected!")
- * }}}
- *
- * @param msg The message that will replace the default error message.
- * @return A parser with the same properties and different error message.
- */
- def withErrorMessage(msg: String) = Parser{ in =>
- this(in) match {
- case Error(_, next) => Error(msg, next)
- case other => other
- }
- }
- }
-
- /** Wrap a parser so that its failures become errors (the `|` combinator
- * will give up as soon as it encounters an error, on failure it simply
- * tries the next alternative).
- */
- def commit[T](p: => Parser[T]) = Parser{ in =>
- p(in) match{
- case s @ Success(_, _) => s
- case e @ Error(_, _) => e
- case f @ Failure(msg, next) => Error(msg, next)
- }
- }
-
- /** A parser matching input elements that satisfy a given predicate.
- *
- * `elem(kind, p)` succeeds if the input starts with an element `e` for which `p(e)` is true.
- *
- * @param kind The element kind, used for error messages
- * @param p A predicate that determines which elements match.
- * @return A `Parser` that succeeds if the first input element satisfies `p`, using `kind` in the error message otherwise.
- */
- def elem(kind: String, p: Elem => Boolean) = acceptIf(p)(inEl => kind+" expected")
-
- /** A parser that matches only the given element `e`.
- *
- * `elem(e)` succeeds if the input starts with an element `e`.
- *
- * @param e the `Elem` that must be the next piece of input for the returned parser to succeed
- * @return a `Parser` that succeeds if `e` is the next available input (and returns it).
- */
- def elem(e: Elem): Parser[Elem] = accept(e)
-
- /** A parser that matches only the given element `e`.
- *
- * The method is implicit so that elements can automatically be lifted to their parsers.
- * For example, when parsing `Token`s, `Identifier("new")` (which is a `Token`) can be used directly,
- * instead of first creating a `Parser` using `accept(Identifier("new"))`.
- *
- * @param e the `Elem` that must be the next piece of input for the returned parser to succeed
- * @return a `Parser` that succeeds if `e` is the next available input.
- */
-
- implicit def accept(e: Elem): Parser[Elem] = acceptIf(_ == e)("`"+e+"' expected but " + _ + " found")
-
- /** A parser that matches only the given list of element `es`.
- *
- * `accept(es)` succeeds if the input subsequently provides the elements in the list `es`.
- *
- * @param es the list of expected elements
- * @return a Parser that recognizes a specified list of elements
- */
- def accept[ES <% List[Elem]](es: ES): Parser[List[Elem]] = acceptSeq(es)
-
- /** The parser that matches an element in the domain of the partial function `f`.
- *
- * If `f` is defined on the first element in the input, `f` is applied
- * to it to produce this parser's result.
- *
- * Example: The parser `accept("name", {case Identifier(n) => Name(n)})`
- * accepts an `Identifier(n)` and returns a `Name(n)`
- *
- * @param expected a description of the kind of element this parser expects (for error messages)
- * @param f a partial function that determines when this parser is successful and what its output is
- * @return A parser that succeeds if `f` is applicable to the first element of the input,
- * applying `f` to it to produce the result.
- */
- def accept[U](expected: String, f: PartialFunction[Elem, U]): Parser[U] = acceptMatch(expected, f)
-
- /** A parser matching input elements that satisfy a given predicate.
- *
- * `acceptIf(p)(el => "Unexpected "+el)` succeeds if the input starts with an element `e` for which `p(e)` is true.
- *
- * @param err A function from the received element into an error message.
- * @param p A predicate that determines which elements match.
- * @return A parser for elements satisfying p(e).
- */
- def acceptIf(p: Elem => Boolean)(err: Elem => String): Parser[Elem] = Parser { in =>
- if (in.atEnd) Failure("end of input", in)
- else if (p(in.first)) Success(in.first, in.rest)
- else Failure(err(in.first), in)
- }
-
- /** The parser that matches an element in the domain of the partial function `f`.
- *
- * If `f` is defined on the first element in the input, `f` is applied
- * to it to produce this parser's result.
- *
- * Example: The parser `acceptMatch("name", {case Identifier(n) => Name(n)})`
- * accepts an `Identifier(n)` and returns a `Name(n)`
- *
- * @param expected a description of the kind of element this parser expects (for error messages)
- * @param f a partial function that determines when this parser is successful and what its output is
- * @return A parser that succeeds if `f` is applicable to the first element of the input,
- * applying `f` to it to produce the result.
- */
- def acceptMatch[U](expected: String, f: PartialFunction[Elem, U]): Parser[U] = Parser{ in =>
- if (in.atEnd) Failure("end of input", in)
- else if (f.isDefinedAt(in.first)) Success(f(in.first), in.rest)
- else Failure(expected+" expected", in)
- }
-
- /** A parser that matches only the given [[scala.collection.Iterable]] collection of elements `es`.
- *
- * `acceptSeq(es)` succeeds if the input subsequently provides the elements in the iterable `es`.
- *
- * @param es the list of expected elements
- * @return a Parser that recognizes a specified list of elements
- */
- def acceptSeq[ES <% Iterable[Elem]](es: ES): Parser[List[Elem]] =
- es.foldRight[Parser[List[Elem]]](success(Nil)){(x, pxs) => accept(x) ~ pxs ^^ mkList}
-
- /** A parser that always fails.
- *
- * @param msg The error message describing the failure.
- * @return A parser that always fails with the specified error message.
- */
- def failure(msg: String) = Parser{ in => Failure(msg, in) }
-
- /** A parser that results in an error.
- *
- * @param msg The error message describing the failure.
- * @return A parser that always results in an error with the specified message.
- */
- def err(msg: String) = Parser{ in => Error(msg, in) }
-
- /** A parser that always succeeds.
- *
- * @param v The result for the parser
- * @return A parser that always succeeds, with the given result `v`
- */
- def success[T](v: T) = Parser{ in => Success(v, in) }
-
- /** A helper method that turns a `Parser` into one that will
- * print debugging information to stdout before and after
- * being applied.
- */
- def log[T](p: => Parser[T])(name: String): Parser[T] = Parser{ in =>
- println("trying "+ name +" at "+ in)
- val r = p(in)
- println(name +" --> "+ r)
- r
- }
-
- /** A parser generator for repetitions.
- *
- * `rep(p)` repeatedly uses `p` to parse the input until `p` fails
- * (the result is a List of the consecutive results of `p`).
- *
- * @param p a `Parser` that is to be applied successively to the input
- * @return A parser that returns a list of results produced by repeatedly applying `p` to the input.
- */
- def rep[T](p: => Parser[T]): Parser[List[T]] = rep1(p) | success(List())
-
- /** A parser generator for interleaved repetitions.
- *
- * `repsep(p, q)` repeatedly uses `p` interleaved with `q` to parse the input, until `p` fails.
- * (The result is a `List` of the results of `p`.)
- *
- * Example: `repsep(term, ",")` parses a comma-separated list of term's, yielding a list of these terms.
- *
- * @param p a `Parser` that is to be applied successively to the input
- * @param q a `Parser` that parses the elements that separate the elements parsed by `p`
- * @return A parser that returns a list of results produced by repeatedly applying `p` (interleaved with `q`) to the input.
- * The results of `p` are collected in a list. The results of `q` are discarded.
- */
- def repsep[T](p: => Parser[T], q: => Parser[Any]): Parser[List[T]] =
- rep1sep(p, q) | success(List())
-
- /** A parser generator for non-empty repetitions.
- *
- * `rep1(p)` repeatedly uses `p` to parse the input until `p` fails -- `p` must succeed at least
- * once (the result is a `List` of the consecutive results of `p`)
- *
- * @param p a `Parser` that is to be applied successively to the input
- * @return A parser that returns a list of results produced by repeatedly applying `p` to the input
- * (and that only succeeds if `p` matches at least once).
- */
- def rep1[T](p: => Parser[T]): Parser[List[T]] = rep1(p, p)
-
- /** A parser generator for non-empty repetitions.
- *
- * `rep1(f, p)` first uses `f` (which must succeed) and then repeatedly
- * uses `p` to parse the input until `p` fails
- * (the result is a `List` of the consecutive results of `f` and `p`)
- *
- * @param first a `Parser` that parses the first piece of input
- * @param p0 a `Parser` that is to be applied successively to the rest of the input (if any) -- evaluated at most once, and only when necessary
- * @return A parser that returns a list of results produced by first applying `f` and then
- * repeatedly `p` to the input (it only succeeds if `f` matches).
- */
- @migration("The `p0` call-by-name arguments is evaluated at most once per constructed Parser object, instead of on every need that arises during parsing.", "2.9.0")
- def rep1[T](first: => Parser[T], p0: => Parser[T]): Parser[List[T]] = Parser { in =>
- lazy val p = p0 // lazy argument
- val elems = new ListBuffer[T]
-
- def continue(in: Input): ParseResult[List[T]] = {
- val p0 = p // avoid repeatedly re-evaluating by-name parser
- @tailrec def applyp(in0: Input): ParseResult[List[T]] = p0(in0) match {
- case Success(x, rest) => elems += x ; applyp(rest)
- case e @ Error(_, _) => e // still have to propagate error
- case _ => Success(elems.toList, in0)
- }
-
- applyp(in)
- }
-
- first(in) match {
- case Success(x, rest) => elems += x ; continue(rest)
- case ns: NoSuccess => ns
- }
- }
-
- /** A parser generator for a specified number of repetitions.
- *
- * `repN(n, p)` uses `p` exactly `n` time to parse the input
- * (the result is a `List` of the `n` consecutive results of `p`).
- *
- * @param p a `Parser` that is to be applied successively to the input
- * @param num the exact number of times `p` must succeed
- * @return A parser that returns a list of results produced by repeatedly applying `p` to the input
- * (and that only succeeds if `p` matches exactly `n` times).
- */
- def repN[T](num: Int, p: => Parser[T]): Parser[List[T]] =
- if (num == 0) success(Nil) else Parser { in =>
- val elems = new ListBuffer[T]
- val p0 = p // avoid repeatedly re-evaluating by-name parser
-
- @tailrec def applyp(in0: Input): ParseResult[List[T]] =
- if (elems.length == num) Success(elems.toList, in0)
- else p0(in0) match {
- case Success(x, rest) => elems += x ; applyp(rest)
- case ns: NoSuccess => ns
- }
-
- applyp(in)
- }
-
- /** A parser generator for non-empty repetitions.
- *
- * `rep1sep(p, q)` repeatedly applies `p` interleaved with `q` to parse the
- * input, until `p` fails. The parser `p` must succeed at least once.
- *
- * @param p a `Parser` that is to be applied successively to the input
- * @param q a `Parser` that parses the elements that separate the elements parsed by `p`
- * (interleaved with `q`)
- * @return A parser that returns a list of results produced by repeatedly applying `p` to the input
- * (and that only succeeds if `p` matches at least once).
- * The results of `p` are collected in a list. The results of `q` are discarded.
- */
- def rep1sep[T](p : => Parser[T], q : => Parser[Any]): Parser[List[T]] =
- p ~ rep(q ~> p) ^^ {case x~y => x::y}
-
- /** A parser generator that, roughly, generalises the rep1sep generator so
- * that `q`, which parses the separator, produces a left-associative
- * function that combines the elements it separates.
- *
- * ''From: J. Fokker. Functional parsers. In J. Jeuring and E. Meijer, editors, Advanced Functional Programming,
- * volume 925 of Lecture Notes in Computer Science, pages 1--23. Springer, 1995.''
- *
- * @param p a parser that parses the elements
- * @param q a parser that parses the token(s) separating the elements, yielding a left-associative function that
- * combines two elements into one
- */
- def chainl1[T](p: => Parser[T], q: => Parser[(T, T) => T]): Parser[T]
- = chainl1(p, p, q)
-
- /** A parser generator that, roughly, generalises the `rep1sep` generator
- * so that `q`, which parses the separator, produces a left-associative
- * function that combines the elements it separates.
- *
- * @param first a parser that parses the first element
- * @param p a parser that parses the subsequent elements
- * @param q a parser that parses the token(s) separating the elements,
- * yielding a left-associative function that combines two elements
- * into one
- */
- def chainl1[T, U](first: => Parser[T], p: => Parser[U], q: => Parser[(T, U) => T]): Parser[T]
- = first ~ rep(q ~ p) ^^ {
- case x ~ xs => xs.foldLeft(x: T){case (a, f ~ b) => f(a, b)} // x's type annotation is needed to deal with changed type inference due to SI-5189
- }
-
- /** A parser generator that generalises the `rep1sep` generator so that `q`,
- * which parses the separator, produces a right-associative function that
- * combines the elements it separates. Additionally, the right-most (last)
- * element and the left-most combining function have to be supplied.
- *
- * rep1sep(p: Parser[T], q) corresponds to chainr1(p, q ^^ cons, cons, Nil) (where val cons = (x: T, y: List[T]) => x :: y)
- *
- * @param p a parser that parses the elements
- * @param q a parser that parses the token(s) separating the elements, yielding a right-associative function that
- * combines two elements into one
- * @param combine the "last" (left-most) combination function to be applied
- * @param first the "first" (right-most) element to be combined
- */
- def chainr1[T, U](p: => Parser[T], q: => Parser[(T, U) => U], combine: (T, U) => U, first: U): Parser[U]
- = p ~ rep(q ~ p) ^^ {
- case x ~ xs => (new ~(combine, x) :: xs).foldRight(first){case (f ~ a, b) => f(a, b)}
- }
-
- /** A parser generator for optional sub-phrases.
- *
- * `opt(p)` is a parser that returns `Some(x)` if `p` returns `x` and `None` if `p` fails.
- *
- * @param p A `Parser` that is tried on the input
- * @return a `Parser` that always succeeds: either with the result provided by `p` or
- * with the empty result
- */
- def opt[T](p: => Parser[T]): Parser[Option[T]] =
- p ^^ (x => Some(x)) | success(None)
-
- /** Wrap a parser so that its failures and errors become success and
- * vice versa -- it never consumes any input.
- */
- def not[T](p: => Parser[T]): Parser[Unit] = Parser { in =>
- p(in) match {
- case Success(_, _) => Failure("Expected failure", in)
- case _ => Success((), in)
- }
- }
-
- /** A parser generator for guard expressions. The resulting parser will
- * fail or succeed just like the one given as parameter but it will not
- * consume any input.
- *
- * @param p a `Parser` that is to be applied to the input
- * @return A parser that returns success if and only if `p` succeeds but
- * never consumes any input
- */
- def guard[T](p: => Parser[T]): Parser[T] = Parser { in =>
- p(in) match{
- case s@ Success(s1,_) => Success(s1, in)
- case e => e
- }
- }
-
- /** `positioned` decorates a parser's result with the start position of the
- * input it consumed.
- *
- * @param p a `Parser` whose result conforms to `Positional`.
- * @return A parser that has the same behaviour as `p`, but which marks its
- * result with the start position of the input it consumed,
- * if it didn't already have a position.
- */
- def positioned[T <: Positional](p: => Parser[T]): Parser[T] = Parser { in =>
- p(in) match {
- case Success(t, in1) => Success(if (t.pos == NoPosition) t setPos in.pos else t, in1)
- case ns: NoSuccess => ns
- }
- }
-
- /** A parser generator delimiting whole phrases (i.e. programs).
- *
- * `phrase(p)` succeeds if `p` succeeds and no input is left over after `p`.
- *
- * @param p the parser that must consume all input for the resulting parser
- * to succeed.
- * @return a parser that has the same result as `p`, but that only succeeds
- * if `p` consumed all the input.
- */
- def phrase[T](p: Parser[T]) = new Parser[T] {
- def apply(in: Input) = lastNoSuccessVar.withValue(None) {
- p(in) match {
- case s @ Success(out, in1) =>
- if (in1.atEnd)
- s
- else
- lastNoSuccessVar.value filterNot { _.next.pos < in1.pos } getOrElse Failure("end of input expected", in1)
- case ns => lastNoSuccessVar.value.getOrElse(ns)
- }
- }
- }
-
- /** Given a concatenation with a repetition (list), move the concatenated element into the list */
- def mkList[T] = (_: ~[T, List[T]]) match { case x ~ xs => x :: xs }
-
- /** A wrapper over sequence of matches.
- *
- * Given `p1: Parser[A]` and `p2: Parser[B]`, a parser composed with
- * `p1 ~ p2` will have type `Parser[~[A, B]]`. The successful result
- * of the parser can be extracted from this case class.
- *
- * It also enables pattern matching, so something like this is possible:
- *
- * {{{
- * def concat(p1: Parser[String], p2: Parser[String]): Parser[String] =
- * p1 ~ p2 ^^ { case a ~ b => a + b }
- * }}}
- */
- case class ~[+a, +b](_1: a, _2: b) {
- override def toString = "("+ _1 +"~"+ _2 +")"
- }
-
- /** A parser whose `~` combinator disallows back-tracking.
- */
- trait OnceParser[+T] extends Parser[T] {
- override def ~ [U](p: => Parser[U]): Parser[~[T, U]]
- = OnceParser{ (for(a <- this; b <- commit(p)) yield new ~(a,b)).named("~") }
- }
-}
diff --git a/src/parser-combinators/scala/util/parsing/combinator/RegexParsers.scala b/src/parser-combinators/scala/util/parsing/combinator/RegexParsers.scala
deleted file mode 100644
index 8ebbc573ad..0000000000
--- a/src/parser-combinators/scala/util/parsing/combinator/RegexParsers.scala
+++ /dev/null
@@ -1,166 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala
-package util.parsing.combinator
-
-import java.util.regex.Pattern
-import scala.util.matching.Regex
-import scala.util.parsing.input._
-import scala.collection.immutable.PagedSeq
-import scala.language.implicitConversions
-
-/** The ''most important'' differences between `RegexParsers` and
- * [[scala.util.parsing.combinator.Parsers]] are:
- *
- * - `Elem` is defined to be [[scala.Char]]
- * - There's an implicit conversion from [[java.lang.String]] to `Parser[String]`,
- * so that string literals can be used as parser combinators.
- * - There's an implicit conversion from [[scala.util.matching.Regex]] to `Parser[String]`,
- * so that regex expressions can be used as parser combinators.
- * - The parsing methods call the method `skipWhitespace` (defaults to `true`) and, if true,
- * skip any whitespace before each parser is called.
- * - Protected val `whiteSpace` returns a regex that identifies whitespace.
- *
- * For example, this creates a very simple calculator receiving `String` input:
- *
- * {{{
- * object Calculator extends RegexParsers {
- * def number: Parser[Double] = """\d+(\.\d*)?""".r ^^ { _.toDouble }
- * def factor: Parser[Double] = number | "(" ~> expr <~ ")"
- * def term : Parser[Double] = factor ~ rep( "*" ~ factor | "/" ~ factor) ^^ {
- * case number ~ list => (number /: list) {
- * case (x, "*" ~ y) => x * y
- * case (x, "/" ~ y) => x / y
- * }
- * }
- * def expr : Parser[Double] = term ~ rep("+" ~ log(term)("Plus term") | "-" ~ log(term)("Minus term")) ^^ {
- * case number ~ list => list.foldLeft(number) { // same as before, using alternate name for /:
- * case (x, "+" ~ y) => x + y
- * case (x, "-" ~ y) => x - y
- * }
- * }
- *
- * def apply(input: String): Double = parseAll(expr, input) match {
- * case Success(result, _) => result
- * case failure : NoSuccess => scala.sys.error(failure.msg)
- * }
- * }
- * }}}
- */
-trait RegexParsers extends Parsers {
-
- type Elem = Char
-
- protected val whiteSpace = """\s+""".r
-
- def skipWhitespace = whiteSpace.toString.length > 0
-
- /** Method called to handle whitespace before parsers.
- *
- * It checks `skipWhitespace` and, if true, skips anything
- * matching `whiteSpace` starting from the current offset.
- *
- * @param source The input being parsed.
- * @param offset The offset into `source` from which to match.
- * @return The offset to be used for the next parser.
- */
- protected def handleWhiteSpace(source: java.lang.CharSequence, offset: Int): Int =
- if (skipWhitespace)
- (whiteSpace findPrefixMatchOf (source.subSequence(offset, source.length))) match {
- case Some(matched) => offset + matched.end
- case None => offset
- }
- else
- offset
-
- /** A parser that matches a literal string */
- implicit def literal(s: String): Parser[String] = new Parser[String] {
- def apply(in: Input) = {
- val source = in.source
- val offset = in.offset
- val start = handleWhiteSpace(source, offset)
- var i = 0
- var j = start
- while (i < s.length && j < source.length && s.charAt(i) == source.charAt(j)) {
- i += 1
- j += 1
- }
- if (i == s.length)
- Success(source.subSequence(start, j).toString, in.drop(j - offset))
- else {
- val found = if (start == source.length()) "end of source" else "`"+source.charAt(start)+"'"
- Failure("`"+s+"' expected but "+found+" found", in.drop(start - offset))
- }
- }
- }
-
- /** A parser that matches a regex string */
- implicit def regex(r: Regex): Parser[String] = new Parser[String] {
- def apply(in: Input) = {
- val source = in.source
- val offset = in.offset
- val start = handleWhiteSpace(source, offset)
- (r findPrefixMatchOf (source.subSequence(start, source.length))) match {
- case Some(matched) =>
- Success(source.subSequence(start, start + matched.end).toString,
- in.drop(start + matched.end - offset))
- case None =>
- val found = if (start == source.length()) "end of source" else "`"+source.charAt(start)+"'"
- Failure("string matching regex `"+r+"' expected but "+found+" found", in.drop(start - offset))
- }
- }
- }
-
- /** `positioned` decorates a parser's result with the start position of the input it consumed.
- * If whitespace is being skipped, then it is skipped before the start position is recorded.
- *
- * @param p a `Parser` whose result conforms to `Positional`.
- * @return A parser that has the same behaviour as `p`, but which marks its result with the
- * start position of the input it consumed after whitespace has been skipped, if it
- * didn't already have a position.
- */
- override def positioned[T <: Positional](p: => Parser[T]): Parser[T] = {
- val pp = super.positioned(p)
- new Parser[T] {
- def apply(in: Input) = {
- val offset = in.offset
- val start = handleWhiteSpace(in.source, offset)
- pp(in.drop (start - offset))
- }
- }
- }
-
- override def phrase[T](p: Parser[T]): Parser[T] =
- super.phrase(p <~ opt("""\z""".r))
-
- /** Parse some prefix of reader `in` with parser `p`. */
- def parse[T](p: Parser[T], in: Reader[Char]): ParseResult[T] =
- p(in)
-
- /** Parse some prefix of character sequence `in` with parser `p`. */
- def parse[T](p: Parser[T], in: java.lang.CharSequence): ParseResult[T] =
- p(new CharSequenceReader(in))
-
- /** Parse some prefix of reader `in` with parser `p`. */
- def parse[T](p: Parser[T], in: java.io.Reader): ParseResult[T] =
- p(new PagedSeqReader(PagedSeq.fromReader(in)))
-
- /** Parse all of reader `in` with parser `p`. */
- def parseAll[T](p: Parser[T], in: Reader[Char]): ParseResult[T] =
- parse(phrase(p), in)
-
- /** Parse all of reader `in` with parser `p`. */
- def parseAll[T](p: Parser[T], in: java.io.Reader): ParseResult[T] =
- parse(phrase(p), in)
-
- /** Parse all of character sequence `in` with parser `p`. */
- def parseAll[T](p: Parser[T], in: java.lang.CharSequence): ParseResult[T] =
- parse(phrase(p), in)
-}
diff --git a/src/parser-combinators/scala/util/parsing/combinator/lexical/Lexical.scala b/src/parser-combinators/scala/util/parsing/combinator/lexical/Lexical.scala
deleted file mode 100644
index d8029d068f..0000000000
--- a/src/parser-combinators/scala/util/parsing/combinator/lexical/Lexical.scala
+++ /dev/null
@@ -1,40 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala
-package util.parsing
-package combinator
-package lexical
-
-import token._
-import input.CharArrayReader.EofCh
-
-/** This component complements the `Scanners` component with
- * common operations for lexical parsers.
- *
- * Refer to [[scala.util.parsing.combinator.lexical.StdLexical]]
- * for a concrete implementation for a simple, Scala-like language.
- *
- * @author Martin Odersky, Adriaan Moors
- */
-abstract class Lexical extends Scanners with Tokens {
-
- /** A character-parser that matches a letter (and returns it).*/
- def letter = elem("letter", _.isLetter)
-
- /** A character-parser that matches a digit (and returns it).*/
- def digit = elem("digit", _.isDigit)
-
- /** A character-parser that matches any character except the ones given in `cs` (and returns it).*/
- def chrExcept(cs: Char*) = elem("", ch => (cs forall (ch != _)))
-
- /** A character-parser that matches a white-space character (and returns it).*/
- def whitespaceChar = elem("space char", ch => ch <= ' ' && ch != EofCh)
-}
diff --git a/src/parser-combinators/scala/util/parsing/combinator/lexical/Scanners.scala b/src/parser-combinators/scala/util/parsing/combinator/lexical/Scanners.scala
deleted file mode 100644
index 2e12915bb8..0000000000
--- a/src/parser-combinators/scala/util/parsing/combinator/lexical/Scanners.scala
+++ /dev/null
@@ -1,63 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package util.parsing
-package combinator
-package lexical
-
-import input._
-
-/** This component provides core functionality for lexical parsers.
- *
- * See its subclasses [[scala.util.parsing.combinator.lexical.Lexical]] and -- most interestingly
- * [[scala.util.parsing.combinator.lexical.StdLexical]], for more functionality.
- *
- * @author Martin Odersky, Adriaan Moors
- */
-trait Scanners extends Parsers {
- type Elem = Char
- type Token
-
- /** This token is produced by a scanner `Scanner` when scanning failed. */
- def errorToken(msg: String): Token
-
- /** A parser that produces a token (from a stream of characters). */
- def token: Parser[Token]
-
- /** A parser for white-space -- its result will be discarded. */
- def whitespace: Parser[Any]
-
- /** `Scanner` is essentially¹ a parser that produces `Token`s
- * from a stream of characters. The tokens it produces are typically
- * passed to parsers in `TokenParsers`.
- *
- * @note ¹ `Scanner` is really a `Reader` of `Token`s
- */
- class Scanner(in: Reader[Char]) extends Reader[Token] {
- /** Convenience constructor (makes a character reader out of the given string) */
- def this(in: String) = this(new CharArrayReader(in.toCharArray()))
- private val (tok, rest1, rest2) = whitespace(in) match {
- case Success(_, in1) =>
- token(in1) match {
- case Success(tok, in2) => (tok, in1, in2)
- case ns: NoSuccess => (errorToken(ns.msg), ns.next, skip(ns.next))
- }
- case ns: NoSuccess => (errorToken(ns.msg), ns.next, skip(ns.next))
- }
- private def skip(in: Reader[Char]) = if (in.atEnd) in else in.rest
-
- override def source: java.lang.CharSequence = in.source
- override def offset: Int = in.offset
- def first = tok
- def rest = new Scanner(rest2)
- def pos = rest1.pos
- def atEnd = in.atEnd || (whitespace(in) match { case Success(_, in1) => in1.atEnd case _ => false })
- }
-}
-
diff --git a/src/parser-combinators/scala/util/parsing/combinator/lexical/StdLexical.scala b/src/parser-combinators/scala/util/parsing/combinator/lexical/StdLexical.scala
deleted file mode 100644
index 32d7502cda..0000000000
--- a/src/parser-combinators/scala/util/parsing/combinator/lexical/StdLexical.scala
+++ /dev/null
@@ -1,87 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package util.parsing
-package combinator
-package lexical
-
-import token._
-import input.CharArrayReader.EofCh
-import scala.collection.mutable
-
-/** This component provides a standard lexical parser for a simple,
- * [[http://scala-lang.org Scala]]-like language. It parses keywords and
- * identifiers, numeric literals (integers), strings, and delimiters.
- *
- * To distinguish between identifiers and keywords, it uses a set of
- * reserved identifiers: every string contained in `reserved` is returned
- * as a keyword token. (Note that `=>` is hard-coded as a keyword.)
- * Additionally, the kinds of delimiters can be specified by the
- * `delimiters` set.
- *
- * Usually this component is used to break character-based input into
- * bigger tokens, which are then passed to a token-parser (see
- * [[scala.util.parsing.combinator.syntactical.TokenParsers]].)
- *
- * @author Martin Odersky
- * @author Iulian Dragos
- * @author Adriaan Moors
- */
-class StdLexical extends Lexical with StdTokens {
- // see `token` in `Scanners`
- def token: Parser[Token] =
- ( identChar ~ rep( identChar | digit ) ^^ { case first ~ rest => processIdent(first :: rest mkString "") }
- | digit ~ rep( digit ) ^^ { case first ~ rest => NumericLit(first :: rest mkString "") }
- | '\'' ~ rep( chrExcept('\'', '\n', EofCh) ) ~ '\'' ^^ { case '\'' ~ chars ~ '\'' => StringLit(chars mkString "") }
- | '\"' ~ rep( chrExcept('\"', '\n', EofCh) ) ~ '\"' ^^ { case '\"' ~ chars ~ '\"' => StringLit(chars mkString "") }
- | EofCh ^^^ EOF
- | '\'' ~> failure("unclosed string literal")
- | '\"' ~> failure("unclosed string literal")
- | delim
- | failure("illegal character")
- )
-
- /** Returns the legal identifier chars, except digits. */
- def identChar = letter | elem('_')
-
- // see `whitespace in `Scanners`
- def whitespace: Parser[Any] = rep[Any](
- whitespaceChar
- | '/' ~ '*' ~ comment
- | '/' ~ '/' ~ rep( chrExcept(EofCh, '\n') )
- | '/' ~ '*' ~ failure("unclosed comment")
- )
-
- protected def comment: Parser[Any] = (
- '*' ~ '/' ^^ { case _ => ' ' }
- | chrExcept(EofCh) ~ comment
- )
-
- /** The set of reserved identifiers: these will be returned as `Keyword`s. */
- val reserved = new mutable.HashSet[String]
-
- /** The set of delimiters (ordering does not matter). */
- val delimiters = new mutable.HashSet[String]
-
- protected def processIdent(name: String) =
- if (reserved contains name) Keyword(name) else Identifier(name)
-
- private lazy val _delim: Parser[Token] = {
- // construct parser for delimiters by |'ing together the parsers for the individual delimiters,
- // starting with the longest one -- otherwise a delimiter D will never be matched if there is
- // another delimiter that is a prefix of D
- def parseDelim(s: String): Parser[Token] = accept(s.toList) ^^ { x => Keyword(s) }
-
- val d = new Array[String](delimiters.size)
- delimiters.copyToArray(d, 0)
- scala.util.Sorting.quickSort(d)
- (d.toList map parseDelim).foldRight(failure("no matching delimiter"): Parser[Token])((x, y) => y | x)
- }
- protected def delim: Parser[Token] = _delim
-}
diff --git a/src/parser-combinators/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala b/src/parser-combinators/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala
deleted file mode 100644
index 5b9d14c9a7..0000000000
--- a/src/parser-combinators/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala
+++ /dev/null
@@ -1,32 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala
-package util.parsing
-package combinator
-package syntactical
-
-import token._
-import lexical.StdLexical
-import scala.language.implicitConversions
-
-/** This component provides primitive parsers for the standard tokens defined in `StdTokens`.
-*
-* @author Martin Odersky, Adriaan Moors
- */
-class StandardTokenParsers extends StdTokenParsers {
- type Tokens = StdTokens
- val lexical = new StdLexical
-
- //an implicit keyword function that gives a warning when a given word is not in the reserved/delimiters list
- override implicit def keyword(chars : String): Parser[String] =
- if(lexical.reserved.contains(chars) || lexical.delimiters.contains(chars)) super.keyword(chars)
- else failure("You are trying to parse \""+chars+"\", but it is neither contained in the delimiters list, nor in the reserved keyword list of your lexical object")
-
-}
diff --git a/src/parser-combinators/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala b/src/parser-combinators/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala
deleted file mode 100644
index adcf85da7a..0000000000
--- a/src/parser-combinators/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala
+++ /dev/null
@@ -1,52 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala
-package util.parsing
-package combinator
-package syntactical
-
-import token._
-import scala.collection.mutable
-import scala.language.implicitConversions
-
-/** This component provides primitive parsers for the standard tokens defined in `StdTokens`.
-*
-* @author Martin Odersky, Adriaan Moors
- */
-trait StdTokenParsers extends TokenParsers {
- type Tokens <: StdTokens
- import lexical.{Keyword, NumericLit, StringLit, Identifier}
-
- protected val keywordCache = mutable.HashMap[String, Parser[String]]()
-
- /** A parser which matches a single keyword token.
- *
- * @param chars The character string making up the matched keyword.
- * @return a `Parser` that matches the given string
- */
-// implicit def keyword(chars: String): Parser[String] = accept(Keyword(chars)) ^^ (_.chars)
- implicit def keyword(chars: String): Parser[String] =
- keywordCache.getOrElseUpdate(chars, accept(Keyword(chars)) ^^ (_.chars))
-
- /** A parser which matches a numeric literal */
- def numericLit: Parser[String] =
- elem("number", _.isInstanceOf[NumericLit]) ^^ (_.chars)
-
- /** A parser which matches a string literal */
- def stringLit: Parser[String] =
- elem("string literal", _.isInstanceOf[StringLit]) ^^ (_.chars)
-
- /** A parser which matches an identifier */
- def ident: Parser[String] =
- elem("identifier", _.isInstanceOf[Identifier]) ^^ (_.chars)
-}
-
-
diff --git a/src/parser-combinators/scala/util/parsing/combinator/syntactical/TokenParsers.scala b/src/parser-combinators/scala/util/parsing/combinator/syntactical/TokenParsers.scala
deleted file mode 100644
index b06babcd7e..0000000000
--- a/src/parser-combinators/scala/util/parsing/combinator/syntactical/TokenParsers.scala
+++ /dev/null
@@ -1,35 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala
-package util.parsing
-package combinator
-package syntactical
-
-/** This is the core component for token-based parsers.
- *
- * @author Martin Odersky
- * @author Adriaan Moors
- */
-trait TokenParsers extends Parsers {
- /** `Tokens` is the abstract type of the `Token`s consumed by the parsers in this component. */
- type Tokens <: token.Tokens
-
- /** `lexical` is the component responsible for consuming some basic kind of
- * input (usually character-based) and turning it into the tokens
- * understood by these parsers.
- */
- val lexical: Tokens
-
- /** The input-type for these parsers*/
- type Elem = lexical.Token
-
-}
-
-
diff --git a/src/parser-combinators/scala/util/parsing/combinator/token/StdTokens.scala b/src/parser-combinators/scala/util/parsing/combinator/token/StdTokens.scala
deleted file mode 100644
index a102d1541e..0000000000
--- a/src/parser-combinators/scala/util/parsing/combinator/token/StdTokens.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package util.parsing
-package combinator
-package token
-
-/** This component provides the standard `Token`s for a simple, Scala-like language.
- *
- * @author Martin Odersky
- * @author Adriaan Moors
- */
-trait StdTokens extends Tokens {
- /** The class of keyword tokens */
- case class Keyword(chars: String) extends Token {
- override def toString = "`"+chars+"'"
- }
-
- /** The class of numeric literal tokens */
- case class NumericLit(chars: String) extends Token {
- override def toString = chars
- }
-
- /** The class of string literal tokens */
- case class StringLit(chars: String) extends Token {
- override def toString = "\""+chars+"\""
- }
-
- /** The class of identifier tokens */
- case class Identifier(chars: String) extends Token {
- override def toString = "identifier "+chars
- }
-}
diff --git a/src/parser-combinators/scala/util/parsing/combinator/token/Tokens.scala b/src/parser-combinators/scala/util/parsing/combinator/token/Tokens.scala
deleted file mode 100644
index 5c3f1f95b5..0000000000
--- a/src/parser-combinators/scala/util/parsing/combinator/token/Tokens.scala
+++ /dev/null
@@ -1,43 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package util.parsing
-package combinator
-package token
-
-/** This component provides the notion of `Token`, the unit of information that is passed from lexical
- * parsers in the `Lexical` component to the parsers in the `TokenParsers` component.
- *
- * @author Martin Odersky
- * @author Adriaan Moors
- */
-trait Tokens {
- /** Objects of this type are produced by a lexical parser or ``scanner'', and consumed by a parser.
- *
- * @see [[scala.util.parsing.combinator.syntactical.TokenParsers]]
- */
- abstract class Token {
- def chars: String
- }
-
- /** A class of error tokens. Error tokens are used to communicate
- * errors detected during lexical analysis
- */
- case class ErrorToken(msg: String) extends Token {
- def chars = "*** error: "+msg
- }
-
- /** A class for end-of-file tokens */
- case object EOF extends Token {
- def chars = "<eof>"
- }
-
- /** This token is produced by a scanner `Scanner` when scanning failed. */
- def errorToken(msg: String): Token = new ErrorToken(msg)
-}
diff --git a/src/parser-combinators/scala/util/parsing/input/CharArrayReader.scala b/src/parser-combinators/scala/util/parsing/input/CharArrayReader.scala
deleted file mode 100644
index 22530cb9aa..0000000000
--- a/src/parser-combinators/scala/util/parsing/input/CharArrayReader.scala
+++ /dev/null
@@ -1,35 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala
-package util.parsing.input
-
-/** An object encapsulating basic character constants.
- *
- * @author Martin Odersky
- * @author Adriaan Moors
- */
-object CharArrayReader {
- final val EofCh = '\032'
-}
-
-/** A character array reader reads a stream of characters (keeping track of their positions)
- * from an array.
- *
- * @param chars an array of characters
- * @param index starting offset into the array; the first element returned will be `source(index)`
- *
- * @author Martin Odersky
- * @author Adriaan Moors
- */
-class CharArrayReader(chars: Array[Char], index: Int) extends CharSequenceReader(chars, index) {
-
- def this(chars: Array[Char]) = this(chars, 0)
-
-}
diff --git a/src/parser-combinators/scala/util/parsing/input/CharSequenceReader.scala b/src/parser-combinators/scala/util/parsing/input/CharSequenceReader.scala
deleted file mode 100644
index 8e7751cc82..0000000000
--- a/src/parser-combinators/scala/util/parsing/input/CharSequenceReader.scala
+++ /dev/null
@@ -1,66 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala
-package util.parsing.input
-
-/** An object encapsulating basic character constants.
- *
- * @author Martin Odersky, Adriaan Moors
- */
-object CharSequenceReader {
- final val EofCh = '\032'
-}
-
-/** A character array reader reads a stream of characters (keeping track of their positions)
- * from an array.
- *
- * @param source the source sequence
- * @param offset starting offset.
- *
- * @author Martin Odersky
- */
-class CharSequenceReader(override val source: java.lang.CharSequence,
- override val offset: Int) extends Reader[Char] {
- import CharSequenceReader._
-
- /** Construct a `CharSequenceReader` with its first element at
- * `source(0)` and position `(1,1)`.
- */
- def this(source: java.lang.CharSequence) = this(source, 0)
-
- /** Returns the first element of the reader, or EofCh if reader is at its end.
- */
- def first =
- if (offset < source.length) source.charAt(offset) else EofCh
-
- /** Returns a CharSequenceReader consisting of all elements except the first.
- *
- * @return If `atEnd` is `true`, the result will be `this`;
- * otherwise, it's a `CharSequenceReader` containing the rest of input.
- */
- def rest: CharSequenceReader =
- if (offset < source.length) new CharSequenceReader(source, offset + 1)
- else this
-
- /** The position of the first element in the reader.
- */
- def pos: Position = new OffsetPosition(source, offset)
-
- /** true iff there are no more elements in this reader (except for trailing
- * EofCh's)
- */
- def atEnd = offset >= source.length
-
- /** Returns an abstract reader consisting of all elements except the first
- * `n` elements.
- */
- override def drop(n: Int): CharSequenceReader =
- new CharSequenceReader(source, offset + n)
-}
diff --git a/src/parser-combinators/scala/util/parsing/input/NoPosition.scala b/src/parser-combinators/scala/util/parsing/input/NoPosition.scala
deleted file mode 100644
index 4a32264b79..0000000000
--- a/src/parser-combinators/scala/util/parsing/input/NoPosition.scala
+++ /dev/null
@@ -1,25 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala
-package util.parsing.input
-
-/** Undefined position.
- *
- * @author Martin Odersky
- * @author Adriaan Moors
- */
-object NoPosition extends Position {
- def line = 0
- def column = 0
- override def toString = "<undefined position>"
- override def longString = toString
- def lineContents = ""
-}
diff --git a/src/parser-combinators/scala/util/parsing/input/OffsetPosition.scala b/src/parser-combinators/scala/util/parsing/input/OffsetPosition.scala
deleted file mode 100644
index 23f79c74d1..0000000000
--- a/src/parser-combinators/scala/util/parsing/input/OffsetPosition.scala
+++ /dev/null
@@ -1,73 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package util.parsing.input
-
-import scala.collection.mutable.ArrayBuffer
-
-/** `OffsetPosition` is a standard class for positions
- * represented as offsets into a source ``document''.
- *
- * @param source The source document
- * @param offset The offset indicating the position
- *
- * @author Martin Odersky
- */
-case class OffsetPosition(source: java.lang.CharSequence, offset: Int) extends Position {
-
- /** An index that contains all line starts, including first line, and eof. */
- private lazy val index: Array[Int] = {
- val lineStarts = new ArrayBuffer[Int]
- lineStarts += 0
- for (i <- 0 until source.length)
- if (source.charAt(i) == '\n') lineStarts += (i + 1)
- lineStarts += source.length
- lineStarts.toArray
- }
-
- /** The line number referred to by the position; line numbers start at 1. */
- def line: Int = {
- var lo = 0
- var hi = index.length - 1
- while (lo + 1 < hi) {
- val mid = (hi + lo) / 2
- if (offset < index(mid)) hi = mid
- else lo = mid
- }
- lo + 1
- }
-
- /** The column number referred to by the position; column numbers start at 1. */
- def column: Int = offset - index(line - 1) + 1
-
- /** The contents of the line numbered at the current offset.
- *
- * @return the line at `offset` (not including a newline)
- */
- def lineContents: String =
- source.subSequence(index(line - 1), index(line)).toString
-
- /** Returns a string representation of the `Position`, of the form `line.column`. */
- override def toString = line+"."+column
-
- /** Compare this position to another, by first comparing their line numbers,
- * and then -- if necessary -- using the columns to break a tie.
- *
- * @param that a `Position` to compare to this `Position`
- * @return true if this position's line number or (in case of equal line numbers)
- * column is smaller than the corresponding components of `that`
- */
- override def <(that: Position) = that match {
- case OffsetPosition(_, that_offset) =>
- this.offset < that_offset
- case _ =>
- this.line < that.line ||
- this.line == that.line && this.column < that.column
- }
-}
diff --git a/src/parser-combinators/scala/util/parsing/input/PagedSeqReader.scala b/src/parser-combinators/scala/util/parsing/input/PagedSeqReader.scala
deleted file mode 100644
index 468f1f9a5f..0000000000
--- a/src/parser-combinators/scala/util/parsing/input/PagedSeqReader.scala
+++ /dev/null
@@ -1,71 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala
-package util.parsing.input
-
-import scala.collection.immutable.PagedSeq
-
-/** An object encapsulating basic character constants.
- *
- * @author Martin Odersky
- * @author Adriaan Moors
- */
-object PagedSeqReader {
- final val EofCh = '\032'
-}
-
-/** A character array reader reads a stream of characters (keeping track of their positions)
- * from an array.
- *
- * @param seq the source sequence
- * @param offset starting offset.
- *
- * @author Martin Odersky
- */
-class PagedSeqReader(seq: PagedSeq[Char],
- override val offset: Int) extends Reader[Char] {
- import PagedSeqReader._
-
- override lazy val source: java.lang.CharSequence = seq
-
- /** Construct a `PagedSeqReader` with its first element at
- * `source(0)` and position `(1,1)`.
- */
- def this(seq: PagedSeq[Char]) = this(seq, 0)
-
- /** Returns the first element of the reader, or EofCh if reader is at its end
- */
- def first =
- if (seq.isDefinedAt(offset)) seq(offset) else EofCh
-
- /** Returns a PagedSeqReader consisting of all elements except the first
- *
- * @return If `atEnd` is `true`, the result will be `this`;
- * otherwise, it's a `PagedSeqReader` containing the rest of input.
- */
- def rest: PagedSeqReader =
- if (seq.isDefinedAt(offset)) new PagedSeqReader(seq, offset + 1)
- else this
-
- /** The position of the first element in the reader.
- */
- def pos: Position = new OffsetPosition(source, offset)
-
- /** true iff there are no more elements in this reader (except for trailing
- * EofCh's).
- */
- def atEnd = !seq.isDefinedAt(offset)
-
- /** Returns an abstract reader consisting of all elements except the first
- * `n` elements.
- */
- override def drop(n: Int): PagedSeqReader =
- new PagedSeqReader(seq, offset + n)
-}
diff --git a/src/parser-combinators/scala/util/parsing/input/Position.scala b/src/parser-combinators/scala/util/parsing/input/Position.scala
deleted file mode 100644
index b7995a6471..0000000000
--- a/src/parser-combinators/scala/util/parsing/input/Position.scala
+++ /dev/null
@@ -1,62 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package util.parsing.input
-
-/** `Position` is the base trait for objects describing a position in a `document`.
- *
- * It provides functionality for:
- * - generating a visual representation of this position (`longString`);
- * - comparing two positions (`<`).
- *
- * To use this class for a concrete kind of `document`, implement the `lineContents` method.
- *
- * @author Martin Odersky
- * @author Adriaan Moors
- */
-trait Position {
-
- /** The line number referred to by the position; line numbers start at 1. */
- def line: Int
-
- /** The column number referred to by the position; column numbers start at 1. */
- def column: Int
-
- /** The contents of the line at this position. (must not contain a new-line character).
- */
- protected def lineContents: String
-
- /** Returns a string representation of the `Position`, of the form `line.column`. */
- override def toString = ""+line+"."+column
-
- /** Returns a more ``visual'' representation of this position.
- * More precisely, the resulting string consists of two lines:
- * 1. the line in the document referred to by this position
- * 2. a caret indicating the column
- *
- * Example:
- * {{{
- * List(this, is, a, line, from, the, document)
- * ^
- * }}}
- */
- def longString = lineContents+"\n"+lineContents.take(column-1).map{x => if (x == '\t') x else ' ' } + "^"
-
- /** Compare this position to another, by first comparing their line numbers,
- * and then -- if necessary -- using the columns to break a tie.
- *
- * @param `that` a `Position` to compare to this `Position`
- * @return true if this position's line number or (in case of equal line numbers)
- * column is smaller than the corresponding components of `that`
- */
- def <(that: Position) = {
- this.line < that.line ||
- this.line == that.line && this.column < that.column
- }
-}
diff --git a/src/parser-combinators/scala/util/parsing/input/Positional.scala b/src/parser-combinators/scala/util/parsing/input/Positional.scala
deleted file mode 100644
index cfde67cadd..0000000000
--- a/src/parser-combinators/scala/util/parsing/input/Positional.scala
+++ /dev/null
@@ -1,30 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package util.parsing.input
-
-/** A trait for objects that have a source position.
- *
- * @author Martin Odersky, Adriaan Moors
- */
-trait Positional {
-
- /** The source position of this object, initially set to undefined. */
- var pos: Position = NoPosition
-
- /** If current source position is undefined, update it with given position `newpos`
- * @return the object itself
- */
- def setPos(newpos: Position): this.type = {
- if (pos eq NoPosition) pos = newpos
- this
- }
-}
-
-
diff --git a/src/parser-combinators/scala/util/parsing/input/Reader.scala b/src/parser-combinators/scala/util/parsing/input/Reader.scala
deleted file mode 100644
index 9dbf08a7ca..0000000000
--- a/src/parser-combinators/scala/util/parsing/input/Reader.scala
+++ /dev/null
@@ -1,62 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala
-package util.parsing.input
-
-
-/** An interface for streams of values that have positions.
- *
- * @author Martin Odersky
- * @author Adriaan Moors
- */
-abstract class Reader[+T] {
-
- /** If this is a reader over character sequences, the underlying char sequence.
- * If not, throws a `NoSuchMethodError` exception.
- *
- * @throws [[java.lang.NoSuchMethodError]] if this not a char sequence reader.
- */
- def source: java.lang.CharSequence =
- throw new NoSuchMethodError("not a char sequence reader")
-
- def offset: Int =
- throw new NoSuchMethodError("not a char sequence reader")
-
- /** Returns the first element of the reader
- */
- def first: T
-
- /** Returns an abstract reader consisting of all elements except the first
- *
- * @return If `atEnd` is `true`, the result will be `this';
- * otherwise, it's a `Reader` containing more elements.
- */
- def rest: Reader[T]
-
- /** Returns an abstract reader consisting of all elements except the first `n` elements.
- */
- def drop(n: Int): Reader[T] = {
- var r: Reader[T] = this
- var cnt = n
- while (cnt > 0) {
- r = r.rest; cnt -= 1
- }
- r
- }
-
- /** The position of the first element in the reader.
- */
- def pos: Position
-
- /** `true` iff there are no more elements in this reader.
- */
- def atEnd: Boolean
-}
diff --git a/src/parser-combinators/scala/util/parsing/input/StreamReader.scala b/src/parser-combinators/scala/util/parsing/input/StreamReader.scala
deleted file mode 100644
index 30eb097fd7..0000000000
--- a/src/parser-combinators/scala/util/parsing/input/StreamReader.scala
+++ /dev/null
@@ -1,76 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package util.parsing.input
-
-import java.io.BufferedReader
-import scala.collection.immutable.PagedSeq
-
-/** An object to create a `StreamReader` from a `java.io.Reader`.
- *
- * @author Miles Sabin
- */
-object StreamReader {
- final val EofCh = '\032'
-
- /** Create a `StreamReader` from a `java.io.Reader`.
- *
- * @param in the `java.io.Reader` that provides the underlying
- * stream of characters for this Reader.
- */
- def apply(in: java.io.Reader): StreamReader = {
- new StreamReader(PagedSeq.fromReader(in), 0, 1)
- }
-}
-
-/** A StreamReader reads from a character sequence, typically created as a PagedSeq
- * from a java.io.Reader
- *
- * NOTE:
- * StreamReaders do not really fulfill the new contract for readers, which
- * requires a `source` CharSequence representing the full input.
- * Instead source is treated line by line.
- * As a consequence, regex matching cannot extend beyond a single line
- * when a StreamReader are used for input.
- *
- * If you need to match regexes spanning several lines you should consider
- * class `PagedSeqReader` instead.
- *
- * @author Miles Sabin
- * @author Martin Odersky
- */
-sealed class StreamReader(seq: PagedSeq[Char], off: Int, lnum: Int) extends PagedSeqReader(seq, off) {
- import StreamReader._
-
- override def rest: StreamReader =
- if (off == seq.length) this
- else if (seq(off) == '\n')
- new StreamReader(seq.slice(off + 1), 0, lnum + 1)
- else new StreamReader(seq, off + 1, lnum)
-
- private def nextEol = {
- var i = off
- while (i < seq.length && seq(i) != '\n' && seq(i) != EofCh) i += 1
- i
- }
-
- override def drop(n: Int): StreamReader = {
- val eolPos = nextEol
- if (eolPos < off + n && eolPos < seq.length)
- new StreamReader(seq.slice(eolPos + 1), 0, lnum + 1).drop(off + n - (eolPos + 1))
- else
- new StreamReader(seq, off + n, lnum)
- }
-
- override def pos: Position = new Position {
- def line = lnum
- def column = off + 1
- def lineContents = seq.slice(0, nextEol).toString
- }
-}
diff --git a/src/parser-combinators/scala/util/parsing/json/JSON.scala b/src/parser-combinators/scala/util/parsing/json/JSON.scala
deleted file mode 100644
index b06dddf532..0000000000
--- a/src/parser-combinators/scala/util/parsing/json/JSON.scala
+++ /dev/null
@@ -1,97 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package util.parsing.json
-
-/**
- * This object provides a simple interface to the JSON parser class.
- * The default conversion for numerics is into a double. If you wish to
- * override this behavior at the global level, you can set the
- * `globalNumberParser` property to your own `(String => Any)` function.
- * If you only want to override at the per-thread level then you can set
- * the `perThreadNumberParser` property to your function. For example:
- * {{{
- * val myConversionFunc = {input : String => BigDecimal(input)}
- *
- * // Global override
- * JSON.globalNumberParser = myConversionFunc
- *
- * // Per-thread override
- * JSON.perThreadNumberParser = myConversionFunc
- * }}}
- *
- * @author Derek Chen-Becker <"java"+@+"chen-becker"+"."+"org">
- */
-@deprecated("This object will be removed.", "2.11.0")
-object JSON extends Parser {
-
- /**
- * This method converts ''raw'' results back into the original, deprecated
- * form.
- */
- private def unRaw (in : Any) : Any = in match {
- case JSONObject(obj) => obj.map({ case (k,v) => (k,unRaw(v))}).toList
- case JSONArray(list) => list.map(unRaw)
- case x => x
- }
-
- /**
- * Parse the given `JSON` string and return a list of elements. If the
- * string is a `JSON` object it will be a `JSONObject`. If it's a `JSON`
- * array it will be a `JSONArray`.
- *
- * @param input the given `JSON` string.
- * @return an optional `JSONType` element.
- */
- def parseRaw(input : String) : Option[JSONType] =
- phrase(root)(new lexical.Scanner(input)) match {
- case Success(result, _) => Some(result)
- case _ => None
- }
-
- /**
- * Parse the given `JSON` string and return either a `List[Any]`
- * if the `JSON` string specifies an `Array`, or a
- * `Map[String,Any]` if the `JSON` string specifies an object.
- *
- * @param input the given `JSON` string.
- * @return an optional list or map.
- */
- def parseFull(input: String): Option[Any] =
- parseRaw(input) match {
- case Some(data) => Some(resolveType(data))
- case None => None
- }
-
- /**
- * A utility method to resolve a parsed `JSON` list into objects or
- * arrays. See the `parse` method for details.
- */
- def resolveType(input: Any): Any = input match {
- case JSONObject(data) => data.transform {
- case (k,v) => resolveType(v)
- }
- case JSONArray(data) => data.map(resolveType)
- case x => x
- }
-
- /**
- * The global (VM) default function for converting a string to a numeric value.
- */
- def globalNumberParser_=(f: NumericParser) { defaultNumberParser = f }
- def globalNumberParser : NumericParser = defaultNumberParser
-
- /**
- * Defines the function used to convert a numeric string literal into a
- * numeric format on a per-thread basis. Use `globalNumberParser` for a
- * global override.
- */
- def perThreadNumberParser_=(f : NumericParser) { numberParser.set(f) }
- def perThreadNumberParser : NumericParser = numberParser.get()
-}
diff --git a/src/parser-combinators/scala/util/parsing/json/Lexer.scala b/src/parser-combinators/scala/util/parsing/json/Lexer.scala
deleted file mode 100644
index 7fc4e0bab6..0000000000
--- a/src/parser-combinators/scala/util/parsing/json/Lexer.scala
+++ /dev/null
@@ -1,90 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala
-package util.parsing.json
-
-import scala.util.parsing.combinator._
-import scala.util.parsing.combinator.lexical._
-import scala.util.parsing.input.CharArrayReader.EofCh
-
-/**
- * @author Derek Chen-Becker <"java"+@+"chen-becker"+"."+"org">
- */
-@deprecated("This class will be removed.", "2.11.0")
-class Lexer extends StdLexical with ImplicitConversions {
-
- override def token: Parser[Token] =
- //( '\"' ~ rep(charSeq | letter) ~ '\"' ^^ lift(StringLit)
- ( string ^^ StringLit
- | number ~ letter ^^ { case n ~ l => ErrorToken("Invalid number format : " + n + l) }
- | '-' ~> whitespace ~ number ~ letter ^^ { case ws ~ num ~ l => ErrorToken("Invalid number format : -" + num + l) }
- | '-' ~> whitespace ~ number ^^ { case ws ~ num => NumericLit("-" + num) }
- | number ^^ NumericLit
- | EofCh ^^^ EOF
- | delim
- | '\"' ~> failure("Unterminated string")
- | rep(letter) ^^ checkKeyword
- | failure("Illegal character")
- )
-
- def checkKeyword(xs : List[Any]) = {
- val strRep = xs mkString ""
- if (reserved contains strRep) Keyword(strRep) else ErrorToken("Not a keyword: " + strRep)
- }
-
- /** A string is a collection of zero or more Unicode characters, wrapped in
- * double quotes, using backslash escapes (cf. http://www.json.org/).
- */
- def string = '\"' ~> rep(charSeq | chrExcept('\"', '\n', EofCh)) <~ '\"' ^^ { _ mkString "" }
-
- override def whitespace = rep(whitespaceChar)
-
- def number = intPart ~ opt(fracPart) ~ opt(expPart) ^^ { case i ~ f ~ e =>
- i + optString(".", f) + optString("", e)
- }
- def intPart = zero | intList
- def intList = nonzero ~ rep(digit) ^^ {case x ~ y => (x :: y) mkString ""}
- def fracPart = '.' ~> rep(digit) ^^ { _ mkString "" }
- def expPart = exponent ~ opt(sign) ~ rep1(digit) ^^ { case e ~ s ~ d =>
- e + optString("", s) + d.mkString("")
- }
-
- private def optString[A](pre: String, a: Option[A]) = a match {
- case Some(x) => pre + x.toString
- case None => ""
- }
-
- def zero: Parser[String] = '0' ^^^ "0"
- def nonzero = elem("nonzero digit", d => d.isDigit && d != '0')
- def exponent = elem("exponent character", d => d == 'e' || d == 'E')
- def sign = elem("sign character", d => d == '-' || d == '+')
-
- def charSeq: Parser[String] =
- ('\\' ~ '\"' ^^^ "\""
- |'\\' ~ '\\' ^^^ "\\"
- |'\\' ~ '/' ^^^ "/"
- |'\\' ~ 'b' ^^^ "\b"
- |'\\' ~ 'f' ^^^ "\f"
- |'\\' ~ 'n' ^^^ "\n"
- |'\\' ~ 'r' ^^^ "\r"
- |'\\' ~ 't' ^^^ "\t"
- |'\\' ~> 'u' ~> unicodeBlock)
-
- val hexDigits = Set[Char]() ++ "0123456789abcdefABCDEF".toArray
- def hexDigit = elem("hex digit", hexDigits.contains(_))
-
- private def unicodeBlock = hexDigit ~ hexDigit ~ hexDigit ~ hexDigit ^^ {
- case a ~ b ~ c ~ d =>
- new String(Array(Integer.parseInt(List(a, b, c, d) mkString "", 16)), 0, 1)
- }
-
- //private def lift[T](f: String => T)(xs: List[Any]): T = f(xs mkString "")
-}
diff --git a/src/parser-combinators/scala/util/parsing/json/Parser.scala b/src/parser-combinators/scala/util/parsing/json/Parser.scala
deleted file mode 100644
index 521dfc6612..0000000000
--- a/src/parser-combinators/scala/util/parsing/json/Parser.scala
+++ /dev/null
@@ -1,147 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala
-package util.parsing.json
-
-import scala.util.parsing.combinator._
-import scala.util.parsing.combinator.syntactical._
-
-/**
- * A marker class for the JSON result types.
- *
- * @author Derek Chen-Becker <"java"+@+"chen-becker"+"."+"org">
- */
-@deprecated("This class will be removed.", "2.11.0")
-sealed abstract class JSONType {
- /**
- * This version of toString allows you to provide your own value
- * formatter.
- */
- def toString (formatter : JSONFormat.ValueFormatter) : String
-
- /**
- * Returns a String representation of this JSON value
- * using the JSONFormat.defaultFormatter.
- */
- override def toString = toString(JSONFormat.defaultFormatter)
-}
-
-/**
- * This object defines functions that are used when converting JSONType
- * values into String representations. Mostly this is concerned with
- * proper quoting of strings.
- *
- * @author Derek Chen-Becker <"java"+@+"chen-becker"+"."+"org">
- */
-@deprecated("This object will be removed.", "2.11.0")
-object JSONFormat {
- /**
- * This type defines a function that can be used to
- * format values into JSON format.
- */
- type ValueFormatter = Any => String
-
- /**
- * The default formatter used by the library. You can
- * provide your own with the toString calls on
- * JSONObject and JSONArray instances.
- */
- val defaultFormatter : ValueFormatter = (x : Any) => x match {
- case s : String => "\"" + quoteString(s) + "\""
- case jo : JSONObject => jo.toString(defaultFormatter)
- case ja : JSONArray => ja.toString(defaultFormatter)
- case other => other.toString
- }
-
- /**
- * This function can be used to properly quote Strings
- * for JSON output.
- */
- def quoteString (s : String) : String =
- s.map {
- case '"' => "\\\""
- case '\\' => "\\\\"
- case '/' => "\\/"
- case '\b' => "\\b"
- case '\f' => "\\f"
- case '\n' => "\\n"
- case '\r' => "\\r"
- case '\t' => "\\t"
- /* We'll unicode escape any control characters. These include:
- * 0x0 -> 0x1f : ASCII Control (C0 Control Codes)
- * 0x7f : ASCII DELETE
- * 0x80 -> 0x9f : C1 Control Codes
- *
- * Per RFC4627, section 2.5, we're not technically required to
- * encode the C1 codes, but we do to be safe.
- */
- case c if ((c >= '\u0000' && c <= '\u001f') || (c >= '\u007f' && c <= '\u009f')) => "\\u%04x".format(c.toInt)
- case c => c
- }.mkString
-}
-
-/**
- * Represents a JSON Object (map).
- *
- * @author Derek Chen-Becker <"java"+@+"chen-becker"+"."+"org">
- */
-@deprecated("This class will be removed.", "2.11.0")
-case class JSONObject (obj : Map[String,Any]) extends JSONType {
- def toString (formatter : JSONFormat.ValueFormatter) =
- "{" + obj.map({ case (k,v) => formatter(k.toString) + " : " + formatter(v) }).mkString(", ") + "}"
-}
-
-/**
- * Represents a JSON Array (list).
- * @author Derek Chen-Becker <"java"+@+"chen-becker"+"."+"org">
- */
-@deprecated("This class will be removed.", "2.11.0")
-case class JSONArray (list : List[Any]) extends JSONType {
- def toString (formatter : JSONFormat.ValueFormatter) =
- "[" + list.map(formatter).mkString(", ") + "]"
-}
-
-/**
- * The main JSON Parser.
- *
- * @author Derek Chen-Becker <"java"+@+"chen-becker"+"."+"org">
- */
-@deprecated("This class will be removed.", "2.11.0")
-class Parser extends StdTokenParsers with ImplicitConversions {
- // Fill in abstract defs
- type Tokens = Lexer
- val lexical = new Tokens
-
- // Configure lexical parsing
- lexical.reserved ++= List("true", "false", "null")
- lexical.delimiters ++= List("{", "}", "[", "]", ":", ",")
-
- /** Type signature for functions that can parse numeric literals */
- type NumericParser = String => Any
-
- // Global default number parsing function
- protected var defaultNumberParser : NumericParser = {_.toDouble}
-
- // Per-thread default number parsing function
- protected val numberParser = new ThreadLocal[NumericParser]() {
- override def initialValue() = defaultNumberParser
- }
-
- // Define the grammar
- def root = jsonObj | jsonArray
- def jsonObj = "{" ~> repsep(objEntry, ",") <~ "}" ^^ { case vals : List[_] => JSONObject(Map(vals : _*)) }
- def jsonArray = "[" ~> repsep(value, ",") <~ "]" ^^ { case vals : List[_] => JSONArray(vals) }
- def objEntry = stringVal ~ (":" ~> value) ^^ { case x ~ y => (x, y) }
- def value: Parser[Any] = (jsonObj | jsonArray | number | "true" ^^^ true | "false" ^^^ false | "null" ^^^ null | stringVal)
- def stringVal = accept("string", { case lexical.StringLit(n) => n} )
- def number = accept("number", { case lexical.NumericLit(n) => numberParser.get.apply(n)} )
-}
-
diff --git a/src/partest-extras/scala/tools/partest/ReplTest.scala b/src/partest-extras/scala/tools/partest/ReplTest.scala
index 7cc2dd39a9..a728e8bdef 100644
--- a/src/partest-extras/scala/tools/partest/ReplTest.scala
+++ b/src/partest-extras/scala/tools/partest/ReplTest.scala
@@ -9,8 +9,8 @@ import scala.tools.nsc.Settings
import scala.tools.nsc.interpreter.ILoop
import java.lang.reflect.{ Method => JMethod, Field => JField }
-/** A trait for testing repl code. It drops the first line
- * of output because the real repl prints a version number.
+/** A class for testing repl code.
+ * It filters the line of output that mentions a version number.
*/
abstract class ReplTest extends DirectTest {
// override to transform Settings object immediately before the finish
@@ -22,22 +22,56 @@ abstract class ReplTest extends DirectTest {
s.Xnojline.value = true
transformSettings(s)
}
+ def welcoming: Boolean = false
+ lazy val welcome = "(Welcome to Scala) version .*".r
+ def normalize(s: String) = s match {
+ case welcome(w) => w
+ case s => s
+ }
+ def unwelcoming(s: String) = s match {
+ case welcome(w) => false
+ case _ => true
+ }
def eval() = {
val s = settings
log("eval(): settings = " + s)
- ILoop.runForTranscript(code, s).lines drop 1
+ //ILoop.runForTranscript(code, s).lines drop 1 // not always first line
+ val lines = ILoop.runForTranscript(code, s).lines
+ if (welcoming) lines map normalize
+ else lines filter unwelcoming
}
def show() = eval() foreach println
}
+/** Retain and normalize the welcome message. */
+trait Welcoming { this: ReplTest =>
+ override def welcoming = true
+}
+
+/** Run a REPL test from a session transcript.
+ * The `session` should be a triple-quoted String starting
+ * with the `Type in expressions` message and ending
+ * after the final `prompt`, including the last space.
+ */
abstract class SessionTest extends ReplTest {
+ /** Session transcript, as a triple-quoted, multiline, marginalized string. */
def session: String
- override final def code = expected filter (_.startsWith(prompt)) map (_.drop(prompt.length)) mkString "\n"
- def expected = session.stripMargin.lines.toList
+
+ /** Expected output, as an iterator. */
+ def expected = session.stripMargin.lines
+
+ /** Code is the command list culled from the session (or the expected session output).
+ * Would be nicer if code were lazy lines.
+ */
+ override final def code = expected filter (_ startsWith prompt) map (_ drop prompt.length) mkString "\n"
+
final def prompt = "scala> "
+
+ /** Default test is to compare expected and actual output and emit the diff on a failed comparison. */
override def show() = {
- val out = eval().toList
- if (out.size != expected.size) Console println s"Expected ${expected.size} lines, got ${out.size}"
- if (out != expected) Console print nest.FileManager.compareContents(expected, out, "expected", "actual")
+ val evaled = eval().toList
+ val wanted = expected.toList
+ if (evaled.size != wanted.size) Console println s"Expected ${wanted.size} lines, got ${evaled.size}"
+ if (evaled != wanted) Console print nest.FileManager.compareContents(wanted, evaled, "expected", "actual")
}
}
diff --git a/src/partest-extras/scala/tools/partest/ScriptTest.scala b/src/partest-extras/scala/tools/partest/ScriptTest.scala
new file mode 100644
index 0000000000..24a4121b54
--- /dev/null
+++ b/src/partest-extras/scala/tools/partest/ScriptTest.scala
@@ -0,0 +1,21 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ */
+
+package scala.tools.partest
+
+import scala.reflect.internal.util.ScalaClassLoader
+
+/** A `ScriptTest` is a `DirectTest` for which the code
+ * is the contents of a script file.
+ */
+abstract class ScriptTest extends DirectTest {
+ def testmain = "TestMain"
+ override def extraSettings = s"-usejavacp -Xscript $testmain"
+ def scriptPath = testPath changeExtension "script"
+ def code = scriptPath.toFile.slurp
+ def show() = {
+ compile()
+ ScalaClassLoader(getClass.getClassLoader).run(testmain, Seq.empty[String])
+ }
+}
diff --git a/src/reflect/scala/reflect/api/BuildUtils.scala b/src/reflect/scala/reflect/api/BuildUtils.scala
index c568cf74c0..60c2a81947 100644
--- a/src/reflect/scala/reflect/api/BuildUtils.scala
+++ b/src/reflect/scala/reflect/api/BuildUtils.scala
@@ -58,16 +58,12 @@ private[reflect] trait BuildUtils { self: Universe =>
*/
def setAnnotations[S <: Symbol](sym: S, annots: List[Annotation]): S
- def flagsFromBits(bits: Long): FlagSet
-
def This(sym: Symbol): Tree
def Select(qualifier: Tree, sym: Symbol): Select
def Ident(sym: Symbol): Ident
- def Block(stats: List[Tree]): Block
-
def TypeTree(tp: Type): TypeTree
def thisPrefix(sym: Symbol): Type
@@ -76,23 +72,45 @@ private[reflect] trait BuildUtils { self: Universe =>
def setSymbol[T <: Tree](tree: T, sym: Symbol): T
- def mkAnnotationCtor(tree: Tree, args: List[Tree]): Tree
+ def mkAnnotation(tree: Tree): Tree
+
+ def mkAnnotation(trees: List[Tree]): List[Tree]
+
+ def mkRefineStat(stat: Tree): Tree
+
+ def mkRefineStat(stats: List[Tree]): List[Tree]
+
+ def mkEarlyDef(defn: Tree): Tree
+
+ def mkEarlyDef(defns: List[Tree]): List[Tree]
+
+ def RefTree(qual: Tree, sym: Symbol): Tree
+
+ val ScalaDot: ScalaDotExtractor
- val FlagsAsBits: FlagsAsBitsExtractor
+ trait ScalaDotExtractor {
+ def apply(name: Name): Tree
+ def unapply(tree: Tree): Option[Name]
+ }
+
+ val FlagsRepr: FlagsReprExtractor
- trait FlagsAsBitsExtractor {
- def unapply(flags: Long): Option[Long]
+ trait FlagsReprExtractor {
+ def apply(value: Long): FlagSet
+ def unapply(flags: Long): Some[Long]
}
- val TypeApplied: TypeAppliedExtractor
+ val SyntacticTypeApplied: SyntacticTypeAppliedExtractor
- trait TypeAppliedExtractor {
+ trait SyntacticTypeAppliedExtractor {
+ def apply(tree: Tree, targs: List[Tree]): Tree
def unapply(tree: Tree): Some[(Tree, List[Tree])]
}
- val Applied: AppliedExtractor
+ val SyntacticApplied: SyntacticAppliedExtractor
- trait AppliedExtractor {
+ trait SyntacticAppliedExtractor {
+ def apply(tree: Tree, argss: List[List[Tree]]): Tree
def unapply(tree: Tree): Some[(Tree, List[List[Tree]])]
}
@@ -100,20 +118,80 @@ private[reflect] trait BuildUtils { self: Universe =>
trait SyntacticClassDefExtractor {
def apply(mods: Modifiers, name: TypeName, tparams: List[TypeDef],
- constrMods: Modifiers, vparamss: List[List[ValDef]], parents: List[Tree],
- selfdef: ValDef, body: List[Tree]): Tree
- def unapply(tree: Tree): Option[(Modifiers, TypeName, List[TypeDef], Modifiers,
- List[List[ValDef]], List[Tree], ValDef, List[Tree])]
+ constrMods: Modifiers, vparamss: List[List[ValDef]], earlyDefs: List[Tree],
+ parents: List[Tree], selfdef: ValDef, body: List[Tree]): ClassDef
+ def unapply(tree: Tree): Option[(Modifiers, TypeName, List[TypeDef], Modifiers, List[List[ValDef]],
+ List[Tree], List[Tree], ValDef, List[Tree])]
+ }
+
+ val SyntacticTraitDef: SyntacticTraitDefExtractor
+
+ trait SyntacticTraitDefExtractor {
+ def apply(mods: Modifiers, name: TypeName, tparams: List[TypeDef],
+ earlyDefs: List[Tree], parents: List[Tree], selfdef: ValDef, body: List[Tree]): ClassDef
+ def unapply(tree: Tree): Option[(Modifiers, TypeName, List[TypeDef],
+ List[Tree], List[Tree], ValDef, List[Tree])]
}
- val TupleN: TupleNExtractor
- val TupleTypeN: TupleNExtractor
+ val SyntacticModuleDef: SyntacticModuleDefExtractor
+
+ trait SyntacticModuleDefExtractor {
+ def apply(mods: Modifiers, name: TermName, earlyDefs: List[Tree],
+ parents: List[Tree], selfdef: ValDef, body: List[Tree]): Tree
+ def unapply(tree: Tree): Option[(Modifiers, TermName, List[Tree], List[Tree], ValDef, List[Tree])]
+ }
- trait TupleNExtractor {
+ val SyntacticTuple: SyntacticTupleExtractor
+ val SyntacticTupleType: SyntacticTupleExtractor
+
+ trait SyntacticTupleExtractor {
def apply(args: List[Tree]): Tree
def unapply(tree: Tree): Option[List[Tree]]
}
- def RefTree(qual: Tree, sym: Symbol): Tree
+ val SyntacticBlock: SyntacticBlockExtractor
+
+ trait SyntacticBlockExtractor {
+ def apply(stats: List[Tree]): Tree
+ def unapply(tree: Tree): Option[List[Tree]]
+ }
+
+ val SyntacticNew: SyntacticNewExtractor
+
+ trait SyntacticNewExtractor {
+ def apply(earlyDefs: List[Tree], parents: List[Tree], selfdef: ValDef, body: List[Tree]): Tree
+ def unapply(tree: Tree): Option[(List[Tree], List[Tree], ValDef, List[Tree])]
+ }
+
+ val SyntacticFunctionType: SyntacticFunctionTypeExtractor
+
+ trait SyntacticFunctionTypeExtractor {
+ def apply(argtpes: List[Tree], restpe: Tree): Tree
+ def unapply(tree: Tree): Option[(List[Tree], Tree)]
+ }
+
+ val SyntacticFunction: SyntacticFunctionExtractor
+
+ trait SyntacticFunctionExtractor {
+ def apply(params: List[ValDef], body: Tree): Tree
+
+ def unapply(tree: Tree): Option[(List[ValDef], Tree)]
+ }
+
+ val SyntacticDefDef: SyntacticDefDefExtractor
+
+ trait SyntacticDefDefExtractor {
+ def apply(mods: Modifiers, name: TermName, tparams: List[Tree], vparamss: List[List[ValDef]], tpt: Tree, rhs: Tree): DefDef
+
+ def unapply(tree: Tree): Option[(Modifiers, TermName, List[Tree], List[List[ValDef]], Tree, Tree)]
+ }
+
+ val SyntacticValDef: SyntacticValDefExtractor
+ val SyntacticVarDef: SyntacticValDefExtractor
+
+ trait SyntacticValDefExtractor {
+ def apply(mods: Modifiers, name: TermName, tpt: Tree, rhs: Tree): ValDef
+ def unapply(tree: Tree): Option[(Modifiers, TermName, Tree, Tree)]
+ }
}
}
diff --git a/src/reflect/scala/reflect/internal/BuildUtils.scala b/src/reflect/scala/reflect/internal/BuildUtils.scala
index cdebfe52f8..06a6e10c30 100644
--- a/src/reflect/scala/reflect/internal/BuildUtils.scala
+++ b/src/reflect/scala/reflect/internal/BuildUtils.scala
@@ -5,7 +5,7 @@ package internal
import Flags._
trait BuildUtils { self: SymbolTable =>
- import definitions.{TupleClass, MaxTupleArity, ScalaPackage, UnitClass}
+ import definitions.{TupleClass, FunctionClass, MaxTupleArity, MaxFunctionArity, ScalaPackage, UnitClass}
class BuildImpl extends BuildApi {
@@ -47,20 +47,12 @@ trait BuildUtils { self: SymbolTable =>
def setTypeSignature[S <: Symbol](sym: S, tpe: Type): S =
sym.setTypeSignature(tpe)
- def flagsFromBits(bits: Long): FlagSet = bits
-
def This(sym: Symbol): Tree = self.This(sym)
def Select(qualifier: Tree, sym: Symbol): Select = self.Select(qualifier, sym)
def Ident(sym: Symbol): Ident = self.Ident(sym)
- def Block(stats: List[Tree]): Block = stats match {
- case Nil => self.Block(Nil, Literal(Constant(())))
- case elem :: Nil => self.Block(Nil, elem)
- case elems => self.Block(elems.init, elems.last)
- }
-
def TypeTree(tp: Type): TypeTree = self.TypeTree(tp)
def thisPrefix(sym: Symbol): Type = sym.thisPrefix
@@ -69,72 +61,229 @@ trait BuildUtils { self: SymbolTable =>
def setSymbol[T <: Tree](tree: T, sym: Symbol): T = { tree.setSymbol(sym); tree }
- def mkAnnotationCtor(tree: Tree, args: List[Tree]): Tree = tree match {
- case ident: Ident => Apply(self.Select(New(ident), nme.CONSTRUCTOR: TermName), args)
- case call @ Apply(Select(New(ident: Ident), nme.CONSTRUCTOR), _) =>
- if (args.nonEmpty)
- throw new IllegalArgumentException("Can't splice annotation that already contains args with extra args, consider merging these lists together")
- call
- case _ => throw new IllegalArgumentException(s"Tree ${showRaw(tree)} isn't a correct representation of annotation, consider passing Ident as a first argument")
+ def mkAnnotation(tree: Tree): Tree = tree match {
+ case SyntacticNew(Nil, SyntacticApplied(SyntacticTypeApplied(_, _), _) :: Nil, emptyValDef, Nil) =>
+ tree
+ case _ =>
+ throw new IllegalArgumentException(s"Tree ${showRaw(tree)} isn't a correct representation of annotation." +
+ """Consider reformatting it into a q"new $name[..$targs](...$argss)" shape""")
+ }
+
+ def mkAnnotation(trees: List[Tree]): List[Tree] = trees.map(mkAnnotation)
+
+ def mkVparamss(argss: List[List[ValDef]]): List[List[ValDef]] = argss.map(_.map(mkParam))
+
+ def mkParam(vd: ValDef): ValDef = {
+ var newmods = (vd.mods | PARAM) & (~DEFERRED)
+ if (vd.rhs.nonEmpty) newmods |= DEFAULTPARAM
+ copyValDef(vd)(mods = newmods)
+ }
+
+ def mkTparams(tparams: List[Tree]): List[TypeDef] =
+ tparams.map {
+ case td: TypeDef => copyTypeDef(td)(mods = (td.mods | PARAM) & (~DEFERRED))
+ case other => throw new IllegalArgumentException("can't splice $other as type parameter")
+ }
+
+ def mkRefineStat(stat: Tree): Tree = {
+ stat match {
+ case dd: DefDef => require(dd.rhs.isEmpty, "can't use DefDef with non-empty body as refine stat")
+ case vd: ValDef => require(vd.rhs.isEmpty, "can't use ValDef with non-empty rhs as refine stat")
+ case td: TypeDef =>
+ case _ => throw new IllegalArgumentException(s"not legal refine stat: $stat")
+ }
+ stat
}
- object FlagsAsBits extends FlagsAsBitsExtractor {
+ def mkRefineStat(stats: List[Tree]): List[Tree] = stats.map(mkRefineStat)
+
+ object ScalaDot extends ScalaDotExtractor {
+ def apply(name: Name): Tree = gen.scalaDot(name)
+ def unapply(tree: Tree): Option[Name] = tree match {
+ case Select(id @ Ident(nme.scala_), name) if id.symbol == ScalaPackage => Some(name)
+ case _ => None
+ }
+ }
+
+ def mkEarlyDef(defn: Tree): Tree = defn match {
+ case vdef @ ValDef(mods, _, _, _) if !mods.isDeferred =>
+ copyValDef(vdef)(mods = mods | PRESUPER)
+ case tdef @ TypeDef(mods, _, _, _) =>
+ copyTypeDef(tdef)(mods = mods | PRESUPER)
+ case _ =>
+ throw new IllegalArgumentException(s"not legal early def: $defn")
+ }
+
+ def mkEarlyDef(defns: List[Tree]): List[Tree] = defns.map(mkEarlyDef)
+
+ def RefTree(qual: Tree, sym: Symbol) = self.RefTree(qual, sym.name) setSymbol sym
+
+ object FlagsRepr extends FlagsReprExtractor {
+ def apply(bits: Long): FlagSet = bits
def unapply(flags: Long): Some[Long] = Some(flags)
}
- object TypeApplied extends TypeAppliedExtractor {
+ object SyntacticTypeApplied extends SyntacticTypeAppliedExtractor {
+ def apply(tree: Tree, targs: List[Tree]): Tree =
+ if (targs.isEmpty) tree
+ else if (tree.isTerm) TypeApply(tree, targs)
+ else if (tree.isType) AppliedTypeTree(tree, targs)
+ else throw new IllegalArgumentException(s"can't apply types to $tree")
+
def unapply(tree: Tree): Some[(Tree, List[Tree])] = tree match {
case TypeApply(fun, targs) => Some((fun, targs))
+ case AppliedTypeTree(tpe, targs) => Some((tpe, targs))
case _ => Some((tree, Nil))
}
}
- object Applied extends AppliedExtractor {
+ object SyntacticApplied extends SyntacticAppliedExtractor {
+ def apply(tree: Tree, argss: List[List[Tree]]): Tree =
+ argss.foldLeft(tree) { Apply(_, _) }
+
def unapply(tree: Tree): Some[(Tree, List[List[Tree]])] = {
val treeInfo.Applied(fun, targs, argss) = tree
- targs match {
- case Nil => Some((fun, argss))
- case _ => Some((TypeApply(fun, targs), argss))
+ Some((SyntacticTypeApplied(fun, targs), argss))
+ }
+ }
+
+ private object UnCtor {
+ def unapply(tree: Tree): Option[(Modifiers, List[List[ValDef]], List[Tree])] = tree match {
+ case DefDef(mods, nme.MIXIN_CONSTRUCTOR, _, _, _, Block(lvdefs, _)) =>
+ Some(mods | Flag.TRAIT, Nil, lvdefs)
+ case DefDef(mods, nme.CONSTRUCTOR, Nil, vparamss, _, Block(lvdefs :+ _, _)) =>
+ Some(mods, vparamss, lvdefs)
+ case _ => None
+ }
+ }
+
+ private object UnMkTemplate {
+ def unapply(templ: Template): Option[(List[Tree], ValDef, Modifiers, List[List[ValDef]], List[Tree], List[Tree])] = {
+ val Template(parents, selfdef, tbody) = templ
+ def result(ctorMods: Modifiers, vparamss: List[List[ValDef]], edefs: List[Tree], body: List[Tree]) =
+ Some((parents, selfdef, ctorMods, vparamss, edefs, body))
+ def indexOfCtor(trees: List[Tree]) =
+ trees.indexWhere { case UnCtor(_, _, _) => true ; case _ => false }
+
+ if (tbody forall treeInfo.isInterfaceMember)
+ result(NoMods | Flag.TRAIT, Nil, Nil, tbody)
+ else if (indexOfCtor(tbody) == -1)
+ None
+ else {
+ val (rawEdefs, rest) = tbody.span(treeInfo.isEarlyDef)
+ val (gvdefs, etdefs) = rawEdefs.partition(treeInfo.isEarlyValDef)
+ val (fieldDefs, UnCtor(ctorMods, ctorVparamss, lvdefs) :: body) = rest.splitAt(indexOfCtor(rest))
+ val evdefs = gvdefs.zip(lvdefs).map {
+ case (gvdef @ ValDef(_, _, tpt: TypeTree, _), ValDef(_, _, _, rhs)) =>
+ copyValDef(gvdef)(tpt = tpt.original, rhs = rhs)
+ }
+ val edefs = evdefs ::: etdefs
+ if (ctorMods.isTrait)
+ result(ctorMods, Nil, edefs, body)
+ else {
+ // undo conversion from (implicit ... ) to ()(implicit ... ) when its the only parameter section
+ val vparamssRestoredImplicits = ctorVparamss match {
+ case Nil :: (tail @ ((head :: _) :: _)) if head.mods.isImplicit => tail
+ case other => other
+ }
+ // undo flag modifications by mergeing flag info from constructor args and fieldDefs
+ val modsMap = fieldDefs.map { case ValDef(mods, name, _, _) => name -> mods }.toMap
+ val vparamss = mmap(vparamssRestoredImplicits) { vd =>
+ val originalMods = modsMap(vd.name) | (vd.mods.flags & DEFAULTPARAM)
+ atPos(vd.pos)(ValDef(originalMods, vd.name, vd.tpt, vd.rhs))
+ }
+ result(ctorMods, vparamss, edefs, body)
+ }
}
}
}
object SyntacticClassDef extends SyntacticClassDefExtractor {
def apply(mods: Modifiers, name: TypeName, tparams: List[TypeDef],
- constrMods: Modifiers, vparamss: List[List[ValDef]], parents: List[Tree],
- selfdef: ValDef, body: List[Tree]): Tree =
- ClassDef(mods, name, tparams, gen.mkTemplate(parents, selfdef, constrMods, vparamss, body, NoPosition))
-
- def unapply(tree: Tree): Option[(Modifiers, TypeName, List[TypeDef], Modifiers,
- List[List[ValDef]], List[Tree], ValDef, List[Tree])] = tree match {
- case ClassDef(mods, name, tparams, Template(parents, selfdef, tbody)) =>
- // extract generated fieldDefs and constructor
- val (defs, (ctor: DefDef) :: body) = tbody.splitAt(tbody.indexWhere {
- case DefDef(_, nme.CONSTRUCTOR, _, _, _, _) => true
- case _ => false
- })
- val (earlyDefs, fieldDefs) = defs.span(treeInfo.isEarlyDef)
-
- // undo conversion from (implicit ... ) to ()(implicit ... ) when its the only parameter section
- val vparamssRestoredImplicits = ctor.vparamss match {
- case Nil :: rest if !rest.isEmpty && !rest.head.isEmpty && rest.head.head.mods.isImplicit => rest
- case other => other
- }
+ constrMods: Modifiers, vparamss: List[List[ValDef]], earlyDefs: List[Tree],
+ parents: List[Tree], selfdef: ValDef, body: List[Tree]): ClassDef = {
+ val extraFlags = PARAMACCESSOR | (if (mods.isCase) CASEACCESSOR else 0L)
+ val vparamss0 = vparamss.map { _.map { vd => copyValDef(vd)(mods = (vd.mods | extraFlags) & (~DEFERRED)) } }
+ val tparams0 = mkTparams(tparams)
+ val parents0 = gen.mkParents(mods,
+ if (mods.isCase) parents.filter {
+ case ScalaDot(tpnme.Product | tpnme.Serializable | tpnme.AnyRef) => false
+ case _ => true
+ } else parents
+ )
+ val body0 = earlyDefs ::: body
+ val templ = gen.mkTemplate(parents0, selfdef, constrMods, vparamss0, body0)
+ gen.mkClassDef(mods, name, tparams0, templ)
+ }
- // undo flag modifications by mergeing flag info from constructor args and fieldDefs
- val modsMap = fieldDefs.map { case ValDef(mods, name, _, _) => name -> mods }.toMap
- val vparamss = mmap(vparamssRestoredImplicits) { vd =>
- val originalMods = modsMap(vd.name) | (vd.mods.flags & DEFAULTPARAM)
- atPos(vd.pos)(ValDef(originalMods, vd.name, vd.tpt, vd.rhs))
- }
+ def unapply(tree: Tree): Option[(Modifiers, TypeName, List[TypeDef], Modifiers, List[List[ValDef]],
+ List[Tree], List[Tree], ValDef, List[Tree])] = tree match {
+ case ClassDef(mods, name, tparams, UnMkTemplate(parents, selfdef, ctorMods, vparamss, earlyDefs, body))
+ if !ctorMods.isTrait && !ctorMods.hasFlag(JAVA) =>
+ Some((mods, name, tparams, ctorMods, vparamss, earlyDefs, parents, selfdef, body))
+ case _ =>
+ None
+ }
+ }
+
+ object SyntacticTraitDef extends SyntacticTraitDefExtractor {
+ def apply(mods: Modifiers, name: TypeName, tparams: List[TypeDef], earlyDefs: List[Tree],
+ parents: List[Tree], selfdef: ValDef, body: List[Tree]): ClassDef = {
+ val mods0 = mods | TRAIT | ABSTRACT
+ val templ = gen.mkTemplate(parents, selfdef, Modifiers(TRAIT), Nil, earlyDefs ::: body)
+ gen.mkClassDef(mods0, name, mkTparams(tparams), templ)
+ }
+
+ def unapply(tree: Tree): Option[(Modifiers, TypeName, List[TypeDef],
+ List[Tree], List[Tree], ValDef, List[Tree])] = tree match {
+ case ClassDef(mods, name, tparams, UnMkTemplate(parents, selfdef, ctorMods, vparamss, earlyDefs, body))
+ if mods.isTrait =>
+ Some((mods, name, tparams, earlyDefs, parents, selfdef, body))
+ case _ => None
+ }
+ }
+
+ object SyntacticModuleDef extends SyntacticModuleDefExtractor {
+ def apply(mods: Modifiers, name: TermName, earlyDefs: List[Tree],
+ parents: List[Tree], selfdef: ValDef, body: List[Tree]) =
+ ModuleDef(mods, name, gen.mkTemplate(parents, selfdef, NoMods, Nil, earlyDefs ::: body))
- Some((mods, name, tparams, ctor.mods, vparamss, parents, selfdef, earlyDefs ::: body))
+ def unapply(tree: Tree): Option[(Modifiers, TermName, List[Tree], List[Tree], ValDef, List[Tree])] = tree match {
+ case ModuleDef(mods, name, UnMkTemplate(parents, selfdef, _, _, earlyDefs, body)) =>
+ Some((mods, name, earlyDefs, parents, selfdef, body))
case _ =>
None
}
}
- object TupleN extends TupleNExtractor {
+ private trait ScalaMemberRef {
+ val symbols: Seq[Symbol]
+ def result(name: Name): Option[Symbol] =
+ symbols.collect { case sym if sym.name == name => sym }.headOption
+ def unapply(tree: Tree): Option[Symbol] = tree match {
+ case id @ Ident(name) if symbols.contains(id.symbol) && name == id.symbol.name =>
+ Some(id.symbol)
+ case Select(scalapkg @ Ident(nme.scala_), name) if scalapkg.symbol == ScalaPackage =>
+ result(name)
+ case Select(Select(Ident(nme.ROOTPKG), nme.scala_), name) =>
+ result(name)
+ case _ => None
+ }
+ }
+ private object TupleClassRef extends ScalaMemberRef {
+ val symbols = TupleClass.filter { _ != null }.toSeq
+ }
+ private object TupleCompanionRef extends ScalaMemberRef {
+ val symbols = TupleClassRef.symbols.map { _.companionModule }
+ }
+ private object UnitClassRef extends ScalaMemberRef {
+ val symbols = Seq(UnitClass)
+ }
+ private object FunctionClassRef extends ScalaMemberRef {
+ val symbols = FunctionClass.toSeq
+ }
+
+ object SyntacticTuple extends SyntacticTupleExtractor {
def apply(args: List[Tree]): Tree = args match {
case Nil => Literal(Constant(()))
case _ =>
@@ -145,18 +294,16 @@ trait BuildUtils { self: SymbolTable =>
def unapply(tree: Tree): Option[List[Tree]] = tree match {
case Literal(Constant(())) =>
Some(Nil)
- case Apply(id: Ident, args)
- if args.length <= MaxTupleArity && id.symbol == TupleClass(args.length).companionModule =>
- Some(args)
- case Apply(Select(Ident(nme.scala_), TermName(tuple)), args)
- if args.length <= MaxTupleArity && tuple == TupleClass(args.length).name =>
+ case Apply(TupleCompanionRef(sym), args)
+ if args.length <= MaxTupleArity
+ && sym == TupleClass(args.length).companionModule =>
Some(args)
case _ =>
None
}
}
- object TupleTypeN extends TupleNExtractor {
+ object SyntacticTupleType extends SyntacticTupleExtractor {
def apply(args: List[Tree]): Tree = args match {
case Nil => self.Select(self.Ident(nme.scala_), tpnme.Unit)
case _ =>
@@ -165,20 +312,98 @@ trait BuildUtils { self: SymbolTable =>
}
def unapply(tree: Tree): Option[List[Tree]] = tree match {
- case Select(Ident(nme.scala_), tpnme.Unit) =>
+ case UnitClassRef(_) =>
Some(Nil)
- case AppliedTypeTree(id: Ident, args)
- if args.length <= MaxTupleArity && id.symbol == TupleClass(args.length) =>
- Some(args)
- case AppliedTypeTree(Select(id @ Ident(nme.scala_), TermName(tuple)), args)
- if args.length <= MaxTupleArity && id.symbol == ScalaPackage && tuple == TupleClass(args.length).name =>
+ case AppliedTypeTree(TupleClassRef(sym), args)
+ if args.length <= MaxTupleArity && sym == TupleClass(args.length) =>
Some(args)
case _ =>
None
}
}
- def RefTree(qual: Tree, sym: Symbol) = self.RefTree(qual, sym.name) setSymbol sym
+ object SyntacticFunctionType extends SyntacticFunctionTypeExtractor {
+ def apply(argtpes: List[Tree], restpe: Tree): Tree = {
+ require(argtpes.length <= MaxFunctionArity + 1, s"Function types with arity bigger than $MaxFunctionArity aren't supported")
+ gen.mkFunctionTypeTree(argtpes, restpe)
+ }
+
+ def unapply(tree: Tree): Option[(List[Tree], Tree)] = tree match {
+ case AppliedTypeTree(FunctionClassRef(sym), args @ (argtpes :+ restpe))
+ if args.length - 1 <= MaxFunctionArity && sym == FunctionClass(args.length - 1) =>
+ Some((argtpes, restpe))
+ case _ => None
+ }
+ }
+
+ object SyntacticBlock extends SyntacticBlockExtractor {
+ def apply(stats: List[Tree]): Tree = gen.mkBlock(stats)
+
+ def unapply(tree: Tree): Option[List[Tree]] = tree match {
+ case self.Block(stats, expr) => Some(stats :+ expr)
+ case _ if tree.isTerm => Some(tree :: Nil)
+ case _ => None
+ }
+ }
+
+ object SyntacticFunction extends SyntacticFunctionExtractor {
+ def apply(params: List[ValDef], body: Tree): Tree = {
+ val params0 = params.map { arg =>
+ require(arg.rhs.isEmpty, "anonymous functions don't support default values")
+ mkParam(arg)
+ }
+ Function(params0, body)
+ }
+
+ def unapply(tree: Tree): Option[(List[ValDef], Tree)] = tree match {
+ case Function(params, body) => Some((params, body))
+ case _ => None
+ }
+ }
+
+ object SyntacticNew extends SyntacticNewExtractor {
+ def apply(earlyDefs: List[Tree], parents: List[Tree], selfdef: ValDef, body: List[Tree]): Tree =
+ gen.mkNew(parents, selfdef, earlyDefs ::: body, NoPosition, NoPosition)
+
+ def unapply(tree: Tree): Option[(List[Tree], List[Tree], ValDef, List[Tree])] = tree match {
+ case SyntacticApplied(Select(New(SyntacticTypeApplied(ident, targs)), nme.CONSTRUCTOR), argss) =>
+ Some((Nil, SyntacticApplied(SyntacticTypeApplied(ident, targs), argss) :: Nil, emptyValDef, Nil))
+ case SyntacticBlock(SyntacticClassDef(_, tpnme.ANON_CLASS_NAME, Nil, _, List(Nil), earlyDefs, parents, selfdef, body) ::
+ Apply(Select(New(Ident(tpnme.ANON_CLASS_NAME)), nme.CONSTRUCTOR), Nil) :: Nil) =>
+ Some((earlyDefs, parents, selfdef, body))
+ case _ =>
+ None
+ }
+ }
+
+ object SyntacticDefDef extends SyntacticDefDefExtractor {
+ def apply(mods: Modifiers, name: TermName, tparams: List[Tree], vparamss: List[List[ValDef]], tpt: Tree, rhs: Tree): DefDef =
+ DefDef(mods, name, mkTparams(tparams), mkVparamss(vparamss), tpt, rhs)
+
+ def unapply(tree: Tree): Option[(Modifiers, TermName, List[Tree], List[List[ValDef]], Tree, Tree)] = tree match {
+ case DefDef(mods, name, tparams, vparamss, tpt, rhs) => Some((mods, name, tparams, vparamss, tpt, rhs))
+ case _ => None
+ }
+ }
+
+ trait SyntacticValDefBase extends SyntacticValDefExtractor {
+ val isMutable: Boolean
+
+ def apply(mods: Modifiers, name: TermName, tpt: Tree, rhs: Tree) = {
+ val mods1 = if (isMutable) mods | MUTABLE else mods
+ ValDef(mods1, name, tpt, rhs)
+ }
+
+ def unapply(tree: Tree): Option[(Modifiers, TermName, Tree, Tree)] = tree match {
+ case ValDef(mods, name, tpt, rhs) if mods.hasFlag(MUTABLE) == isMutable =>
+ Some((mods, name, tpt, rhs))
+ case _ =>
+ None
+ }
+ }
+
+ object SyntacticValDef extends SyntacticValDefBase { val isMutable = false }
+ object SyntacticVarDef extends SyntacticValDefBase { val isMutable = true }
}
val build: BuildApi = new BuildImpl
diff --git a/src/reflect/scala/reflect/internal/ClassfileConstants.scala b/src/reflect/scala/reflect/internal/ClassfileConstants.scala
index a7ce044780..e0a6757d34 100644
--- a/src/reflect/scala/reflect/internal/ClassfileConstants.scala
+++ b/src/reflect/scala/reflect/internal/ClassfileConstants.scala
@@ -335,13 +335,8 @@ object ClassfileConstants {
abstract class FlagTranslation {
import Flags._
- private var isAnnotation = false
- private var isClass = false
- private def initFields(flags: Int) = {
- isAnnotation = (flags & JAVA_ACC_ANNOTATION) != 0
- isClass = false
- }
- private def translateFlag(jflag: Int): Long = (jflag: @switch) match {
+ private def isAnnotation(flags: Int): Boolean = (flags & JAVA_ACC_ANNOTATION) != 0
+ private def translateFlag(jflag: Int, isAnnotation: Boolean, isClass: Boolean): Long = (jflag: @switch) match {
case JAVA_ACC_PRIVATE => PRIVATE
case JAVA_ACC_PROTECTED => PROTECTED
case JAVA_ACC_FINAL => FINAL
@@ -351,31 +346,28 @@ object ClassfileConstants {
case JAVA_ACC_INTERFACE => if (isAnnotation) 0L else TRAIT | INTERFACE | ABSTRACT
case _ => 0L
}
- private def translateFlags(jflags: Int, baseFlags: Long): Long = {
+ private def translateFlags(jflags: Int, baseFlags: Long, isAnnotation: Boolean, isClass: Boolean): Long = {
+ def translateFlag0(jflags: Int): Long = translateFlag(jflags, isAnnotation, isClass)
var res: Long = JAVA | baseFlags
/* fast, elegant, maintainable, pick any two... */
- res |= translateFlag(jflags & JAVA_ACC_PRIVATE)
- res |= translateFlag(jflags & JAVA_ACC_PROTECTED)
- res |= translateFlag(jflags & JAVA_ACC_FINAL)
- res |= translateFlag(jflags & JAVA_ACC_SYNTHETIC)
- res |= translateFlag(jflags & JAVA_ACC_STATIC)
- res |= translateFlag(jflags & JAVA_ACC_ABSTRACT)
- res |= translateFlag(jflags & JAVA_ACC_INTERFACE)
+ res |= translateFlag0(jflags & JAVA_ACC_PRIVATE)
+ res |= translateFlag0(jflags & JAVA_ACC_PROTECTED)
+ res |= translateFlag0(jflags & JAVA_ACC_FINAL)
+ res |= translateFlag0(jflags & JAVA_ACC_SYNTHETIC)
+ res |= translateFlag0(jflags & JAVA_ACC_STATIC)
+ res |= translateFlag0(jflags & JAVA_ACC_ABSTRACT)
+ res |= translateFlag0(jflags & JAVA_ACC_INTERFACE)
res
}
def classFlags(jflags: Int): Long = {
- initFields(jflags)
- isClass = true
- translateFlags(jflags, 0)
+ translateFlags(jflags, 0, isAnnotation(jflags), isClass = true)
}
def fieldFlags(jflags: Int): Long = {
- initFields(jflags)
- translateFlags(jflags, if ((jflags & JAVA_ACC_FINAL) == 0) MUTABLE else 0)
+ translateFlags(jflags, if ((jflags & JAVA_ACC_FINAL) == 0) MUTABLE else 0 , isAnnotation(jflags), isClass = false)
}
def methodFlags(jflags: Int): Long = {
- initFields(jflags)
- translateFlags(jflags, if ((jflags & JAVA_ACC_BRIDGE) != 0) BRIDGE | ARTIFACT else 0)
+ translateFlags(jflags, if ((jflags & JAVA_ACC_BRIDGE) != 0) BRIDGE | ARTIFACT else 0, isAnnotation(jflags), isClass = false)
}
}
object FlagTranslation extends FlagTranslation { }
diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala
index 92f2a64ce9..90a1ab39d5 100644
--- a/src/reflect/scala/reflect/internal/Definitions.scala
+++ b/src/reflect/scala/reflect/internal/Definitions.scala
@@ -1004,6 +1004,7 @@ trait Definitions extends api.StandardDefinitions {
lazy val ThrowsClass = requiredClass[scala.throws[_]]
lazy val TransientAttr = requiredClass[scala.transient]
lazy val UncheckedClass = requiredClass[scala.unchecked]
+ lazy val UncheckedBoundsClass = getClassIfDefined("scala.reflect.internal.annotations.uncheckedBounds")
lazy val UnspecializedClass = requiredClass[scala.annotation.unspecialized]
lazy val VolatileAttr = requiredClass[scala.volatile]
diff --git a/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala b/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala
index 073f124630..0eeca4aace 100644
--- a/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala
+++ b/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala
@@ -19,6 +19,9 @@ trait ExistentialsAndSkolems {
* can be deskolemized to the original type parameter. (A skolem is a
* representation of a bound variable when viewed inside its scope.)
* !!!Adriaan: this does not work for hk types.
+ *
+ * Skolems will be created at level 0, rather than the current value
+ * of `skolemizationLevel`. (See SI-7782)
*/
def deriveFreshSkolems(tparams: List[Symbol]): List[Symbol] = {
class Deskolemizer extends LazyType {
@@ -30,7 +33,11 @@ trait ExistentialsAndSkolems {
sym setInfo sym.deSkolemize.info.substSym(typeParams, typeSkolems)
}
}
- (new Deskolemizer).typeSkolems
+
+ val saved = skolemizationLevel
+ skolemizationLevel = 0
+ try new Deskolemizer().typeSkolems
+ finally skolemizationLevel = saved
}
def isRawParameter(sym: Symbol) = // is it a type parameter leaked by a raw type?
diff --git a/src/reflect/scala/reflect/internal/Importers.scala b/src/reflect/scala/reflect/internal/Importers.scala
index b0828e9c54..9ddf156128 100644
--- a/src/reflect/scala/reflect/internal/Importers.scala
+++ b/src/reflect/scala/reflect/internal/Importers.scala
@@ -411,6 +411,11 @@ trait Importers extends api.Importers { to: SymbolTable =>
if (my != null) {
addFixup(recreatedTreeCompleter(their, my))
tryFixup()
+ // we have to be careful with position import as some shared trees
+ // like EmptyTree, emptyValDef don't support position assignment
+ if (their.pos != NoPosition) {
+ my.setPos(importPosition(their.pos))
+ }
}
my
}
diff --git a/src/reflect/scala/reflect/internal/Names.scala b/src/reflect/scala/reflect/internal/Names.scala
index 0d78e24548..ed248d6e1e 100644
--- a/src/reflect/scala/reflect/internal/Names.scala
+++ b/src/reflect/scala/reflect/internal/Names.scala
@@ -18,6 +18,18 @@ trait Names extends api.Names {
final val nameDebug = false
+ // Ideally we would just synchronize unconditionally and let HotSpot's Biased Locking
+ // kick in in the compiler universe, where access to the lock is single threaded. But,
+ // objects created in the first 4 seconds of the JVM startup aren't eligible for biased
+ // locking.
+ //
+ // We might also be able to accept the performance hit, but we don't have tools to
+ // detect performance regressions.
+ //
+ // Discussion: https://groups.google.com/forum/#!search/biased$20scala-internals/scala-internals/0cYB7SkJ-nM/47MLhsgw8jwJ
+ protected def synchronizeNames: Boolean = false
+ private val nameLock: Object = new Object
+
/** Memory to store all names sequentially. */
var chrs: Array[Char] = new Array[Char](NAME_SIZE)
private var nc = 0
@@ -64,61 +76,68 @@ trait Names extends api.Names {
}
/** Create a term name from the characters in cs[offset..offset+len-1]. */
- def newTermName(cs: Array[Char], offset: Int, len: Int): TermName =
+ final def newTermName(cs: Array[Char], offset: Int, len: Int): TermName =
newTermName(cs, offset, len, cachedString = null)
- def newTermName(cs: Array[Char]): TermName = newTermName(cs, 0, cs.length)
- def newTypeName(cs: Array[Char]): TypeName = newTypeName(cs, 0, cs.length)
+ final def newTermName(cs: Array[Char]): TermName = newTermName(cs, 0, cs.length)
+
+ final def newTypeName(cs: Array[Char]): TypeName = newTypeName(cs, 0, cs.length)
/** Create a term name from the characters in cs[offset..offset+len-1].
* TODO - have a mode where name validation is performed at creation time
* (e.g. if a name has the string "$class" in it, then fail if that
* string is not at the very end.)
*/
- protected def newTermName(cs: Array[Char], offset: Int, len: Int, cachedString: String): TermName = {
- val h = hashValue(cs, offset, len) & HASH_MASK
- var n = termHashtable(h)
- while ((n ne null) && (n.length != len || !equals(n.start, cs, offset, len)))
- n = n.next
-
- if (n ne null) n
- else {
- // The logic order here is future-proofing against the possibility
- // that name.toString will become an eager val, in which case the call
- // to enterChars cannot follow the construction of the TermName.
- val ncStart = nc
- enterChars(cs, offset, len)
- if (cachedString ne null) new TermName_S(ncStart, len, h, cachedString)
- else new TermName_R(ncStart, len, h)
+ final def newTermName(cs: Array[Char], offset: Int, len: Int, cachedString: String): TermName = {
+ def body = {
+ val h = hashValue(cs, offset, len) & HASH_MASK
+ var n = termHashtable(h)
+ while ((n ne null) && (n.length != len || !equals(n.start, cs, offset, len)))
+ n = n.next
+
+ if (n ne null) n
+ else {
+ // The logic order here is future-proofing against the possibility
+ // that name.toString will become an eager val, in which case the call
+ // to enterChars cannot follow the construction of the TermName.
+ val ncStart = nc
+ enterChars(cs, offset, len)
+ if (cachedString ne null) new TermName_S(ncStart, len, h, cachedString)
+ else new TermName_R(ncStart, len, h)
+ }
}
+ if (synchronizeNames) nameLock.synchronized(body) else body
}
- protected def newTypeName(cs: Array[Char], offset: Int, len: Int, cachedString: String): TypeName =
+
+ final def newTypeName(cs: Array[Char], offset: Int, len: Int, cachedString: String): TypeName =
newTermName(cs, offset, len, cachedString).toTypeName
/** Create a term name from string. */
+ @deprecatedOverriding("To synchronize, use `override def synchronizeNames = true`", "2.11.0") // overridden in https://github.com/scala-ide/scala-ide/blob/master/org.scala-ide.sdt.core/src/scala/tools/eclipse/ScalaPresentationCompiler.scala
def newTermName(s: String): TermName = newTermName(s.toCharArray(), 0, s.length(), null)
/** Create a type name from string. */
+ @deprecatedOverriding("To synchronize, use `override def synchronizeNames = true`", "2.11.0") // overridden in https://github.com/scala-ide/scala-ide/blob/master/org.scala-ide.sdt.core/src/scala/tools/eclipse/ScalaPresentationCompiler.scala
def newTypeName(s: String): TypeName = newTermName(s).toTypeName
/** Create a term name from the UTF8 encoded bytes in bs[offset..offset+len-1]. */
- def newTermName(bs: Array[Byte], offset: Int, len: Int): TermName = {
+ final def newTermName(bs: Array[Byte], offset: Int, len: Int): TermName = {
val chars = Codec.fromUTF8(bs, offset, len)
newTermName(chars, 0, chars.length)
}
- def newTermNameCached(s: String): TermName =
+ final def newTermNameCached(s: String): TermName =
newTermName(s.toCharArray(), 0, s.length(), cachedString = s)
- def newTypeNameCached(s: String): TypeName =
+ final def newTypeNameCached(s: String): TypeName =
newTypeName(s.toCharArray(), 0, s.length(), cachedString = s)
/** Create a type name from the characters in cs[offset..offset+len-1]. */
- def newTypeName(cs: Array[Char], offset: Int, len: Int): TypeName =
+ final def newTypeName(cs: Array[Char], offset: Int, len: Int): TypeName =
newTermName(cs, offset, len, cachedString = null).toTypeName
/** Create a type name from the UTF8 encoded bytes in bs[offset..offset+len-1]. */
- def newTypeName(bs: Array[Byte], offset: Int, len: Int): TypeName =
+ final def newTypeName(bs: Array[Byte], offset: Int, len: Int): TypeName =
newTermName(bs, offset, len).toTypeName
/**
@@ -128,6 +147,8 @@ trait Names extends api.Names {
* For those of METHOD sort, its descriptor is stored ie has a leading '('
*
* can-multi-thread
+ * TODO SI-6240 !!! JZ Really? the constructors TermName and TypeName publish unconstructed `this` references
+ * into the hash tables; we could observe them here before the subclass constructor completes.
*/
final def lookupTypeName(cs: Array[Char]): TypeName = { lookupTypeNameIfExisting(cs, true) }
@@ -494,23 +515,26 @@ trait Names extends api.Names {
override def toString = new String(chrs, index, len)
}
+ // SYNCNOTE: caller to constructor must synchronize if `synchronizeNames` is enabled
sealed abstract class TermName(index0: Int, len0: Int, hash: Int) extends Name(index0, len0) {
type ThisNameType = TermName
protected[this] def thisName: TermName = this
-
val next: TermName = termHashtable(hash)
termHashtable(hash) = this
def isTermName: Boolean = true
def isTypeName: Boolean = false
def toTermName: TermName = this
def toTypeName: TypeName = {
- val h = hashValue(chrs, index, len) & HASH_MASK
- var n = typeHashtable(h)
- while ((n ne null) && n.start != index)
- n = n.next
-
- if (n ne null) n
- else createCompanionName(h)
+ def body = {
+ val h = hashValue(chrs, index, len) & HASH_MASK
+ var n = typeHashtable(h)
+ while ((n ne null) && n.start != index)
+ n = n.next
+
+ if (n ne null) n
+ else createCompanionName(h)
+ }
+ if (synchronizeNames) nameLock.synchronized(body) else body
}
def newName(str: String): TermName = newTermName(str)
def companionName: TypeName = toTypeName
@@ -518,6 +542,7 @@ trait Names extends api.Names {
newTermName(chrs, start + from, to - from)
def nameKind = "term"
+ /** SYNCNOTE: caller must synchronize if `synchronizeNames` is enabled */
protected def createCompanionName(h: Int): TypeName
}
@@ -534,16 +559,20 @@ trait Names extends api.Names {
val next: TypeName = typeHashtable(hash)
typeHashtable(hash) = this
+
def isTermName: Boolean = false
def isTypeName: Boolean = true
def toTermName: TermName = {
- val h = hashValue(chrs, index, len) & HASH_MASK
- var n = termHashtable(h)
- while ((n ne null) && n.start != index)
- n = n.next
-
- if (n ne null) n
- else createCompanionName(h)
+ def body = {
+ val h = hashValue(chrs, index, len) & HASH_MASK
+ var n = termHashtable(h)
+ while ((n ne null) && n.start != index)
+ n = n.next
+
+ if (n ne null) n
+ else createCompanionName(h)
+ }
+ if (synchronizeNames) nameLock.synchronized(body) else body
}
def toTypeName: TypeName = this
def newName(str: String): TypeName = newTypeName(str)
@@ -553,6 +582,7 @@ trait Names extends api.Names {
def nameKind = "type"
override def decode = if (nameDebug) super.decode + "!" else super.decode
+ /** SYNCNOTE: caller must synchronize if `synchronizeNames` is enabled */
protected def createCompanionName(h: Int): TermName
}
diff --git a/src/reflect/scala/reflect/internal/Printers.scala b/src/reflect/scala/reflect/internal/Printers.scala
index 1603029340..206dff44e2 100644
--- a/src/reflect/scala/reflect/internal/Printers.scala
+++ b/src/reflect/scala/reflect/internal/Printers.scala
@@ -574,6 +574,8 @@ trait Printers extends api.Printers { self: SymbolTable =>
case refTree: RefTree =>
if (tree.symbol.name != refTree.name) print("[", tree.symbol, " aka ", refTree.name, "]")
else print(tree.symbol)
+ case defTree: DefTree =>
+ print(tree.symbol)
case _ =>
print(tree.symbol.name)
}
diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala
index 7a2287664a..686ebf5a1e 100644
--- a/src/reflect/scala/reflect/internal/StdNames.scala
+++ b/src/reflect/scala/reflect/internal/StdNames.scala
@@ -40,6 +40,7 @@ trait StdNames {
def result: Set[TermName] = try kws finally kws = null
}
+ private[reflect] def compactifyName(orig: String): String = compactify(orig)
private final object compactify extends (String => String) {
val md5 = MessageDigest.getInstance("MD5")
@@ -247,8 +248,11 @@ trait StdNames {
final val Quasiquote: NameType = "Quasiquote"
// quasiquote-specific names
- final val QUASIQUOTE_MODS: NameType = "$quasiquote$mods$"
- final val QUASIQUOTE_TUPLE: NameType = "$quasiquote$tuple$"
+ final val QUASIQUOTE_MODS: NameType = "$quasiquote$mods$"
+ final val QUASIQUOTE_TUPLE: NameType = "$quasiquote$tuple$"
+ final val QUASIQUOTE_FUNCTION: NameType = "$quasiquote$function$"
+ final val QUASIQUOTE_REFINE_STAT: NameType = "$quasiquote$refine$stat$"
+ final val QUASIQUOTE_EARLY_DEF: NameType = "$quasiquote$early$def$"
// Annotation simple names, used in Namer
final val BeanPropertyAnnot: NameType = "BeanProperty"
@@ -560,7 +564,6 @@ trait StdNames {
val Any: NameType = "Any"
val AnyVal: NameType = "AnyVal"
val Apply: NameType = "Apply"
- val Applied: NameType = "Applied"
val ArrayAnnotArg: NameType = "ArrayAnnotArg"
val Block: NameType = "Block"
val ConstantType: NameType = "ConstantType"
@@ -568,7 +571,7 @@ trait StdNames {
val EmptyPackageClass: NameType = "EmptyPackageClass"
val False : NameType = "False"
val Flag : NameType = "Flag"
- val FlagsAsBits: NameType = "FlagsAsBits"
+ val FlagsRepr: NameType = "FlagsRepr"
val Ident: NameType = "Ident"
val Import: NameType = "Import"
val Literal: NameType = "Literal"
@@ -578,6 +581,7 @@ trait StdNames {
val New: NameType = "New"
val NoFlags: NameType = "NoFlags"
val NoSymbol: NameType = "NoSymbol"
+ val NoMods: NameType = "NoMods"
val Nothing: NameType = "Nothing"
val Null: NameType = "Null"
val Object: NameType = "Object"
@@ -586,15 +590,25 @@ trait StdNames {
val Select: NameType = "Select"
val SelectFromTypeTree: NameType = "SelectFromTypeTree"
val StringContext: NameType = "StringContext"
+ val SyntacticApplied: NameType = "SyntacticApplied"
+ val SyntacticBlock: NameType = "SyntacticBlock"
val SyntacticClassDef: NameType = "SyntacticClassDef"
+ val SyntacticDefDef: NameType = "SyntacticDefDef"
+ val SyntacticFunction: NameType = "SyntacticFunction"
+ val SyntacticFunctionType: NameType= "SyntacticFunctionType"
+ val SyntacticModuleDef: NameType = "SyntacticModuleDef"
+ val SyntacticNew: NameType = "SyntacticNew"
+ val SyntacticTraitDef: NameType = "SyntacticTraitDef"
+ val SyntacticTuple: NameType = "SyntacticTuple"
+ val SyntacticTupleType: NameType = "SyntacticTupleType"
+ val SyntacticTypeApplied: NameType = "SyntacticTypeApplied"
+ val SyntacticValDef: NameType = "SyntacticValDef"
+ val SyntacticVarDef: NameType = "SyntacticVarDef"
val This: NameType = "This"
val ThisType: NameType = "ThisType"
val True : NameType = "True"
val Tuple2: NameType = "Tuple2"
- val TupleN: NameType = "TupleN"
- val TupleTypeN: NameType = "TupleTypeN"
val TYPE_ : NameType = "TYPE"
- val TypeApplied: NameType = "TypeApplied"
val TypeRef: NameType = "TypeRef"
val TypeTree: NameType = "TypeTree"
val UNIT : NameType = "UNIT"
@@ -650,7 +664,6 @@ trait StdNames {
val filter: NameType = "filter"
val finalize_ : NameType = "finalize"
val find_ : NameType = "find"
- val flagsFromBits : NameType = "flagsFromBits"
val flatMap: NameType = "flatMap"
val flatten: NameType = "flatten"
val foldLeft: NameType = "foldLeft"
@@ -681,7 +694,9 @@ trait StdNames {
val materializeWeakTypeTag: NameType = "materializeWeakTypeTag"
val materializeTypeTag: NameType = "materializeTypeTag"
val moduleClass : NameType = "moduleClass"
- val mkAnnotationCtor: NameType = "mkAnnotationCtor"
+ val mkAnnotation: NameType = "mkAnnotation"
+ val mkRefineStat: NameType = "mkRefineStat"
+ val mkEarlyDef: NameType = "mkEarlyDef"
val ne: NameType = "ne"
val newArray: NameType = "newArray"
val newFreeTerm: NameType = "newFreeTerm"
@@ -721,6 +736,7 @@ trait StdNames {
val staticModule : NameType = "staticModule"
val staticPackage : NameType = "staticPackage"
val synchronized_ : NameType = "synchronized"
+ val ScalaDot: NameType = "ScalaDot"
val TermName: NameType = "TermName"
val this_ : NameType = "this"
val thisPrefix : NameType = "thisPrefix"
diff --git a/src/reflect/scala/reflect/internal/TreeGen.scala b/src/reflect/scala/reflect/internal/TreeGen.scala
index 1af8c225f5..07fa6fb317 100644
--- a/src/reflect/scala/reflect/internal/TreeGen.scala
+++ b/src/reflect/scala/reflect/internal/TreeGen.scala
@@ -14,7 +14,8 @@ abstract class TreeGen extends macros.TreeBuilder {
def rootScalaDot(name: Name) = Select(rootId(nme.scala_) setSymbol ScalaPackage, name)
def scalaDot(name: Name) = Select(Ident(nme.scala_) setSymbol ScalaPackage, name)
def scalaAnnotationDot(name: Name) = Select(scalaDot(nme.annotation), name)
- def scalaAnyRefConstr = scalaDot(tpnme.AnyRef) setSymbol AnyRefClass // used in ide
+ def scalaAnyRefConstrRaw = scalaDot(tpnme.AnyRef)
+ def scalaAnyRefConstr = scalaAnyRefConstrRaw setSymbol AnyRefClass // used in ide
def scalaFunctionConstr(argtpes: List[Tree], restpe: Tree, abstractFun: Boolean = false): Tree = {
val cls = if (abstractFun)
@@ -185,7 +186,7 @@ abstract class TreeGen extends macros.TreeBuilder {
val needsPackageQualifier = (
(sym ne null)
&& qualsym.isPackage
- && !sym.isDefinedInPackage
+ && !(sym.isDefinedInPackage || sym.moduleClass.isDefinedInPackage) // SI-7817 work around strangeness in post-flatten `Symbol#owner`
)
val pkgQualifier =
if (needsPackageQualifier) {
@@ -324,7 +325,8 @@ abstract class TreeGen extends macros.TreeBuilder {
* body
* }
*/
- def mkTemplate(parents: List[Tree], self: ValDef, constrMods: Modifiers, vparamss: List[List[ValDef]], body: List[Tree], superPos: Position): Template = {
+ def mkTemplate(parents: List[Tree], self: ValDef, constrMods: Modifiers,
+ vparamss: List[List[ValDef]], body: List[Tree], superPos: Position = NoPosition): Template = {
/* Add constructor to template */
// create parameters for <init> as synthetic trees.
@@ -348,10 +350,10 @@ abstract class TreeGen extends macros.TreeBuilder {
}
val lvdefs = evdefs collect { case vdef: ValDef => copyValDef(vdef)(mods = vdef.mods | PRESUPER) }
- val constrs = {
- if (constrMods hasFlag TRAIT) {
- if (body forall treeInfo.isInterfaceMember) List()
- else List(
+ val constr = {
+ if (constrMods.isTrait) {
+ if (body forall treeInfo.isInterfaceMember) None
+ else Some(
atPos(wrappingPos(superPos, lvdefs)) (
DefDef(NoMods, nme.MIXIN_CONSTRUCTOR, List(), List(Nil), TypeTree(), Block(lvdefs, Literal(Constant())))))
} else {
@@ -365,17 +367,74 @@ abstract class TreeGen extends macros.TreeBuilder {
// (the type macros aren't in the trunk yet, but there is a plan for them to land there soon)
// this means that we don't know what will be the arguments of the super call
// therefore here we emit a dummy which gets populated when the template is named and typechecked
- List(
+ Some(
// TODO: previously this was `wrappingPos(superPos, lvdefs ::: argss.flatten)`
// is it going to be a problem that we can no longer include the `argss`?
atPos(wrappingPos(superPos, lvdefs)) (
DefDef(constrMods, nme.CONSTRUCTOR, List(), vparamss1, TypeTree(), Block(lvdefs ::: List(superCall), Literal(Constant())))))
}
}
- constrs foreach (ensureNonOverlapping(_, parents ::: gvdefs, focus=false))
+ constr foreach (ensureNonOverlapping(_, parents ::: gvdefs, focus=false))
// Field definitions for the class - remove defaults.
val fieldDefs = vparamss.flatten map (vd => copyValDef(vd)(mods = vd.mods &~ DEFAULTPARAM, rhs = EmptyTree))
- global.Template(parents, self, gvdefs ::: fieldDefs ::: constrs ::: etdefs ::: rest)
+ global.Template(parents, self, gvdefs ::: fieldDefs ::: constr ++: etdefs ::: rest)
}
+
+ def mkParents(ownerMods: Modifiers, parents: List[Tree], parentPos: Position = NoPosition) =
+ if (ownerMods.isCase) parents ::: List(scalaDot(tpnme.Product), scalaDot(tpnme.Serializable))
+ else if (parents.isEmpty) atPos(parentPos)(scalaAnyRefConstrRaw) :: Nil
+ else parents
+
+ def mkClassDef(mods: Modifiers, name: TypeName, tparams: List[TypeDef], templ: Template): ClassDef = {
+ val isInterface = mods.isTrait && (templ.body forall treeInfo.isInterfaceMember)
+ val mods1 = if (isInterface) (mods | Flags.INTERFACE) else mods
+ ClassDef(mods1, name, tparams, templ)
+ }
+
+ /** Create positioned tree representing an object creation <new parents { stats }>
+ * @param npos the position of the new
+ * @param cpos the position of the anonymous class starting with parents
+ */
+ def mkNew(parents: List[Tree], self: ValDef, stats: List[Tree],
+ npos: Position, cpos: Position): Tree =
+ if (parents.isEmpty)
+ mkNew(List(scalaAnyRefConstr), self, stats, npos, cpos)
+ else if (parents.tail.isEmpty && stats.isEmpty) {
+ // `Parsers.template` no longer differentiates tpts and their argss
+ // e.g. `C()` will be represented as a single tree Apply(Ident(C), Nil)
+ // instead of parents = Ident(C), argss = Nil as before
+ // this change works great for things that are actually templates
+ // but in this degenerate case we need to perform postprocessing
+ val app = treeInfo.dissectApplied(parents.head)
+ atPos(npos union cpos) { New(app.callee, app.argss) }
+ } else {
+ val x = tpnme.ANON_CLASS_NAME
+ atPos(npos union cpos) {
+ Block(
+ List(
+ atPos(cpos) {
+ ClassDef(
+ Modifiers(FINAL), x, Nil,
+ mkTemplate(parents, self, NoMods, List(Nil), stats, cpos.focus))
+ }),
+ atPos(npos) {
+ New(
+ Ident(x) setPos npos.focus,
+ Nil)
+ }
+ )
+ }
+ }
+
+ /** Create a tree representing the function type (argtpes) => restpe */
+ def mkFunctionTypeTree(argtpes: List[Tree], restpe: Tree): Tree =
+ AppliedTypeTree(rootScalaDot(newTypeName("Function" + argtpes.length)), argtpes ::: List(restpe))
+
+ /** Create block of statements `stats` */
+ def mkBlock(stats: List[Tree]): Tree =
+ if (stats.isEmpty) Literal(Constant(()))
+ else if (!stats.last.isTerm) Block(stats, Literal(Constant(())))
+ else if (stats.length == 1) stats.head
+ else Block(stats.init, stats.last)
}
diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala
index fab1f45358..84818a6f42 100644
--- a/src/reflect/scala/reflect/internal/Trees.scala
+++ b/src/reflect/scala/reflect/internal/Trees.scala
@@ -1657,6 +1657,22 @@ trait Trees extends api.Trees { self: SymbolTable =>
case t =>
sys.error("Not a ValDef: " + t + "/" + t.getClass)
}
+ def copyTypeDef(tree: Tree)(
+ mods: Modifiers = null,
+ name: Name = null,
+ tparams: List[TypeDef] = null,
+ rhs: Tree = null
+ ): TypeDef = tree match {
+ case TypeDef(mods0, name0, tparams0, rhs0) =>
+ treeCopy.TypeDef(tree,
+ if (mods eq null) mods0 else mods,
+ if (name eq null) name0 else name,
+ if (tparams eq null) tparams0 else tparams,
+ if (rhs eq null) rhs0 else rhs
+ )
+ case t =>
+ sys.error("Not a TypeDef: " + t + "/" + t.getClass)
+ }
def copyClassDef(tree: Tree)(
mods: Modifiers = null,
name: Name = null,
diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala
index e2157a6063..ca01a4b8e3 100644
--- a/src/reflect/scala/reflect/internal/Types.scala
+++ b/src/reflect/scala/reflect/internal/Types.scala
@@ -4036,12 +4036,13 @@ trait Types
def isConstantType(tp: Type) = tp match {
case ConstantType(_) => true
- case _ => false
+ case _ => false
}
- def isExistentialType(tp: Type): Boolean = tp.dealias match {
- case ExistentialType(_, _) => true
- case _ => false
+ def isExistentialType(tp: Type): Boolean = tp match {
+ case _: ExistentialType => true
+ case tp: Type if tp.dealias ne tp => isExistentialType(tp.dealias)
+ case _ => false
}
def isImplicitMethodType(tp: Type) = tp match {
@@ -4552,6 +4553,12 @@ trait Types
else (ps :+ SerializableTpe).toList
)
+ /** Adds the @uncheckedBounds annotation if the given `tp` has type arguments */
+ final def uncheckedBounds(tp: Type): Type = {
+ if (tp.typeArgs.isEmpty || UncheckedBoundsClass == NoSymbol) tp // second condition for backwards compatibility with older scala-reflect.jar
+ else tp.withAnnotation(AnnotationInfo marker UncheckedBoundsClass.tpe)
+ }
+
/** Members of the given class, other than those inherited
* from Any or AnyRef.
*/
diff --git a/src/reflect/scala/reflect/internal/annotations/uncheckedBounds.scala b/src/reflect/scala/reflect/internal/annotations/uncheckedBounds.scala
new file mode 100644
index 0000000000..a44bb54734
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/annotations/uncheckedBounds.scala
@@ -0,0 +1,13 @@
+package scala.reflect
+package internal
+package annotations
+
+/**
+ * An annotation that designates the annotated type should not be checked for violations of
+ * type parameter bounds in the `refchecks` phase of the compiler. This can be used by synthesized
+ * code the uses an inferred type of an expression as the type of an artifict val/def (for example,
+ * a temporary value introduced by an ANF transform). See [[https://issues.scala-lang.org/browse/SI-7694]].
+ *
+ * @since 2.10.3
+ */
+final class uncheckedBounds extends scala.annotation.StaticAnnotation
diff --git a/src/reflect/scala/reflect/internal/util/SourceFile.scala b/src/reflect/scala/reflect/internal/util/SourceFile.scala
index ea4c9a9b68..6bb4cf3f0e 100644
--- a/src/reflect/scala/reflect/internal/util/SourceFile.scala
+++ b/src/reflect/scala/reflect/internal/util/SourceFile.scala
@@ -86,6 +86,11 @@ object ScriptSourceFile {
stripped
}
+
+ def apply(underlying: BatchSourceFile) = {
+ val headerLen = headerLength(underlying.content)
+ new ScriptSourceFile(underlying, underlying.content drop headerLen, headerLen)
+ }
}
class ScriptSourceFile(underlying: BatchSourceFile, content: Array[Char], override val start: Int) extends BatchSourceFile(underlying.file, content) {
diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
index 93861b0899..fe5d9c4576 100644
--- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala
+++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
@@ -18,7 +18,7 @@ import internal.pickling.ByteCodecs
import internal.pickling.UnPickler
import scala.collection.mutable.{ HashMap, ListBuffer }
import internal.Flags._
-import ReflectionUtils.{staticSingletonInstance, innerSingletonInstance}
+import ReflectionUtils.{staticSingletonInstance, innerSingletonInstance, scalacShouldntLoadClass}
import scala.language.existentials
import scala.runtime.{ScalaRunTime, BoxesRunTime}
@@ -696,8 +696,10 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
val parents = try {
parentsLevel += 1
val jsuperclazz = jclazz.getGenericSuperclass
- val superclazz = if (jsuperclazz == null) AnyTpe else typeToScala(jsuperclazz)
- superclazz :: (jclazz.getGenericInterfaces.toList map typeToScala)
+ val ifaces = jclazz.getGenericInterfaces.toList map typeToScala
+ val isAnnotation = JavaAccFlags(jclazz).isAnnotation
+ if (isAnnotation) AnnotationClass.tpe :: ClassfileAnnotationClass.tpe :: ifaces
+ else (if (jsuperclazz == null) AnyTpe else typeToScala(jsuperclazz)) :: ifaces
} finally {
parentsLevel -= 1
}
@@ -709,14 +711,21 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
def enter(sym: Symbol, mods: JavaAccFlags) =
( if (mods.isStatic) module.moduleClass else clazz ).info.decls enter sym
- for (jinner <- jclazz.getDeclaredClasses)
+ def enterEmptyCtorIfNecessary(): Unit = {
+ if (jclazz.getConstructors.isEmpty)
+ clazz.info.decls.enter(clazz.newClassConstructor(NoPosition))
+ }
+
+ for (jinner <- jclazz.getDeclaredClasses) {
jclassAsScala(jinner) // inner class is entered as a side-effect
// no need to call enter explicitly
+ }
pendingLoadActions ::= { () =>
jclazz.getDeclaredFields foreach (f => enter(jfieldAsScala(f), f.javaFlags))
jclazz.getDeclaredMethods foreach (m => enter(jmethodAsScala(m), m.javaFlags))
jclazz.getConstructors foreach (c => enter(jconstrAsScala(c), c.javaFlags))
+ enterEmptyCtorIfNecessary()
}
if (parentsLevel == 0) {
@@ -949,7 +958,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
val cls =
if (jclazz.isMemberClass && !nme.isImplClassName(jname))
lookupClass
- else if (jclazz.isLocalClass0 || isInvalidClassName(jname))
+ else if (jclazz.isLocalClass0 || scalacShouldntLoadClass(jname))
// local classes and implementation classes not preserved by unpickling - treat as Java
//
// upd. but only if they cannot be loaded as top-level classes
@@ -1171,6 +1180,17 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
var fullNameOfJavaClass = ownerClazz.getName
if (childOfClass || childOfTopLevel) fullNameOfJavaClass += "$"
fullNameOfJavaClass += clazz.name
+
+ // compactify (see SI-7779)
+ fullNameOfJavaClass = fullNameOfJavaClass match {
+ case PackageAndClassPattern(pack, clazzName) =>
+ // in a package
+ pack + compactifyName(clazzName)
+ case _ =>
+ // in the empty package
+ compactifyName(fullNameOfJavaClass)
+ }
+
if (clazz.isModuleClass) fullNameOfJavaClass += "$"
// println(s"ownerChildren = ${ownerChildren.toList}")
@@ -1180,6 +1200,8 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
noClass
}
+ private val PackageAndClassPattern = """(.*\.)(.*)$""".r
+
private def expandedName(sym: Symbol): String =
if (sym.isPrivate) nme.expandedName(sym.name.toTermName, sym.owner).toString
else sym.name.toString
@@ -1219,7 +1241,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
val effectiveParamClasses =
if (!constr.owner.owner.isStaticOwner) jclazz.getEnclosingClass +: paramClasses
else paramClasses
- jclazz getConstructor (effectiveParamClasses: _*)
+ jclazz getDeclaredConstructor (effectiveParamClasses: _*)
}
private def jArrayClass(elemClazz: jClass[_]): jClass[_] = {
@@ -1234,6 +1256,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
case TypeRef(_, ArrayClass, List(elemtpe)) => jArrayClass(typeToJavaClass(elemtpe))
case TypeRef(_, sym: ClassSymbol, _) => classToJava(sym.asClass)
case tpe @ TypeRef(_, sym: AliasTypeSymbol, _) => typeToJavaClass(tpe.dealias)
+ case SingleType(_, sym: ModuleSymbol) => classToJava(sym.moduleClass.asClass)
case _ => throw new NoClassDefFoundError("no Java class corresponding to "+tpe+" found")
}
}
diff --git a/src/reflect/scala/reflect/runtime/ReflectionUtils.scala b/src/reflect/scala/reflect/runtime/ReflectionUtils.scala
index 2db9706007..710ec02acd 100644
--- a/src/reflect/scala/reflect/runtime/ReflectionUtils.scala
+++ b/src/reflect/scala/reflect/runtime/ReflectionUtils.scala
@@ -78,4 +78,10 @@ private[scala] object ReflectionUtils {
accessor setAccessible true
accessor invoke outer
}
+
+ def isTraitImplementation(fileName: String) = fileName endsWith "$class.class"
+
+ def scalacShouldntLoadClassfile(fileName: String) = isTraitImplementation(fileName)
+
+ def scalacShouldntLoadClass(name: scala.reflect.internal.SymbolTable#Name) = scalacShouldntLoadClassfile(name + ".class")
}
diff --git a/src/reflect/scala/reflect/runtime/SymbolLoaders.scala b/src/reflect/scala/reflect/runtime/SymbolLoaders.scala
index 815cc0c885..3e01a6df02 100644
--- a/src/reflect/scala/reflect/runtime/SymbolLoaders.scala
+++ b/src/reflect/scala/reflect/runtime/SymbolLoaders.scala
@@ -5,6 +5,7 @@ package runtime
import internal.Flags
import java.lang.{Class => jClass, Package => jPackage}
import scala.collection.mutable
+import scala.reflect.runtime.ReflectionUtils.scalacShouldntLoadClass
private[reflect] trait SymbolLoaders { self: SymbolTable =>
@@ -90,14 +91,6 @@ private[reflect] trait SymbolLoaders { self: SymbolTable =>
}
}
- /** Is the given name valid for a top-level class? We exclude names with embedded $-signs, because
- * these are nested classes or anonymous classes,
- */
- def isInvalidClassName(name: Name) = {
- val dp = name pos '$'
- 0 < dp && dp < (name.length - 1)
- }
-
class PackageScope(pkgClass: Symbol) extends Scope(initFingerPrints = -1L) // disable fingerprinting as we do not know entries beforehand
with SynchronizedScope {
assert(pkgClass.isType)
@@ -107,7 +100,7 @@ private[reflect] trait SymbolLoaders { self: SymbolTable =>
val e = super.lookupEntry(name)
if (e != null)
e
- else if (isInvalidClassName(name) || (negatives contains name))
+ else if (scalacShouldntLoadClass(name) || (negatives contains name))
null
else {
val path =
diff --git a/src/reflect/scala/reflect/runtime/SynchronizedOps.scala b/src/reflect/scala/reflect/runtime/SynchronizedOps.scala
index 132470b2e7..6aa47a0405 100644
--- a/src/reflect/scala/reflect/runtime/SynchronizedOps.scala
+++ b/src/reflect/scala/reflect/runtime/SynchronizedOps.scala
@@ -9,10 +9,7 @@ private[reflect] trait SynchronizedOps extends internal.SymbolTable
// Names
- private lazy val nameLock = new Object
-
- override def newTermName(s: String): TermName = nameLock.synchronized { super.newTermName(s) }
- override def newTypeName(s: String): TypeName = nameLock.synchronized { super.newTypeName(s) }
+ override protected def synchronizeNames = true
// BaseTypeSeqs
diff --git a/src/repl/scala/tools/nsc/interpreter/ILoop.scala b/src/repl/scala/tools/nsc/interpreter/ILoop.scala
index a623ee5055..ed56016bce 100644
--- a/src/repl/scala/tools/nsc/interpreter/ILoop.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ILoop.scala
@@ -854,9 +854,9 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
globalFuture = future {
intp.initializeSynchronous()
loopPostInit()
- loadFiles(settings)
!intp.reporter.hasErrors
}
+ loadFiles(settings)
printWelcome()
try loop()
diff --git a/src/repl/scala/tools/nsc/interpreter/IMain.scala b/src/repl/scala/tools/nsc/interpreter/IMain.scala
index 3eafa563bc..ee4ff59498 100644
--- a/src/repl/scala/tools/nsc/interpreter/IMain.scala
+++ b/src/repl/scala/tools/nsc/interpreter/IMain.scala
@@ -22,7 +22,7 @@ import scala.reflect.internal.util.{ BatchSourceFile, SourceFile }
import scala.tools.util.PathResolver
import scala.tools.nsc.io.AbstractFile
import scala.tools.nsc.typechecker.{ TypeStrings, StructuredTypeStrings }
-import scala.tools.nsc.util.{ ScalaClassLoader, stringFromWriter, stackTracePrefixString }
+import scala.tools.nsc.util.{ ScalaClassLoader, stringFromWriter, StackTraceOps }
import scala.tools.nsc.util.Exceptional.unwrap
import javax.script.{AbstractScriptEngine, Bindings, ScriptContext, ScriptEngine, ScriptEngineFactory, ScriptException, CompiledScript, Compilable}
@@ -726,7 +726,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set
def isWrapperInit(x: StackTraceElement) = cond(x.getClassName) {
case classNameRegex() if x.getMethodName == nme.CONSTRUCTOR.decoded => true
}
- val stackTrace = util.stackTracePrefixString(unwrapped)(!isWrapperInit(_))
+ val stackTrace = unwrapped stackTracePrefixString (!isWrapperInit(_))
withLastExceptionLock[String]({
directBind[Throwable]("lastException", unwrapped)(StdReplTags.tagOfThrowable, classTag[Throwable])
diff --git a/src/xml/scala/xml/Atom.scala b/src/xml/scala/xml/Atom.scala
deleted file mode 100644
index 33e58ba7e7..0000000000
--- a/src/xml/scala/xml/Atom.scala
+++ /dev/null
@@ -1,47 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package xml
-
-/** The class `Atom` provides an XML node for text (`PCDATA`).
- * It is used in both non-bound and bound XML representations.
- *
- * @author Burak Emir
- * @param data the text contained in this node, may not be `'''null'''`.
- */
-class Atom[+A](val data: A) extends SpecialNode with Serializable {
- if (data == null)
- throw new IllegalArgumentException("cannot construct "+getClass.getSimpleName+" with null")
-
- override protected def basisForHashCode: Seq[Any] = Seq(data)
-
- override def strict_==(other: Equality) = other match {
- case x: Atom[_] => data == x.data
- case _ => false
- }
-
- override def canEqual(other: Any) = other match {
- case _: Atom[_] => true
- case _ => false
- }
-
- final override def doCollectNamespaces = false
- final override def doTransform = false
-
- def label = "#PCDATA"
-
- /** Returns text, with some characters escaped according to the XML
- * specification.
- */
- def buildString(sb: StringBuilder): StringBuilder =
- Utility.escape(data.toString, sb)
-
- override def text: String = data.toString
-
-}
diff --git a/src/xml/scala/xml/Attribute.scala b/src/xml/scala/xml/Attribute.scala
deleted file mode 100644
index e4b2b69fc6..0000000000
--- a/src/xml/scala/xml/Attribute.scala
+++ /dev/null
@@ -1,101 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package xml
-
-/** This singleton object contains the `apply` and `unapply` methods for
- * convenient construction and deconstruction.
- *
- * @author Burak Emir
- * @version 1.0
- */
-object Attribute {
- def unapply(x: Attribute) = x match {
- case PrefixedAttribute(_, key, value, next) => Some((key, value, next))
- case UnprefixedAttribute(key, value, next) => Some((key, value, next))
- case _ => None
- }
-
- /** Convenience functions which choose Un/Prefixedness appropriately */
- def apply(key: String, value: Seq[Node], next: MetaData): Attribute =
- new UnprefixedAttribute(key, value, next)
-
- def apply(pre: String, key: String, value: String, next: MetaData): Attribute =
- if (pre == null || pre == "") new UnprefixedAttribute(key, value, next)
- else new PrefixedAttribute(pre, key, value, next)
-
- def apply(pre: String, key: String, value: Seq[Node], next: MetaData): Attribute =
- if (pre == null || pre == "") new UnprefixedAttribute(key, value, next)
- else new PrefixedAttribute(pre, key, value, next)
-
- def apply(pre: Option[String], key: String, value: Seq[Node], next: MetaData): Attribute =
- pre match {
- case None => new UnprefixedAttribute(key, value, next)
- case Some(p) => new PrefixedAttribute(p, key, value, next)
- }
-}
-
-/** The `Attribute` trait defines the interface shared by both
- * [[scala.xml.PrefixedAttribute]] and [[scala.xml.UnprefixedAttribute]].
- *
- * @author Burak Emir
- * @version 1.0
- */
-abstract trait Attribute extends MetaData {
- def pre: String // will be null if unprefixed
- val key: String
- val value: Seq[Node]
- val next: MetaData
-
- def apply(key: String): Seq[Node]
- def apply(namespace: String, scope: NamespaceBinding, key: String): Seq[Node]
- def copy(next: MetaData): Attribute
-
- def remove(key: String) =
- if (!isPrefixed && this.key == key) next
- else copy(next remove key)
-
- def remove(namespace: String, scope: NamespaceBinding, key: String) =
- if (this.key == key && (scope getURI pre) == namespace) next
- else copy(next.remove(namespace, scope, key))
-
- def isPrefixed: Boolean = pre != null
-
- def getNamespace(owner: Node): String
-
- def wellformed(scope: NamespaceBinding): Boolean = {
- val arg = if (isPrefixed) scope getURI pre else null
- (next(arg, scope, key) == null) && (next wellformed scope)
- }
-
- /** Returns an iterator on attributes */
- override def iterator: Iterator[MetaData] = {
- if (value == null) next.iterator
- else Iterator.single(this) ++ next.iterator
- }
-
- override def size: Int = {
- if (value == null) next.size
- else 1 + next.size
- }
-
- /** Appends string representation of only this attribute to stringbuffer.
- */
- protected def toString1(sb: StringBuilder) {
- if (value == null)
- return
- if (isPrefixed)
- sb append pre append ':'
-
- sb append key append '='
- val sb2 = new StringBuilder()
- Utility.sequenceToXML(value, TopScope, sb2, stripComments = true)
- Utility.appendQuoted(sb2.toString, sb)
- }
-}
diff --git a/src/xml/scala/xml/Comment.scala b/src/xml/scala/xml/Comment.scala
deleted file mode 100644
index b8dccdcb16..0000000000
--- a/src/xml/scala/xml/Comment.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package xml
-
-/** The class `Comment` implements an XML node for comments.
- *
- * @author Burak Emir
- * @param commentText the text contained in this node, may not contain "--"
- */
-case class Comment(commentText: String) extends SpecialNode {
-
- def label = "#REM"
- override def text = ""
- final override def doCollectNamespaces = false
- final override def doTransform = false
-
- if (commentText contains "--")
- throw new IllegalArgumentException("text contains \"--\"")
-
- /** Appends &quot;<!-- text -->&quot; to this string buffer.
- */
- override def buildString(sb: StringBuilder) =
- sb append "<!--" append commentText append "-->"
-}
diff --git a/src/xml/scala/xml/Document.scala b/src/xml/scala/xml/Document.scala
deleted file mode 100644
index 9a725014fc..0000000000
--- a/src/xml/scala/xml/Document.scala
+++ /dev/null
@@ -1,92 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package xml
-
-/** A document information item (according to InfoSet spec). The comments
- * are copied from the Infoset spec, only augmented with some information
- * on the Scala types for definitions that might have no value.
- * Also plays the role of an `XMLEvent` for pull parsing.
- *
- * @author Burak Emir
- * @version 1.0, 26/04/2005
- */
-@SerialVersionUID(-2289320563321795109L)
-class Document extends NodeSeq with pull.XMLEvent with Serializable {
-
- /** An ordered list of child information items, in document
- * order. The list contains exactly one element information item. The
- * list also contains one processing instruction information item for
- * each processing instruction outside the document element, and one
- * comment information item for each comment outside the document
- * element. Processing instructions and comments within the DTD are
- * excluded. If there is a document type declaration, the list also
- * contains a document type declaration information item.
- */
- var children: Seq[Node] = _
-
- /** The element information item corresponding to the document element. */
- var docElem: Node = _
-
- /** The dtd that comes with the document, if any */
- var dtd: scala.xml.dtd.DTD = _
-
- /** An unordered set of notation information items, one for each notation
- * declared in the DTD. If any notation is multiply declared, this property
- * has no value.
- */
- def notations: Seq[scala.xml.dtd.NotationDecl] =
- dtd.notations
-
- /** An unordered set of unparsed entity information items, one for each
- * unparsed entity declared in the DTD.
- */
- def unparsedEntities: Seq[scala.xml.dtd.EntityDecl] =
- dtd.unparsedEntities
-
- /** The base URI of the document entity. */
- var baseURI: String = _
-
- /** The name of the character encoding scheme in which the document entity
- * is expressed.
- */
- var encoding: Option[String] = _
-
- /** An indication of the standalone status of the document, either
- * true or false. This property is derived from the optional standalone
- * document declaration in the XML declaration at the beginning of the
- * document entity, and has no value (`None`) if there is no
- * standalone document declaration.
- */
- var standAlone: Option[Boolean] = _
-
- /** A string representing the XML version of the document. This
- * property is derived from the XML declaration optionally present at
- * the beginning of the document entity, and has no value (`None`)
- * if there is no XML declaration.
- */
- var version: Option[String] = _
-
- /** 9. This property is not strictly speaking part of the infoset of
- * the document. Rather it is an indication of whether the processor
- * has read the complete DTD. Its value is a boolean. If it is false,
- * then certain properties (indicated in their descriptions below) may
- * be unknown. If it is true, those properties are never unknown.
- */
- var allDeclarationsProcessed = false
-
- // methods for NodeSeq
-
- def theSeq: Seq[Node] = this.docElem
-
- override def canEqual(other: Any) = other match {
- case _: Document => true
- case _ => false
- }
-}
diff --git a/src/xml/scala/xml/Elem.scala b/src/xml/scala/xml/Elem.scala
deleted file mode 100755
index e9b87e516c..0000000000
--- a/src/xml/scala/xml/Elem.scala
+++ /dev/null
@@ -1,136 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package xml
-
-/** This singleton object contains the `apply` and `unapplySeq` methods for
- * convenient construction and deconstruction. It is possible to deconstruct
- * any `Node` instance (that is not a `SpecialNode` or a `Group`) using the
- * syntax `case Elem(prefix, label, attribs, scope, child @ _*) => ...`
- *
- * Copyright 2008 Google Inc. All Rights Reserved.
- * @author Burak Emir <bqe@google.com>
- */
-object Elem {
- /** Build an Elem, setting its minimizeEmpty property to `true` if it has no children. Note that this
- * default may not be exactly what you want, as some XML dialects don't permit some elements to be minimized.
- *
- * @deprecated This factory method is retained for backward compatibility; please use the other one, with which you
- * can specify your own preference for minimizeEmpty.
- */
- @deprecated("Use the other apply method in this object", "2.10.0")
- def apply(prefix: String, label: String, attributes: MetaData, scope: NamespaceBinding, child: Node*): Elem =
- apply(prefix, label, attributes, scope, child.isEmpty, child: _*)
-
- def apply(prefix: String, label: String, attributes: MetaData, scope: NamespaceBinding, minimizeEmpty: Boolean, child: Node*): Elem =
- new Elem(prefix, label, attributes, scope, minimizeEmpty, child: _*)
-
- def unapplySeq(n: Node) = n match {
- case _: SpecialNode | _: Group => None
- case _ => Some((n.prefix, n.label, n.attributes, n.scope, n.child))
- }
-
- import scala.sys.process._
- import scala.language.implicitConversions
- /** Implicitly convert a [[scala.xml.Elem]] into a
- * [[scala.sys.process.ProcessBuilder]]. This is done by obtaining the text
- * elements of the element, trimming spaces, and then converting the result
- * from string to a process. Importantly, tags are completely ignored, so
- * they cannot be used to separate parameters.
- */
- @deprecated("To create a scala.sys.process.Process from an xml.Elem, please use Process(elem.text.trim).", "2.11.0")
- implicit def xmlToProcess(command: scala.xml.Elem): ProcessBuilder = Process(command.text.trim)
-
- @deprecated("To create a scala.sys.process.Process from an xml.Elem, please use Process(elem.text.trim).", "2.11.0")
- implicit def processXml(p: Process.type) = new {
- /** Creates a [[scala.sys.process.ProcessBuilder]] from a Scala XML Element.
- * This can be used as a way to template strings.
- *
- * @example {{{
- * apply(<x> {dxPath.absolutePath} --dex --output={classesDexPath.absolutePath} {classesMinJarPath.absolutePath}</x>)
- * }}}
- */
- def apply(command: Elem): ProcessBuilder = Process(command.text.trim)
- }
-}
-
-
-/** The case class `Elem` extends the `Node` class,
- * providing an immutable data object representing an XML element.
- *
- * @param prefix namespace prefix (may be null, but not the empty string)
- * @param label the element name
- * @param attributes1 the attribute map
- * @param scope the scope containing the namespace bindings
- * @param minimizeEmpty `true` if this element should be serialized as minimized (i.e. "&lt;el/&gt;") when
- * empty; `false` if it should be written out in long form.
- * @param child the children of this node
- *
- * Copyright 2008 Google Inc. All Rights Reserved.
- * @author Burak Emir <bqe@google.com>
- */
-class Elem(
- override val prefix: String,
- val label: String,
- attributes1: MetaData,
- override val scope: NamespaceBinding,
- val minimizeEmpty: Boolean,
- val child: Node*)
-extends Node with Serializable
-{
- @deprecated("This constructor is retained for backward compatibility. Please use the primary constructor, which lets you specify your own preference for `minimizeEmpty`.", "2.10.0")
- def this(prefix: String, label: String, attributes: MetaData, scope: NamespaceBinding, child: Node*) = {
- this(prefix, label, attributes, scope, child.isEmpty, child: _*)
- }
-
- final override def doCollectNamespaces = true
- final override def doTransform = true
-
- override val attributes = MetaData.normalize(attributes1, scope)
-
- if (prefix == "")
- throw new IllegalArgumentException("prefix of zero length, use null instead")
-
- if (scope == null)
- throw new IllegalArgumentException("scope is null, use scala.xml.TopScope for empty scope")
-
- //@todo: copy the children,
- // setting namespace scope if necessary
- // cleaning adjacent text nodes if necessary
-
- override protected def basisForHashCode: Seq[Any] =
- prefix :: label :: attributes :: child.toList
-
- /** Returns a new element with updated attributes, resolving namespace uris
- * from this element's scope. See MetaData.update for details.
- *
- * @param updates MetaData with new and updated attributes
- * @return a new symbol with updated attributes
- */
- final def %(updates: MetaData): Elem =
- copy(attributes = MetaData.update(attributes, scope, updates))
-
- /** Returns a copy of this element with any supplied arguments replacing
- * this element's value for that field.
- *
- * @return a new symbol with updated attributes
- */
- def copy(
- prefix: String = this.prefix,
- label: String = this.label,
- attributes: MetaData = this.attributes,
- scope: NamespaceBinding = this.scope,
- minimizeEmpty: Boolean = this.minimizeEmpty,
- child: Seq[Node] = this.child.toSeq
- ): Elem = Elem(prefix, label, attributes, scope, minimizeEmpty, child: _*)
-
- /** Returns concatenation of `text(n)` for each child `n`.
- */
- override def text = (child map (_.text)).mkString
-}
diff --git a/src/xml/scala/xml/EntityRef.scala b/src/xml/scala/xml/EntityRef.scala
deleted file mode 100644
index 7a58831075..0000000000
--- a/src/xml/scala/xml/EntityRef.scala
+++ /dev/null
@@ -1,40 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package xml
-
-/** The class `EntityRef` implements an XML node for entity references.
- *
- * @author Burak Emir
- * @version 1.0
- * @param entityName the name of the entity reference, for example `amp`.
- */
-case class EntityRef(entityName: String) extends SpecialNode {
- final override def doCollectNamespaces = false
- final override def doTransform = false
- def label = "#ENTITY"
-
- override def text = entityName match {
- case "lt" => "<"
- case "gt" => ">"
- case "amp" => "&"
- case "apos" => "'"
- case "quot" => "\""
- case _ => Utility.sbToString(buildString)
- }
-
- /** Appends `"&amp; entityName;"` to this string buffer.
- *
- * @param sb the string buffer.
- * @return the modified string buffer `sb`.
- */
- override def buildString(sb: StringBuilder) =
- sb.append("&").append(entityName).append(";")
-
-}
diff --git a/src/xml/scala/xml/Equality.scala b/src/xml/scala/xml/Equality.scala
deleted file mode 100644
index 021d185812..0000000000
--- a/src/xml/scala/xml/Equality.scala
+++ /dev/null
@@ -1,107 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package xml
-
-/** In an attempt to contain the damage being inflicted on consistency by the
- * ad hoc `equals` methods spread around `xml`, the logic is centralized and
- * all the `xml` classes go through the `xml.Equality trait`. There are two
- * forms of `xml` comparison.
- *
- * 1. `'''def''' strict_==(other: scala.xml.Equality)`
- *
- * This one tries to honor the little things like symmetry and hashCode
- * contracts. The `equals` method routes all comparisons through this.
- *
- * 1. `xml_==(other: Any)`
- *
- * This one picks up where `strict_==` leaves off. It might declare any two
- * things equal.
- *
- * As things stood, the logic not only made a mockery of the collections
- * equals contract, but also laid waste to that of case classes.
- *
- * Among the obstacles to sanity are/were:
- *
- * Node extends NodeSeq extends Seq[Node]
- * MetaData extends Iterable[MetaData]
- * The hacky "Group" xml node which throws exceptions
- * with wild abandon, so don't get too close
- * Rampant asymmetry and impossible hashCodes
- * Most classes claiming to be equal to "String" if
- * some specific stringification of it was the same.
- * String was never going to return the favor.
- */
-
-object Equality {
- def asRef(x: Any): AnyRef = x.asInstanceOf[AnyRef]
-
- /** Note - these functions assume strict equality has already failed.
- */
- def compareBlithely(x1: AnyRef, x2: String): Boolean = x1 match {
- case x: Atom[_] => x.data == x2
- case x: NodeSeq => x.text == x2
- case _ => false
- }
- def compareBlithely(x1: AnyRef, x2: Node): Boolean = x1 match {
- case x: NodeSeq if x.length == 1 => x2 == x(0)
- case _ => false
- }
- def compareBlithely(x1: AnyRef, x2: AnyRef): Boolean = {
- if (x1 == null || x2 == null)
- return (x1 eq x2)
-
- x2 match {
- case s: String => compareBlithely(x1, s)
- case n: Node => compareBlithely(x1, n)
- case _ => false
- }
- }
-}
-import Equality._
-
-trait Equality extends scala.Equals {
- protected def basisForHashCode: Seq[Any]
-
- def strict_==(other: Equality): Boolean
- def strict_!=(other: Equality) = !strict_==(other)
-
- /** We insist we're only equal to other `xml.Equality` implementors,
- * which heads off a lot of inconsistency up front.
- */
- override def canEqual(other: Any): Boolean = other match {
- case x: Equality => true
- case _ => false
- }
-
- /** It's be nice to make these final, but there are probably
- * people out there subclassing the XML types, especially when
- * it comes to equals. However WE at least can pretend they
- * are final since clearly individual classes cannot be trusted
- * to maintain a semblance of order.
- */
- override def hashCode() = basisForHashCode.##
- override def equals(other: Any) = doComparison(other, blithe = false)
- final def xml_==(other: Any) = doComparison(other, blithe = true)
- final def xml_!=(other: Any) = !xml_==(other)
-
- /** The "blithe" parameter expresses the caller's unconcerned attitude
- * regarding the usual constraints on equals. The method is thereby
- * given carte blanche to declare any two things equal.
- */
- private def doComparison(other: Any, blithe: Boolean) = {
- val strictlyEqual = other match {
- case x: AnyRef if this eq x => true
- case x: Equality => (x canEqual this) && (this strict_== x)
- case _ => false
- }
-
- strictlyEqual || (blithe && compareBlithely(this, asRef(other)))
- }
-}
diff --git a/src/xml/scala/xml/Group.scala b/src/xml/scala/xml/Group.scala
deleted file mode 100644
index e3af615008..0000000000
--- a/src/xml/scala/xml/Group.scala
+++ /dev/null
@@ -1,42 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package xml
-
-/** A hack to group XML nodes in one node for output.
- *
- * @author Burak Emir
- * @version 1.0
- */
-final case class Group(nodes: Seq[Node]) extends Node {
- override def theSeq = nodes
-
- override def canEqual(other: Any) = other match {
- case x: Group => true
- case _ => false
- }
-
- override def strict_==(other: Equality) = other match {
- case Group(xs) => nodes sameElements xs
- case _ => false
- }
-
- override protected def basisForHashCode = nodes
-
- /** Since Group is very much a hack it throws an exception if you
- * try to do anything with it.
- */
- private def fail(msg: String) = throw new UnsupportedOperationException("class Group does not support method '%s'" format msg)
-
- def label = fail("label")
- override def attributes = fail("attributes")
- override def namespace = fail("namespace")
- override def child = fail("child")
- def buildString(sb: StringBuilder) = fail("toString(StringBuilder)")
-}
diff --git a/src/xml/scala/xml/MalformedAttributeException.scala b/src/xml/scala/xml/MalformedAttributeException.scala
deleted file mode 100644
index d499ad3e10..0000000000
--- a/src/xml/scala/xml/MalformedAttributeException.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala
-package xml
-
-
-case class MalformedAttributeException(msg: String) extends RuntimeException(msg)
diff --git a/src/xml/scala/xml/MetaData.scala b/src/xml/scala/xml/MetaData.scala
deleted file mode 100644
index 8b5ea187cb..0000000000
--- a/src/xml/scala/xml/MetaData.scala
+++ /dev/null
@@ -1,217 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package xml
-
-import Utility.sbToString
-import scala.annotation.tailrec
-import scala.collection.{ AbstractIterable, Iterator }
-
-/**
- * Copyright 2008 Google Inc. All Rights Reserved.
- * @author Burak Emir <bqe@google.com>
- */
-object MetaData {
- /**
- * appends all attributes from new_tail to attribs, without attempting to
- * detect or remove duplicates. The method guarantees that all attributes
- * from attribs come before the attributes in new_tail, but does not
- * guarantee to preserve the relative order of attribs.
- *
- * Duplicates can be removed with `normalize`.
- */
- @tailrec // temporarily marked final so it will compile under -Xexperimental
- final def concatenate(attribs: MetaData, new_tail: MetaData): MetaData =
- if (attribs eq Null) new_tail
- else concatenate(attribs.next, attribs copy new_tail)
-
- /**
- * returns normalized MetaData, with all duplicates removed and namespace prefixes resolved to
- * namespace URIs via the given scope.
- */
- def normalize(attribs: MetaData, scope: NamespaceBinding): MetaData = {
- def iterate(md: MetaData, normalized_attribs: MetaData, set: Set[String]): MetaData = {
- lazy val key = getUniversalKey(md, scope)
- if (md eq Null) normalized_attribs
- else if ((md.value eq null) || set(key)) iterate(md.next, normalized_attribs, set)
- else md copy iterate(md.next, normalized_attribs, set + key)
- }
- iterate(attribs, Null, Set())
- }
-
- /**
- * returns key if md is unprefixed, pre+key is md is prefixed
- */
- def getUniversalKey(attrib: MetaData, scope: NamespaceBinding) = attrib match {
- case prefixed: PrefixedAttribute => scope.getURI(prefixed.pre) + prefixed.key
- case unprefixed: UnprefixedAttribute => unprefixed.key
- }
-
- /**
- * returns MetaData with attributes updated from given MetaData
- */
- def update(attribs: MetaData, scope: NamespaceBinding, updates: MetaData): MetaData =
- normalize(concatenate(updates, attribs), scope)
-
-}
-
-/** This class represents an attribute and at the same time a linked list of
- * attributes. Every instance of this class is either
- * - an instance of `UnprefixedAttribute key,value` or
- * - an instance of `PrefixedAttribute namespace_prefix,key,value` or
- * - `Null, the empty attribute list.
- *
- * Namespace URIs are obtained by using the namespace scope of the element
- * owning this attribute (see `getNamespace`).
- *
- * Copyright 2008 Google Inc. All Rights Reserved.
- * @author Burak Emir <bqe@google.com>
- */
-abstract class MetaData
-extends AbstractIterable[MetaData]
- with Iterable[MetaData]
- with Equality
- with Serializable {
-
- /** Updates this MetaData with the MetaData given as argument. All attributes that occur in updates
- * are part of the resulting MetaData. If an attribute occurs in both this instance and
- * updates, only the one in updates is part of the result (avoiding duplicates). For prefixed
- * attributes, namespaces are resolved using the given scope, which defaults to TopScope.
- *
- * @param updates MetaData with new and updated attributes
- * @return a new MetaData instance that contains old, new and updated attributes
- */
- def append(updates: MetaData, scope: NamespaceBinding = TopScope): MetaData =
- MetaData.update(this, scope, updates)
-
- /**
- * Gets value of unqualified (unprefixed) attribute with given key, null if not found
- *
- * @param key
- * @return value as Seq[Node] if key is found, null otherwise
- */
- def apply(key: String): Seq[Node]
-
- /** convenience method, same as `apply(namespace, owner.scope, key)`.
- *
- * @param namespace_uri namespace uri of key
- * @param owner the element owning this attribute list
- * @param key the attribute key
- */
- final def apply(namespace_uri: String, owner: Node, key: String): Seq[Node] =
- apply(namespace_uri, owner.scope, key)
-
- /**
- * Gets value of prefixed attribute with given key and namespace, null if not found
- *
- * @param namespace_uri namespace uri of key
- * @param scp a namespace scp (usually of the element owning this attribute list)
- * @param k to be looked for
- * @return value as Seq[Node] if key is found, null otherwise
- */
- def apply(namespace_uri: String, scp: NamespaceBinding, k: String): Seq[Node]
-
- /** returns a copy of this MetaData item with next field set to argument.
- */
- def copy(next: MetaData): MetaData
-
- /** if owner is the element of this metadata item, returns namespace */
- def getNamespace(owner: Node): String
-
- def hasNext = (Null != next)
-
- def length: Int = length(0)
-
- def length(i: Int): Int = next.length(i + 1)
-
- def isPrefixed: Boolean
-
- override def canEqual(other: Any) = other match {
- case _: MetaData => true
- case _ => false
- }
- override def strict_==(other: Equality) = other match {
- case m: MetaData => this.asAttrMap == m.asAttrMap
- case _ => false
- }
- protected def basisForHashCode: Seq[Any] = List(this.asAttrMap)
-
- /** filters this sequence of meta data */
- override def filter(f: MetaData => Boolean): MetaData =
- if (f(this)) copy(next filter f)
- else next filter f
-
- /** returns key of this MetaData item */
- def key: String
-
- /** returns value of this MetaData item */
- def value: Seq[Node]
-
- /** Returns a String containing "prefix:key" if the first key is
- * prefixed, and "key" otherwise.
- */
- def prefixedKey = this match {
- case x: Attribute if x.isPrefixed => x.pre + ":" + key
- case _ => key
- }
-
- /** Returns a Map containing the attributes stored as key/value pairs.
- */
- def asAttrMap: Map[String, String] =
- (iterator map (x => (x.prefixedKey, x.value.text))).toMap
-
- /** returns Null or the next MetaData item */
- def next: MetaData
-
- /**
- * Gets value of unqualified (unprefixed) attribute with given key, None if not found
- *
- * @param key
- * @return value in Some(Seq[Node]) if key is found, None otherwise
- */
- final def get(key: String): Option[Seq[Node]] = Option(apply(key))
-
- /** same as get(uri, owner.scope, key) */
- final def get(uri: String, owner: Node, key: String): Option[Seq[Node]] =
- get(uri, owner.scope, key)
-
- /** gets value of qualified (prefixed) attribute with given key.
- *
- * @param uri namespace of key
- * @param scope a namespace scp (usually of the element owning this attribute list)
- * @param key to be looked fore
- * @return value as Some[Seq[Node]] if key is found, None otherwise
- */
- final def get(uri: String, scope: NamespaceBinding, key: String): Option[Seq[Node]] =
- Option(apply(uri, scope, key))
-
- protected def toString1(): String = sbToString(toString1)
-
- // appends string representations of single attribute to StringBuilder
- protected def toString1(sb: StringBuilder): Unit
-
- override def toString(): String = sbToString(buildString)
-
- def buildString(sb: StringBuilder): StringBuilder = {
- sb append ' '
- toString1(sb)
- next buildString sb
- }
-
- /**
- */
- def wellformed(scope: NamespaceBinding): Boolean
-
- def remove(key: String): MetaData
-
- def remove(namespace: String, scope: NamespaceBinding, key: String): MetaData
-
- final def remove(namespace: String, owner: Node, key: String): MetaData =
- remove(namespace, owner.scope, key)
-}
diff --git a/src/xml/scala/xml/NamespaceBinding.scala b/src/xml/scala/xml/NamespaceBinding.scala
deleted file mode 100644
index b320466976..0000000000
--- a/src/xml/scala/xml/NamespaceBinding.scala
+++ /dev/null
@@ -1,83 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package xml
-
-import Utility.sbToString
-
-/** The class `NamespaceBinding` represents namespace bindings
- * and scopes. The binding for the default namespace is treated as a null
- * prefix. the absent namespace is represented with the null uri. Neither
- * prefix nor uri may be empty, which is not checked.
- *
- * @author Burak Emir
- * @version 1.0
- */
-@SerialVersionUID(0 - 2518644165573446725L)
-case class NamespaceBinding(prefix: String, uri: String, parent: NamespaceBinding) extends AnyRef with Equality
-{
- if (prefix == "")
- throw new IllegalArgumentException("zero length prefix not allowed")
-
- def getURI(_prefix: String): String =
- if (prefix == _prefix) uri else parent getURI _prefix
-
- /** Returns some prefix that is mapped to the URI.
- *
- * @param _uri the input URI
- * @return the prefix that is mapped to the input URI, or null
- * if no prefix is mapped to the URI.
- */
- def getPrefix(_uri: String): String =
- if (_uri == uri) prefix else parent getPrefix _uri
-
- override def toString(): String = sbToString(buildString(_, TopScope))
-
- private def shadowRedefined(stop: NamespaceBinding): NamespaceBinding = {
- def prefixList(x: NamespaceBinding): List[String] =
- if ((x == null) || (x eq stop)) Nil
- else x.prefix :: prefixList(x.parent)
- def fromPrefixList(l: List[String]): NamespaceBinding = l match {
- case Nil => stop
- case x :: xs => new NamespaceBinding(x, this.getURI(x), fromPrefixList(xs))
- }
- val ps0 = prefixList(this).reverse
- val ps = ps0.distinct
- if (ps.size == ps0.size) this
- else fromPrefixList(ps)
- }
-
- override def canEqual(other: Any) = other match {
- case _: NamespaceBinding => true
- case _ => false
- }
-
- override def strict_==(other: Equality) = other match {
- case x: NamespaceBinding => (prefix == x.prefix) && (uri == x.uri) && (parent == x.parent)
- case _ => false
- }
-
- def basisForHashCode: Seq[Any] = List(prefix, uri, parent)
-
- def buildString(stop: NamespaceBinding): String = sbToString(buildString(_, stop))
-
- def buildString(sb: StringBuilder, stop: NamespaceBinding) {
- shadowRedefined(stop).doBuildString(sb, stop)
- }
-
- private def doBuildString(sb: StringBuilder, stop: NamespaceBinding) {
- if ((this == null) || (this eq stop)) return // contains?
-
- val s = " xmlns%s=\"%s\"".format(
- (if (prefix != null) ":" + prefix else ""),
- (if (uri != null) uri else "")
- )
- parent.doBuildString(sb append s, stop) // copy(ignore)
- }
-}
diff --git a/src/xml/scala/xml/Node.scala b/src/xml/scala/xml/Node.scala
deleted file mode 100755
index e121284252..0000000000
--- a/src/xml/scala/xml/Node.scala
+++ /dev/null
@@ -1,198 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package xml
-
-/** This singleton object contains the `unapplySeq` method for
- * convenient deconstruction.
- *
- * @author Burak Emir
- * @version 1.0
- */
-object Node {
- /** the constant empty attribute sequence */
- final def NoAttributes: MetaData = Null
-
- /** the empty namespace */
- val EmptyNamespace = ""
-
- def unapplySeq(n: Node) = Some((n.label, n.attributes, n.child))
-}
-
-/**
- * An abstract class representing XML with nodes of a labelled tree.
- * This class contains an implementation of a subset of XPath for navigation.
- *
- * @author Burak Emir and others
- * @version 1.1
- */
-abstract class Node extends NodeSeq {
-
- /** prefix of this node */
- def prefix: String = null
-
- /** label of this node. I.e. "foo" for &lt;foo/&gt;) */
- def label: String
-
- /** used internally. Atom/Molecule = -1 PI = -2 Comment = -3 EntityRef = -5
- */
- def isAtom = this.isInstanceOf[Atom[_]]
-
- /** The logic formerly found in typeTag$, as best I could infer it. */
- def doCollectNamespaces = true // if (tag >= 0) DO collect namespaces
- def doTransform = true // if (tag < 0) DO NOT transform
-
- /**
- * method returning the namespace bindings of this node. by default, this
- * is TopScope, which means there are no namespace bindings except the
- * predefined one for "xml".
- */
- def scope: NamespaceBinding = TopScope
-
- /**
- * convenience, same as `getNamespace(this.prefix)`
- */
- def namespace = getNamespace(this.prefix)
-
- /**
- * Convenience method, same as `scope.getURI(pre)` but additionally
- * checks if scope is `'''null'''`.
- *
- * @param pre the prefix whose namespace name we would like to obtain
- * @return the namespace if `scope != null` and prefix was
- * found, else `null`
- */
- def getNamespace(pre: String): String = if (scope eq null) null else scope.getURI(pre)
-
- /**
- * Convenience method, looks up an unprefixed attribute in attributes of this node.
- * Same as `attributes.getValue(key)`
- *
- * @param key of queried attribute.
- * @return value of `UnprefixedAttribute` with given key
- * in attributes, if it exists, otherwise `null`.
- */
- final def attribute(key: String): Option[Seq[Node]] = attributes.get(key)
-
- /**
- * Convenience method, looks up a prefixed attribute in attributes of this node.
- * Same as `attributes.getValue(uri, this, key)`-
- *
- * @param uri namespace of queried attribute (may not be null).
- * @param key of queried attribute.
- * @return value of `PrefixedAttribute` with given namespace
- * and given key, otherwise `'''null'''`.
- */
- final def attribute(uri: String, key: String): Option[Seq[Node]] =
- attributes.get(uri, this, key)
-
- /**
- * Returns attribute meaning all attributes of this node, prefixed and
- * unprefixed, in no particular order. In class `Node`, this
- * defaults to `Null` (the empty attribute list).
- *
- * @return all attributes of this node
- */
- def attributes: MetaData = Null
-
- /**
- * Returns child axis i.e. all children of this node.
- *
- * @return all children of this node
- */
- def child: Seq[Node]
-
- /** Children which do not stringify to "" (needed for equality)
- */
- def nonEmptyChildren: Seq[Node] = child filterNot (_.toString == "")
-
- /**
- * Descendant axis (all descendants of this node, not including node itself)
- * includes all text nodes, element nodes, comments and processing instructions.
- */
- def descendant: List[Node] =
- child.toList.flatMap { x => x::x.descendant }
-
- /**
- * Descendant axis (all descendants of this node, including thisa node)
- * includes all text nodes, element nodes, comments and processing instructions.
- */
- def descendant_or_self: List[Node] = this :: descendant
-
- override def canEqual(other: Any) = other match {
- case x: Group => false
- case x: Node => true
- case _ => false
- }
-
- override protected def basisForHashCode: Seq[Any] =
- prefix :: label :: attributes :: nonEmptyChildren.toList
-
- override def strict_==(other: Equality) = other match {
- case _: Group => false
- case x: Node =>
- (prefix == x.prefix) &&
- (label == x.label) &&
- (attributes == x.attributes) &&
- // (scope == x.scope) // note - original code didn't compare scopes so I left it as is.
- (nonEmptyChildren sameElements x.nonEmptyChildren)
- case _ =>
- false
- }
-
- // implementations of NodeSeq methods
-
- /**
- * returns a sequence consisting of only this node
- */
- def theSeq: Seq[Node] = this :: Nil
-
- /**
- * String representation of this node
- *
- * @param stripComments if true, strips comment nodes from result
- */
- def buildString(stripComments: Boolean): String =
- Utility.serialize(this, stripComments = stripComments).toString
-
- /**
- * Same as `toString('''false''')`.
- */
- override def toString(): String = buildString(stripComments = false)
-
- /**
- * Appends qualified name of this node to `StringBuilder`.
- */
- def nameToString(sb: StringBuilder): StringBuilder = {
- if (null != prefix) {
- sb append prefix
- sb append ':'
- }
- sb append label
- }
-
- /**
- * Returns a type symbol (e.g. DTD, XSD), default `'''null'''`.
- */
- def xmlType(): TypeSymbol = null
-
- /**
- * Returns a text representation of this node. Note that this is not equivalent to
- * the XPath node-test called text(), it is rather an implementation of the
- * XPath function string()
- * Martin to Burak: to do: if you make this method abstract, the compiler will now
- * complain if there's no implementation in a subclass. Is this what we want? Note that
- * this would break doc/DocGenator and doc/ModelToXML, with an error message like:
- * {{{
- * doc\DocGenerator.scala:1219: error: object creation impossible, since there is a deferred declaration of method text in class Node of type => String which is not implemented in a subclass
- * new SpecialNode {
- * ^
- * }}} */
- override def text: String = super.text
-}
diff --git a/src/xml/scala/xml/NodeBuffer.scala b/src/xml/scala/xml/NodeBuffer.scala
deleted file mode 100644
index ae7c7b2bf8..0000000000
--- a/src/xml/scala/xml/NodeBuffer.scala
+++ /dev/null
@@ -1,47 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package xml
-
-/**
- * This class acts as a Buffer for nodes. If it is used as a sequence of
- * nodes `Seq[Node]`, it must be ensured that no updates occur after that
- * point, because `scala.xml.Node` is assumed to be immutable.
- *
- * Despite this being a sequence, don't use it as key in a hashtable.
- * Calling the hashcode function will result in a runtime error.
- *
- * @author Burak Emir
- * @version 1.0
- */
-class NodeBuffer extends scala.collection.mutable.ArrayBuffer[Node] {
-
- /**
- * Append given object to this buffer, returns reference on this
- * `NodeBuffer` for convenience. Some rules apply:
- * - If argument `o` is `'''null'''`, it is ignored.
- * - If it is an `Iterator` or `Iterable`, its elements will be added.
- * - If `o` is a node, it is added as it is.
- * - If it is anything else, it gets wrapped in an [[scala.xml.Atom]].
- *
- * @param o converts to an xml node and adds to this node buffer
- * @return this nodebuffer
- */
- def &+(o: Any): NodeBuffer = {
- o match {
- case null | _: Unit | Text("") => // ignore
- case it: Iterator[_] => it foreach &+
- case n: Node => super.+=(n)
- case ns: Iterable[_] => this &+ ns.iterator
- case ns: Array[_] => this &+ ns.iterator
- case d => super.+=(new Atom(d))
- }
- this
- }
-}
diff --git a/src/xml/scala/xml/NodeSeq.scala b/src/xml/scala/xml/NodeSeq.scala
deleted file mode 100644
index b8022472fb..0000000000
--- a/src/xml/scala/xml/NodeSeq.scala
+++ /dev/null
@@ -1,157 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package xml
-
-import scala.collection.{ mutable, immutable, generic, SeqLike, AbstractSeq }
-import mutable.{ Builder, ListBuffer }
-import generic.{ CanBuildFrom }
-import scala.language.implicitConversions
-
-/** This object ...
- *
- * @author Burak Emir
- * @version 1.0
- */
-object NodeSeq {
- final val Empty = fromSeq(Nil)
- def fromSeq(s: Seq[Node]): NodeSeq = new NodeSeq {
- def theSeq = s
- }
- type Coll = NodeSeq
- implicit def canBuildFrom: CanBuildFrom[Coll, Node, NodeSeq] =
- new CanBuildFrom[Coll, Node, NodeSeq] {
- def apply(from: Coll) = newBuilder
- def apply() = newBuilder
- }
- def newBuilder: Builder[Node, NodeSeq] = new ListBuffer[Node] mapResult fromSeq
- implicit def seqToNodeSeq(s: Seq[Node]): NodeSeq = fromSeq(s)
-}
-
-/** This class implements a wrapper around `Seq[Node]` that adds XPath
- * and comprehension methods.
- *
- * @author Burak Emir
- * @version 1.0
- */
-abstract class NodeSeq extends AbstractSeq[Node] with immutable.Seq[Node] with SeqLike[Node, NodeSeq] with Equality {
- import NodeSeq.seqToNodeSeq // import view magic for NodeSeq wrappers
-
- /** Creates a list buffer as builder for this class */
- override protected[this] def newBuilder = NodeSeq.newBuilder
-
- def theSeq: Seq[Node]
- def length = theSeq.length
- override def iterator = theSeq.iterator
-
- def apply(i: Int): Node = theSeq(i)
- def apply(f: Node => Boolean): NodeSeq = filter(f)
-
- def xml_sameElements[A](that: Iterable[A]): Boolean = {
- val these = this.iterator
- val those = that.iterator
- while (these.hasNext && those.hasNext)
- if (these.next xml_!= those.next)
- return false
-
- !these.hasNext && !those.hasNext
- }
-
- protected def basisForHashCode: Seq[Any] = theSeq
-
- override def canEqual(other: Any) = other match {
- case _: NodeSeq => true
- case _ => false
- }
-
- override def strict_==(other: Equality) = other match {
- case x: NodeSeq => (length == x.length) && (theSeq sameElements x.theSeq)
- case _ => false
- }
-
- /** Projection function, which returns elements of `this` sequence based
- * on the string `that`. Use:
- * - `this \ "foo"` to get a list of all elements that are labelled with `"foo"`;
- * - `\ "_"` to get a list of all elements (wildcard);
- * - `ns \ "@foo"` to get the unprefixed attribute `"foo"`;
- * - `ns \ "@{uri}foo"` to get the prefixed attribute `"pre:foo"` whose
- * prefix `"pre"` is resolved to the namespace `"uri"`.
- *
- * For attribute projections, the resulting [[scala.xml.NodeSeq]] attribute
- * values are wrapped in a [[scala.xml.Group]].
- *
- * There is no support for searching a prefixed attribute by its literal prefix.
- *
- * The document order is preserved.
- */
- def \(that: String): NodeSeq = {
- def fail = throw new IllegalArgumentException(that)
- def atResult = {
- lazy val y = this(0)
- val attr =
- if (that.length == 1) fail
- else if (that(1) == '{') {
- val i = that indexOf '}'
- if (i == -1) fail
- val (uri, key) = (that.substring(2,i), that.substring(i+1, that.length()))
- if (uri == "" || key == "") fail
- else y.attribute(uri, key)
- }
- else y.attribute(that drop 1)
-
- attr match {
- case Some(x) => Group(x)
- case _ => NodeSeq.Empty
- }
- }
-
- def makeSeq(cond: (Node) => Boolean) =
- NodeSeq fromSeq (this flatMap (_.child) filter cond)
-
- that match {
- case "" => fail
- case "_" => makeSeq(!_.isAtom)
- case _ if (that(0) == '@' && this.length == 1) => atResult
- case _ => makeSeq(_.label == that)
- }
- }
-
- /** Projection function, which returns elements of `this` sequence and of
- * all its subsequences, based on the string `that`. Use:
- * - `this \\ 'foo` to get a list of all elements that are labelled with `"foo"`;
- * - `\\ "_"` to get a list of all elements (wildcard);
- * - `ns \\ "@foo"` to get the unprefixed attribute `"foo"`;
- * - `ns \\ "@{uri}foo"` to get each prefixed attribute `"pre:foo"` whose
- * prefix `"pre"` is resolved to the namespace `"uri"`.
- *
- * For attribute projections, the resulting [[scala.xml.NodeSeq]] attribute
- * values are wrapped in a [[scala.xml.Group]].
- *
- * There is no support for searching a prefixed attribute by its literal prefix.
- *
- * The document order is preserved.
- */
- def \\ (that: String): NodeSeq = {
- def filt(cond: (Node) => Boolean) = this flatMap (_.descendant_or_self) filter cond
- that match {
- case "_" => filt(!_.isAtom)
- case _ if that(0) == '@' => filt(!_.isAtom) flatMap (_ \ that)
- case _ => filt(x => !x.isAtom && x.label == that)
- }
- }
-
- /** Convenience method which returns string text of the named attribute. Use:
- * - `that \@ "foo"` to get the string text of attribute `"foo"`;
- */
- def \@(attributeName: String): String = (this \ ("@" + attributeName)).text
-
- override def toString(): String = theSeq.mkString
-
- def text: String = (this map (_.text)).mkString
-}
diff --git a/src/xml/scala/xml/Null.scala b/src/xml/scala/xml/Null.scala
deleted file mode 100644
index f763c023c4..0000000000
--- a/src/xml/scala/xml/Null.scala
+++ /dev/null
@@ -1,62 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package xml
-
-import Utility.isNameStart
-import scala.collection.Iterator
-
-/** Essentially, every method in here is a dummy, returning Zero[T].
- * It provides a backstop for the unusual collection defined by MetaData,
- * sort of a linked list of tails.
- *
- * @author Burak Emir
- * @version 1.0
- */
-case object Null extends MetaData {
- override def iterator = Iterator.empty
- override def size = 0
- override def append(m: MetaData, scope: NamespaceBinding = TopScope): MetaData = m
- override def filter(f: MetaData => Boolean): MetaData = this
-
- def copy(next: MetaData) = next
- def getNamespace(owner: Node) = null
-
- override def hasNext = false
- def next = null
- def key = null
- def value = null
- def isPrefixed = false
-
- override def length = 0
- override def length(i: Int) = i
-
- override def strict_==(other: Equality) = other match {
- case x: MetaData => x.length == 0
- case _ => false
- }
- override protected def basisForHashCode: Seq[Any] = Nil
-
- def apply(namespace: String, scope: NamespaceBinding, key: String) = null
- def apply(key: String) =
- if (isNameStart(key.head)) null
- else throw new IllegalArgumentException("not a valid attribute name '"+key+"', so can never match !")
-
- protected def toString1(sb: StringBuilder) = ()
- override protected def toString1(): String = ""
-
- override def toString(): String = ""
-
- override def buildString(sb: StringBuilder): StringBuilder = sb
-
- override def wellformed(scope: NamespaceBinding) = true
-
- def remove(key: String) = this
- def remove(namespace: String, scope: NamespaceBinding, key: String) = this
-}
diff --git a/src/xml/scala/xml/PCData.scala b/src/xml/scala/xml/PCData.scala
deleted file mode 100644
index 31eea2b6d7..0000000000
--- a/src/xml/scala/xml/PCData.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package xml
-
-/** This class (which is not used by all XML parsers, but always used by the
- * XHTML one) represents parseable character data, which appeared as CDATA
- * sections in the input and is to be preserved as CDATA section in the output.
- *
- * @author Burak Emir
- * @version 1.0
- */
-class PCData(data: String) extends Atom[String](data) {
-
- /** Returns text, with some characters escaped according to the XML
- * specification.
- *
- * @param sb the input string buffer associated to some XML element
- * @return the input string buffer with the formatted CDATA section
- */
- override def buildString(sb: StringBuilder): StringBuilder =
- sb append "<![CDATA[%s]]>".format(data)
-}
-
-/** This singleton object contains the `apply`and `unapply` methods for
- * convenient construction and deconstruction.
- *
- * @author Burak Emir
- * @version 1.0
- */
-object PCData {
- def apply(data: String) = new PCData(data)
- def unapply(other: Any): Option[String] = other match {
- case x: PCData => Some(x.data)
- case _ => None
- }
-}
-
diff --git a/src/xml/scala/xml/PrefixedAttribute.scala b/src/xml/scala/xml/PrefixedAttribute.scala
deleted file mode 100644
index 4ab79c8677..0000000000
--- a/src/xml/scala/xml/PrefixedAttribute.scala
+++ /dev/null
@@ -1,61 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala
-package xml
-
-/** prefixed attributes always have a non-null namespace.
- *
- * @param pre
- * @param key
- * @param value the attribute value
- * @param next1
- */
-class PrefixedAttribute(
- val pre: String,
- val key: String,
- val value: Seq[Node],
- val next1: MetaData)
-extends Attribute
-{
- val next = if (value ne null) next1 else next1.remove(key)
-
- /** same as this(pre, key, Text(value), next), or no attribute if value is null */
- def this(pre: String, key: String, value: String, next: MetaData) =
- this(pre, key, if (value ne null) Text(value) else null: NodeSeq, next)
-
- /** same as this(pre, key, value.get, next), or no attribute if value is None */
- def this(pre: String, key: String, value: Option[Seq[Node]], next: MetaData) =
- this(pre, key, value.orNull, next)
-
- /** Returns a copy of this unprefixed attribute with the given
- * next field.
- */
- def copy(next: MetaData) =
- new PrefixedAttribute(pre, key, value, next)
-
- def getNamespace(owner: Node) =
- owner.getNamespace(pre)
-
- /** forwards the call to next (because caller looks for unprefixed attribute */
- def apply(key: String): Seq[Node] = next(key)
-
- /** gets attribute value of qualified (prefixed) attribute with given key
- */
- def apply(namespace: String, scope: NamespaceBinding, key: String): Seq[Node] = {
- if (key == this.key && scope.getURI(pre) == namespace)
- value
- else
- next(namespace, scope, key)
- }
-}
-
-object PrefixedAttribute {
- def unapply(x: PrefixedAttribute) = Some((x.pre, x.key, x.value, x.next))
-}
diff --git a/src/xml/scala/xml/PrettyPrinter.scala b/src/xml/scala/xml/PrettyPrinter.scala
deleted file mode 100755
index 9e01905357..0000000000
--- a/src/xml/scala/xml/PrettyPrinter.scala
+++ /dev/null
@@ -1,263 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package xml
-
-import Utility.sbToString
-
-/** Class for pretty printing. After instantiating, you can use the
- * format() and formatNode() methods to convert XML to a formatted
- * string. The class can be reused to pretty print any number of
- * XML nodes.
- *
- * @author Burak Emir
- * @version 1.0
- *
- * @param width the width to fit the output into
- * @param step indentation
- */
-class PrettyPrinter(width: Int, step: Int) {
-
- class BrokenException() extends java.lang.Exception
-
- class Item
- case object Break extends Item {
- override def toString() = "\\"
- }
- case class Box(col: Int, s: String) extends Item
- case class Para(s: String) extends Item
-
- protected var items: List[Item] = Nil
-
- protected var cur = 0
-
- protected def reset() = {
- cur = 0
- items = Nil
- }
-
- /** Try to cut at whitespace.
- */
- protected def cut(s: String, ind: Int): List[Item] = {
- val tmp = width - cur
- if (s.length <= tmp)
- return List(Box(ind, s))
- var i = s indexOf ' '
- if (i > tmp || i == -1) throw new BrokenException() // cannot break
-
- var last: List[Int] = Nil
- while (i != -1 && i < tmp) {
- last = i::last
- i = s.indexOf(' ', i+1)
- }
- var res: List[Item] = Nil
- while (Nil != last) try {
- val b = Box(ind, s.substring(0, last.head))
- cur = ind
- res = b :: Break :: cut(s.substring(last.head, s.length), ind)
- // backtrack
- last = last.tail
- } catch {
- case _:BrokenException => last = last.tail
- }
- throw new BrokenException()
- }
-
- /** Try to make indented box, if possible, else para.
- */
- protected def makeBox(ind: Int, s: String) =
- if (cur + s.length > width) { // fits in this line
- items ::= Box(ind, s)
- cur += s.length
- }
- else try cut(s, ind) foreach (items ::= _) // break it up
- catch { case _: BrokenException => makePara(ind, s) } // give up, para
-
- // dont respect indent in para, but afterwards
- protected def makePara(ind: Int, s: String) = {
- items = Break::Para(s)::Break::items
- cur = ind
- }
-
- // respect indent
- protected def makeBreak() = { // using wrapping here...
- items = Break :: items
- cur = 0
- }
-
- protected def leafTag(n: Node) = {
- def mkLeaf(sb: StringBuilder) {
- sb append '<'
- n nameToString sb
- n.attributes buildString sb
- sb append "/>"
- }
- sbToString(mkLeaf)
- }
-
- protected def startTag(n: Node, pscope: NamespaceBinding): (String, Int) = {
- var i = 0
- def mkStart(sb: StringBuilder) {
- sb append '<'
- n nameToString sb
- i = sb.length + 1
- n.attributes buildString sb
- n.scope.buildString(sb, pscope)
- sb append '>'
- }
- (sbToString(mkStart), i)
- }
-
- protected def endTag(n: Node) = {
- def mkEnd(sb: StringBuilder) {
- sb append "</"
- n nameToString sb
- sb append '>'
- }
- sbToString(mkEnd)
- }
-
- protected def childrenAreLeaves(n: Node): Boolean = {
- def isLeaf(l: Node) = l match {
- case _:Atom[_] | _:Comment | _:EntityRef | _:ProcInstr => true
- case _ => false
- }
- n.child forall isLeaf
- }
-
- protected def fits(test: String) =
- test.length < width - cur
-
- private def doPreserve(node: Node) =
- node.attribute(XML.namespace, XML.space).map(_.toString == XML.preserve) getOrElse false
-
- protected def traverse(node: Node, pscope: NamespaceBinding, ind: Int): Unit = node match {
-
- case Text(s) if s.trim() == "" =>
- ;
- case _:Atom[_] | _:Comment | _:EntityRef | _:ProcInstr =>
- makeBox( ind, node.toString().trim() )
- case g @ Group(xs) =>
- traverse(xs.iterator, pscope, ind)
- case _ =>
- val test = {
- val sb = new StringBuilder()
- Utility.serialize(node, pscope, sb, stripComments = false)
- if (doPreserve(node)) sb.toString
- else TextBuffer.fromString(sb.toString).toText(0).data
- }
- if (childrenAreLeaves(node) && fits(test)) {
- makeBox(ind, test)
- } else {
- val (stg, len2) = startTag(node, pscope)
- val etg = endTag(node)
- if (stg.length < width - cur) { // start tag fits
- makeBox(ind, stg)
- makeBreak()
- traverse(node.child.iterator, node.scope, ind + step)
- makeBox(ind, etg)
- } else if (len2 < width - cur) {
- // <start label + attrs + tag + content + end tag
- makeBox(ind, stg.substring(0, len2))
- makeBreak() // todo: break the rest in pieces
- /*{ //@todo
- val sq:Seq[String] = stg.split(" ");
- val it = sq.iterator;
- it.next;
- for (c <- it) {
- makeBox(ind+len2-2, c)
- makeBreak()
- }
- }*/
- makeBox(ind, stg.substring(len2, stg.length))
- makeBreak()
- traverse(node.child.iterator, node.scope, ind + step)
- makeBox(cur, etg)
- makeBreak()
- } else { // give up
- makeBox(ind, test)
- makeBreak()
- }
- }
- }
-
- protected def traverse(it: Iterator[Node], scope: NamespaceBinding, ind: Int ): Unit =
- for (c <- it) {
- traverse(c, scope, ind)
- makeBreak()
- }
-
- /** Appends a formatted string containing well-formed XML with
- * given namespace to prefix mapping to the given string buffer.
- *
- * @param n the node to be serialized
- * @param sb the stringbuffer to append to
- */
- def format(n: Node, sb: StringBuilder) { // entry point
- format(n, null, sb)
- }
-
- def format(n: Node, pscope: NamespaceBinding, sb: StringBuilder) { // entry point
- var lastwasbreak = false
- reset()
- traverse(n, pscope, 0)
- var cur = 0
- for (b <- items.reverse) b match {
- case Break =>
- if (!lastwasbreak) sb.append('\n') // on windows: \r\n ?
- lastwasbreak = true
- cur = 0
-// while (cur < last) {
-// sb append ' '
-// cur += 1
-// }
-
- case Box(i, s) =>
- lastwasbreak = false
- while (cur < i) {
- sb append ' '
- cur += 1
- }
- sb.append(s)
- case Para( s ) =>
- lastwasbreak = false
- sb append s
- }
- }
-
- // public convenience methods
-
- /** Returns a formatted string containing well-formed XML with
- * given namespace to prefix mapping.
- *
- * @param n the node to be serialized
- * @param pscope the namespace to prefix mapping
- * @return the formatted string
- */
- def format(n: Node, pscope: NamespaceBinding = null): String =
- sbToString(format(n, pscope, _))
-
- /** Returns a formatted string containing well-formed XML.
- *
- * @param nodes the sequence of nodes to be serialized
- * @param pscope the namespace to prefix mapping
- */
- def formatNodes(nodes: Seq[Node], pscope: NamespaceBinding = null): String =
- sbToString(formatNodes(nodes, pscope, _))
-
- /** Appends a formatted string containing well-formed XML with
- * the given namespace to prefix mapping to the given stringbuffer.
- *
- * @param nodes the nodes to be serialized
- * @param pscope the namespace to prefix mapping
- * @param sb the string buffer to which to append to
- */
- def formatNodes(nodes: Seq[Node], pscope: NamespaceBinding, sb: StringBuilder): Unit =
- nodes foreach (n => sb append format(n, pscope))
-}
diff --git a/src/xml/scala/xml/ProcInstr.scala b/src/xml/scala/xml/ProcInstr.scala
deleted file mode 100644
index 189c1c6878..0000000000
--- a/src/xml/scala/xml/ProcInstr.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala
-package xml
-
-/** an XML node for processing instructions (PI)
- *
- * @author Burak Emir
- * @param target target name of this PI
- * @param proctext text contained in this node, may not contain "?>"
- */
-case class ProcInstr(target: String, proctext: String) extends SpecialNode
-{
- if (!Utility.isName(target))
- throw new IllegalArgumentException(target+" must be an XML Name")
- if (proctext contains "?>")
- throw new IllegalArgumentException(proctext+" may not contain \"?>\"")
- if (target.toLowerCase == "xml")
- throw new IllegalArgumentException(target+" is reserved")
-
- final override def doCollectNamespaces = false
- final override def doTransform = false
-
- final def label = "#PI"
- override def text = ""
-
- /** appends &quot;&lt;?&quot; target (&quot; &quot;+text)?+&quot;?&gt;&quot;
- * to this stringbuffer.
- */
- override def buildString(sb: StringBuilder) =
- sb append "<?%s%s?>".format(target, (if (proctext == "") "" else " " + proctext))
-}
diff --git a/src/xml/scala/xml/QNode.scala b/src/xml/scala/xml/QNode.scala
deleted file mode 100644
index f9e3f1854b..0000000000
--- a/src/xml/scala/xml/QNode.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package xml
-
-/** This object provides an extractor method to match a qualified node with
- * its namespace URI
- *
- * @author Burak Emir
- * @version 1.0
- */
-object QNode {
- def unapplySeq(n: Node) = Some((n.scope.getURI(n.prefix), n.label, n.attributes, n.child))
-}
diff --git a/src/xml/scala/xml/SpecialNode.scala b/src/xml/scala/xml/SpecialNode.scala
deleted file mode 100644
index 5fef8ef66c..0000000000
--- a/src/xml/scala/xml/SpecialNode.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package xml
-
-/** `SpecialNode` is a special XML node which represents either text
- * `(PCDATA)`, a comment, a `PI`, or an entity ref.
- *
- * `SpecialNode`s also play the role of [[scala.xml.pull.XMLEvent]]s for
- * pull-parsing.
- *
- * @author Burak Emir
- */
-abstract class SpecialNode extends Node with pull.XMLEvent {
-
- /** always empty */
- final override def attributes = Null
-
- /** always Node.EmptyNamespace */
- final override def namespace = null
-
- /** always empty */
- final def child = Nil
-
- /** Append string representation to the given string buffer argument. */
- def buildString(sb: StringBuilder): StringBuilder
-}
diff --git a/src/xml/scala/xml/Text.scala b/src/xml/scala/xml/Text.scala
deleted file mode 100644
index debea0c025..0000000000
--- a/src/xml/scala/xml/Text.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package xml
-
-/** The class `Text` implements an XML node for text (PCDATA).
- * It is used in both non-bound and bound XML representations.
- *
- * @author Burak Emir
- * @param data the text contained in this node, may not be null.
- */
-class Text(data: String) extends Atom[String](data) {
-
- /** Returns text, with some characters escaped according to the XML
- * specification.
- */
- override def buildString(sb: StringBuilder): StringBuilder =
- Utility.escape(data, sb)
-}
-
-/** This singleton object contains the `apply`and `unapply` methods for
- * convenient construction and deconstruction.
- *
- * @author Burak Emir
- * @version 1.0
- */
-object Text {
- def apply(data: String) = new Text(data)
- def unapply(other: Any): Option[String] = other match {
- case x: Text => Some(x.data)
- case _ => None
- }
-}
diff --git a/src/xml/scala/xml/TextBuffer.scala b/src/xml/scala/xml/TextBuffer.scala
deleted file mode 100644
index 514b1701af..0000000000
--- a/src/xml/scala/xml/TextBuffer.scala
+++ /dev/null
@@ -1,46 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala
-package xml
-
-import Utility.isSpace
-
-object TextBuffer {
- def fromString(str: String): TextBuffer = new TextBuffer() append str
-}
-
-/** The class `TextBuffer` is for creating text nodes without surplus
- * whitespace. All occurrences of one or more whitespace in strings
- * appended with the `append` method will be replaced by a single space
- * character, and leading and trailing space will be removed completely.
- */
-class TextBuffer
-{
- val sb = new StringBuilder()
-
- /** Appends this string to the text buffer, trimming whitespaces as needed.
- */
- def append(cs: Seq[Char]): this.type = {
- cs foreach { c =>
- if (!isSpace(c)) sb append c
- else if (sb.isEmpty || !isSpace(sb.last)) sb append ' '
- }
- this
- }
-
- /** Returns an empty sequence if text is only whitespace.
- *
- * @return the text without whitespaces.
- */
- def toText: Seq[Text] = sb.toString.trim match {
- case "" => Nil
- case s => Seq(Text(s))
- }
-}
diff --git a/src/xml/scala/xml/TopScope.scala b/src/xml/scala/xml/TopScope.scala
deleted file mode 100644
index 474fbbbdb5..0000000000
--- a/src/xml/scala/xml/TopScope.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala
-package xml
-
-/** top level namespace scope. only contains the predefined binding
- * for the &quot;xml&quot; prefix which is bound to
- * &quot;http://www.w3.org/XML/1998/namespace&quot;
- */
-object TopScope extends NamespaceBinding(null, null, null) {
-
- import XML.{ xml, namespace }
-
- override def getURI(prefix1: String): String =
- if (prefix1 == xml) namespace else null
-
- override def getPrefix(uri1: String): String =
- if (uri1 == namespace) xml else null
-
- override def toString() = ""
-
- override def buildString(stop: NamespaceBinding) = ""
- override def buildString(sb: StringBuilder, ignore: NamespaceBinding) = {}
-}
diff --git a/src/xml/scala/xml/TypeSymbol.scala b/src/xml/scala/xml/TypeSymbol.scala
deleted file mode 100644
index fb371ee340..0000000000
--- a/src/xml/scala/xml/TypeSymbol.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala
-package xml
-
-
-abstract class TypeSymbol
diff --git a/src/xml/scala/xml/Unparsed.scala b/src/xml/scala/xml/Unparsed.scala
deleted file mode 100644
index bc190eb724..0000000000
--- a/src/xml/scala/xml/Unparsed.scala
+++ /dev/null
@@ -1,36 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package xml
-
-/** An XML node for unparsed content. It will be output verbatim, all bets
- * are off regarding wellformedness etc.
- *
- * @author Burak Emir
- * @param data content in this node, may not be null.
- */
-class Unparsed(data: String) extends Atom[String](data) {
-
- /** Returns text, with some characters escaped according to XML
- * specification.
- */
- override def buildString(sb: StringBuilder): StringBuilder =
- sb append data
-}
-
-/** This singleton object contains the `apply`and `unapply` methods for
- * convenient construction and deconstruction.
- *
- * @author Burak Emir
- * @version 1.0
- */
-object Unparsed {
- def apply(data: String) = new Unparsed(data)
- def unapply(x: Unparsed) = Some(x.data)
-}
diff --git a/src/xml/scala/xml/UnprefixedAttribute.scala b/src/xml/scala/xml/UnprefixedAttribute.scala
deleted file mode 100644
index 6fa827da5f..0000000000
--- a/src/xml/scala/xml/UnprefixedAttribute.scala
+++ /dev/null
@@ -1,61 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala
-package xml
-
-/** Unprefixed attributes have the null namespace, and no prefix field
- *
- * @author Burak Emir
- */
-class UnprefixedAttribute(
- val key: String,
- val value: Seq[Node],
- next1: MetaData)
-extends Attribute
-{
- final val pre = null
- val next = if (value ne null) next1 else next1.remove(key)
-
- /** same as this(key, Text(value), next), or no attribute if value is null */
- def this(key: String, value: String, next: MetaData) =
- this(key, if (value ne null) Text(value) else null: NodeSeq, next)
-
- /** same as this(key, value.get, next), or no attribute if value is None */
- def this(key: String, value: Option[Seq[Node]], next: MetaData) =
- this(key, value.orNull, next)
-
- /** returns a copy of this unprefixed attribute with the given next field*/
- def copy(next: MetaData) = new UnprefixedAttribute(key, value, next)
-
- final def getNamespace(owner: Node): String = null
-
- /**
- * Gets value of unqualified (unprefixed) attribute with given key, null if not found
- *
- * @param key
- * @return value as Seq[Node] if key is found, null otherwise
- */
- def apply(key: String): Seq[Node] =
- if (key == this.key) value else next(key)
-
- /**
- * Forwards the call to next (because caller looks for prefixed attribute).
- *
- * @param namespace
- * @param scope
- * @param key
- * @return ..
- */
- def apply(namespace: String, scope: NamespaceBinding, key: String): Seq[Node] =
- next(namespace, scope, key)
-}
-object UnprefixedAttribute {
- def unapply(x: UnprefixedAttribute) = Some((x.key, x.value, x.next))
-}
diff --git a/src/xml/scala/xml/Utility.scala b/src/xml/scala/xml/Utility.scala
deleted file mode 100755
index 9134476401..0000000000
--- a/src/xml/scala/xml/Utility.scala
+++ /dev/null
@@ -1,410 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package xml
-
-import scala.collection.mutable
-import parsing.XhtmlEntities
-import scala.language.implicitConversions
-
-/**
- * The `Utility` object provides utility functions for processing instances
- * of bound and not bound XML classes, as well as escaping text nodes.
- *
- * @author Burak Emir
- */
-object Utility extends AnyRef with parsing.TokenTests {
- final val SU = '\u001A'
-
- // [Martin] This looks dubious. We don't convert StringBuilders to
- // Strings anywhere else, why do it here?
- implicit def implicitSbToString(sb: StringBuilder) = sb.toString()
-
- // helper for the extremely oft-repeated sequence of creating a
- // StringBuilder, passing it around, and then grabbing its String.
- private [xml] def sbToString(f: (StringBuilder) => Unit): String = {
- val sb = new StringBuilder
- f(sb)
- sb.toString
- }
- private[xml] def isAtomAndNotText(x: Node) = x.isAtom && !x.isInstanceOf[Text]
-
- /** Trims an element - call this method, when you know that it is an
- * element (and not a text node) so you know that it will not be trimmed
- * away. With this assumption, the function can return a `Node`, rather
- * than a `Seq[Node]`. If you don't know, call `trimProper` and account
- * for the fact that you may get back an empty sequence of nodes.
- *
- * Precondition: node is not a text node (it might be trimmed)
- */
- def trim(x: Node): Node = x match {
- case Elem(pre, lab, md, scp, child@_*) =>
- Elem(pre, lab, md, scp, (child flatMap trimProper):_*)
- }
-
- /** trim a child of an element. `Attribute` values and `Atom` nodes that
- * are not `Text` nodes are unaffected.
- */
- def trimProper(x:Node): Seq[Node] = x match {
- case Elem(pre,lab,md,scp,child@_*) =>
- Elem(pre,lab,md,scp, (child flatMap trimProper):_*)
- case Text(s) =>
- new TextBuffer().append(s).toText
- case _ =>
- x
- }
-
- /** returns a sorted attribute list */
- def sort(md: MetaData): MetaData = if((md eq Null) || (md.next eq Null)) md else {
- val key = md.key
- val smaller = sort(md.filter { m => m.key < key })
- val greater = sort(md.filter { m => m.key > key })
- smaller.foldRight (md copy greater) ((x, xs) => x copy xs)
- }
-
- /** Return the node with its attribute list sorted alphabetically
- * (prefixes are ignored) */
- def sort(n:Node): Node = n match {
- case Elem(pre,lab,md,scp,child@_*) =>
- Elem(pre,lab,sort(md),scp, (child map sort):_*)
- case _ => n
- }
-
- /**
- * Escapes the characters &lt; &gt; &amp; and &quot; from string.
- */
- final def escape(text: String): String = sbToString(escape(text, _))
-
- object Escapes {
- /** For reasons unclear escape and unescape are a long ways from
- * being logical inverses. */
- val pairs = Map(
- "lt" -> '<',
- "gt" -> '>',
- "amp" -> '&',
- "quot" -> '"'
- // enigmatic comment explaining why this isn't escaped --
- // is valid xhtml but not html, and IE doesn't know it, says jweb
- // "apos" -> '\''
- )
- val escMap = pairs map { case (s, c) => c-> ("&%s;" format s) }
- val unescMap = pairs ++ Map("apos" -> '\'')
- }
- import Escapes.{ escMap, unescMap }
-
- /**
- * Appends escaped string to `s`.
- */
- final def escape(text: String, s: StringBuilder): StringBuilder = {
- // Implemented per XML spec:
- // http://www.w3.org/International/questions/qa-controls
- // imperative code 3x-4x faster than current implementation
- // dpp (David Pollak) 2010/02/03
- val len = text.length
- var pos = 0
- while (pos < len) {
- text.charAt(pos) match {
- case '<' => s.append("&lt;")
- case '>' => s.append("&gt;")
- case '&' => s.append("&amp;")
- case '"' => s.append("&quot;")
- case '\n' => s.append('\n')
- case '\r' => s.append('\r')
- case '\t' => s.append('\t')
- case c => if (c >= ' ') s.append(c)
- }
-
- pos += 1
- }
- s
- }
-
- /**
- * Appends unescaped string to `s`, `amp` becomes `&amp;`,
- * `lt` becomes `&lt;` etc..
- *
- * @return `'''null'''` if `ref` was not a predefined entity.
- */
- final def unescape(ref: String, s: StringBuilder): StringBuilder =
- ((unescMap get ref) map (s append _)).orNull
-
- /**
- * Returns a set of all namespaces used in a sequence of nodes
- * and all their descendants, including the empty namespaces.
- */
- def collectNamespaces(nodes: Seq[Node]): mutable.Set[String] =
- nodes.foldLeft(new mutable.HashSet[String]) { (set, x) => collectNamespaces(x, set) ; set }
-
- /**
- * Adds all namespaces in node to set.
- */
- def collectNamespaces(n: Node, set: mutable.Set[String]) {
- if (n.doCollectNamespaces) {
- set += n.namespace
- for (a <- n.attributes) a match {
- case _:PrefixedAttribute =>
- set += a.getNamespace(n)
- case _ =>
- }
- for (i <- n.child)
- collectNamespaces(i, set)
- }
- }
-
- // def toXML(
- // x: Node,
- // pscope: NamespaceBinding = TopScope,
- // sb: StringBuilder = new StringBuilder,
- // stripComments: Boolean = false,
- // decodeEntities: Boolean = true,
- // preserveWhitespace: Boolean = false,
- // minimizeTags: Boolean = false): String =
- // {
- // toXMLsb(x, pscope, sb, stripComments, decodeEntities, preserveWhitespace, minimizeTags)
- // sb.toString()
- // }
-
- /**
- * Serialize the provided Node to the provided StringBuilder.
- * <p/>
- * Note that calling this source-compatible method will result in the same old, arguably almost universally unwanted,
- * behaviour.
- */
- @deprecated("Please use `serialize` instead and specify a `minimizeTags` parameter", "2.10.0")
- def toXML(
- x: Node,
- pscope: NamespaceBinding = TopScope,
- sb: StringBuilder = new StringBuilder,
- stripComments: Boolean = false,
- decodeEntities: Boolean = true,
- preserveWhitespace: Boolean = false,
- minimizeTags: Boolean = false): StringBuilder =
- {
- serialize(x, pscope, sb, stripComments, decodeEntities, preserveWhitespace, if (minimizeTags) MinimizeMode.Always else MinimizeMode.Never)
- }
-
- /**
- * Serialize an XML Node to a StringBuilder.
- *
- * This is essentially a minor rework of `toXML` that can't have the same name due to an unfortunate
- * combination of named/default arguments and overloading.
- *
- * @todo use a Writer instead
- */
- def serialize(
- x: Node,
- pscope: NamespaceBinding = TopScope,
- sb: StringBuilder = new StringBuilder,
- stripComments: Boolean = false,
- decodeEntities: Boolean = true,
- preserveWhitespace: Boolean = false,
- minimizeTags: MinimizeMode.Value = MinimizeMode.Default): StringBuilder =
- {
- x match {
- case c: Comment if !stripComments => c buildString sb
- case s: SpecialNode => s buildString sb
- case g: Group => for (c <- g.nodes) serialize(c, g.scope, sb, minimizeTags = minimizeTags) ; sb
- case el: Elem =>
- // print tag with namespace declarations
- sb.append('<')
- el.nameToString(sb)
- if (el.attributes ne null) el.attributes.buildString(sb)
- el.scope.buildString(sb, pscope)
- if (el.child.isEmpty &&
- (minimizeTags == MinimizeMode.Always ||
- (minimizeTags == MinimizeMode.Default && el.minimizeEmpty)))
- {
- // no children, so use short form: <xyz .../>
- sb.append("/>")
- } else {
- // children, so use long form: <xyz ...>...</xyz>
- sb.append('>')
- sequenceToXML(el.child, el.scope, sb, stripComments)
- sb.append("</")
- el.nameToString(sb)
- sb.append('>')
- }
- case _ => throw new IllegalArgumentException("Don't know how to serialize a " + x.getClass.getName)
- }
- }
-
- def sequenceToXML(
- children: Seq[Node],
- pscope: NamespaceBinding = TopScope,
- sb: StringBuilder = new StringBuilder,
- stripComments: Boolean = false,
- decodeEntities: Boolean = true,
- preserveWhitespace: Boolean = false,
- minimizeTags: MinimizeMode.Value = MinimizeMode.Default): Unit =
- {
- if (children.isEmpty) return
- else if (children forall isAtomAndNotText) { // add space
- val it = children.iterator
- val f = it.next()
- serialize(f, pscope, sb, stripComments, decodeEntities, preserveWhitespace, minimizeTags)
- while (it.hasNext) {
- val x = it.next()
- sb.append(' ')
- serialize(x, pscope, sb, stripComments, decodeEntities, preserveWhitespace, minimizeTags)
- }
- }
- else children foreach { serialize(_, pscope, sb, stripComments, decodeEntities, preserveWhitespace, minimizeTags) }
- }
-
- /**
- * Returns prefix of qualified name if any.
- */
- final def prefix(name: String): Option[String] = (name indexOf ':') match {
- case -1 => None
- case i => Some(name.substring(0, i))
- }
-
- /**
- * Returns a hashcode for the given constituents of a node
- */
- def hashCode(pre: String, label: String, attribHashCode: Int, scpeHash: Int, children: Seq[Node]) =
- scala.util.hashing.MurmurHash3.orderedHash(label +: attribHashCode +: scpeHash +: children, pre.##)
-
- def appendQuoted(s: String): String = sbToString(appendQuoted(s, _))
-
- /**
- * Appends &quot;s&quot; if string `s` does not contain &quot;,
- * &apos;s&apos; otherwise.
- */
- def appendQuoted(s: String, sb: StringBuilder) = {
- val ch = if (s contains '"') '\'' else '"'
- sb.append(ch).append(s).append(ch)
- }
-
- /**
- * Appends &quot;s&quot; and escapes and &quot; i s with \&quot;
- */
- def appendEscapedQuoted(s: String, sb: StringBuilder): StringBuilder = {
- sb.append('"')
- for (c <- s) c match {
- case '"' => sb.append('\\'); sb.append('"')
- case _ => sb.append(c)
- }
- sb.append('"')
- }
-
- def getName(s: String, index: Int): String = {
- if (index >= s.length) null
- else {
- val xs = s drop index
- if (xs.nonEmpty && isNameStart(xs.head)) xs takeWhile isNameChar
- else ""
- }
- }
-
- /**
- * Returns `'''null'''` if the value is a correct attribute value,
- * error message if it isn't.
- */
- def checkAttributeValue(value: String): String = {
- var i = 0
- while (i < value.length) {
- value.charAt(i) match {
- case '<' =>
- return "< not allowed in attribute value"
- case '&' =>
- val n = getName(value, i+1)
- if (n eq null)
- return "malformed entity reference in attribute value ["+value+"]"
- i = i + n.length + 1
- if (i >= value.length || value.charAt(i) != ';')
- return "malformed entity reference in attribute value ["+value+"]"
- case _ =>
- }
- i = i + 1
- }
- null
- }
-
- def parseAttributeValue(value: String): Seq[Node] = {
- val sb = new StringBuilder
- var rfb: StringBuilder = null
- val nb = new NodeBuffer()
-
- val it = value.iterator
- while (it.hasNext) {
- var c = it.next()
- // entity! flush buffer into text node
- if (c == '&') {
- c = it.next()
- if (c == '#') {
- c = it.next()
- val theChar = parseCharRef ({ ()=> c },{ () => c = it.next() },{s => throw new RuntimeException(s)}, {s => throw new RuntimeException(s)})
- sb.append(theChar)
- }
- else {
- if (rfb eq null) rfb = new StringBuilder()
- rfb append c
- c = it.next()
- while (c != ';') {
- rfb.append(c)
- c = it.next()
- }
- val ref = rfb.toString()
- rfb.clear()
- unescape(ref,sb) match {
- case null =>
- if (sb.length > 0) { // flush buffer
- nb += Text(sb.toString())
- sb.clear()
- }
- nb += EntityRef(ref) // add entityref
- case _ =>
- }
- }
- }
- else sb append c
- }
- if (sb.length > 0) { // flush buffer
- val x = Text(sb.toString())
- if (nb.length == 0)
- return x
- else
- nb += x
- }
- nb
- }
-
- /**
- * {{{
- * CharRef ::= "&amp;#" '0'..'9' {'0'..'9'} ";"
- * | "&amp;#x" '0'..'9'|'A'..'F'|'a'..'f' { hexdigit } ";"
- * }}}
- * See [66]
- */
- def parseCharRef(ch: () => Char, nextch: () => Unit, reportSyntaxError: String => Unit, reportTruncatedError: String => Unit): String = {
- val hex = (ch() == 'x') && { nextch(); true }
- val base = if (hex) 16 else 10
- var i = 0
- while (ch() != ';') {
- ch() match {
- case '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' =>
- i = i * base + ch().asDigit
- case 'a' | 'b' | 'c' | 'd' | 'e' | 'f'
- | 'A' | 'B' | 'C' | 'D' | 'E' | 'F' =>
- if (! hex)
- reportSyntaxError("hex char not allowed in decimal char ref\n" +
- "Did you mean to write &#x ?")
- else
- i = i * base + ch().asDigit
- case SU =>
- reportTruncatedError("")
- case _ =>
- reportSyntaxError("character '" + ch() + "' not allowed in char ref\n")
- }
- nextch()
- }
- new String(Array(i), 0, 1)
- }
-}
diff --git a/src/xml/scala/xml/XML.scala b/src/xml/scala/xml/XML.scala
deleted file mode 100755
index 020264e509..0000000000
--- a/src/xml/scala/xml/XML.scala
+++ /dev/null
@@ -1,109 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package xml
-
-import parsing.NoBindingFactoryAdapter
-import factory.XMLLoader
-import java.io.{ File, FileDescriptor, FileInputStream, FileOutputStream }
-import java.io.{ InputStream, Reader, StringReader, Writer }
-import java.nio.channels.Channels
-import scala.util.control.Exception.ultimately
-
-object Source {
- def fromFile(file: File) = new InputSource(new FileInputStream(file))
- def fromFile(fd: FileDescriptor) = new InputSource(new FileInputStream(fd))
- def fromFile(name: String) = new InputSource(new FileInputStream(name))
-
- def fromInputStream(is: InputStream) = new InputSource(is)
- def fromReader(reader: Reader) = new InputSource(reader)
- def fromSysId(sysID: String) = new InputSource(sysID)
- def fromString(string: String) = fromReader(new StringReader(string))
-}
-
-/**
- * Governs how empty elements (i.e. those without child elements) should be serialized.
- */
-object MinimizeMode extends Enumeration {
- /** Minimize empty tags if they were originally empty when parsed, or if they were constructed
- * with [[scala.xml.Elem]]`#minimizeEmpty` == true
- */
- val Default = Value
-
- /** Always minimize empty tags. Note that this may be problematic for XHTML, in which
- * case [[scala.xml.Xhtml]]`#toXhtml` should be used instead.
- */
- val Always = Value
-
- /** Never minimize empty tags.
- */
- val Never = Value
-}
-
-/** The object `XML` provides constants, and functions to load
- * and save XML elements. Use this when data binding is not desired, i.e.
- * when XML is handled using `Symbol` nodes.
- *
- * @author Burak Emir
- * @version 1.0, 25/04/2005
- */
-object XML extends XMLLoader[Elem] {
- val xml = "xml"
- val xmlns = "xmlns"
- val namespace = "http://www.w3.org/XML/1998/namespace"
- val preserve = "preserve"
- val space = "space"
- val lang = "lang"
- val encoding = "ISO-8859-1"
-
- /** Returns an XMLLoader whose load* methods will use the supplied SAXParser. */
- def withSAXParser(p: SAXParser): XMLLoader[Elem] =
- new XMLLoader[Elem] { override val parser: SAXParser = p }
-
- /** Saves a node to a file with given filename using given encoding
- * optionally with xmldecl and doctype declaration.
- *
- * @param filename the filename
- * @param node the xml node we want to write
- * @param enc encoding to use
- * @param xmlDecl if true, write xml declaration
- * @param doctype if not null, write doctype declaration
- */
- final def save(
- filename: String,
- node: Node,
- enc: String = encoding,
- xmlDecl: Boolean = false,
- doctype: dtd.DocType = null
- ): Unit =
- {
- val fos = new FileOutputStream(filename)
- val w = Channels.newWriter(fos.getChannel(), enc)
-
- ultimately(w.close())(
- write(w, node, enc, xmlDecl, doctype)
- )
- }
-
- /** Writes the given node using writer, optionally with xml decl and doctype.
- * It's the caller's responsibility to close the writer.
- *
- * @param w the writer
- * @param node the xml node we want to write
- * @param enc the string to be used in `xmlDecl`
- * @param xmlDecl if true, write xml declaration
- * @param doctype if not null, write doctype declaration
- */
- final def write(w: java.io.Writer, node: Node, enc: String, xmlDecl: Boolean, doctype: dtd.DocType, minimizeTags: MinimizeMode.Value = MinimizeMode.Default) {
- /* TODO: optimize by giving writer parameter to toXML*/
- if (xmlDecl) w.write("<?xml version='1.0' encoding='" + enc + "'?>\n")
- if (doctype ne null) w.write( doctype.toString() + "\n")
- w.write(Utility.serialize(node, minimizeTags = minimizeTags).toString)
- }
-}
diff --git a/src/xml/scala/xml/Xhtml.scala b/src/xml/scala/xml/Xhtml.scala
deleted file mode 100644
index 6a12c1a89a..0000000000
--- a/src/xml/scala/xml/Xhtml.scala
+++ /dev/null
@@ -1,97 +0,0 @@
-
-package scala
-package xml
-
-import parsing.XhtmlEntities
-import Utility.{ sbToString, isAtomAndNotText }
-
-/* (c) David Pollak 2007 WorldWide Conferencing, LLC */
-
-object Xhtml
-{
- /**
- * Convenience function: same as toXhtml(node, false, false)
- *
- * @param node the node
- */
- def toXhtml(node: Node): String = sbToString(sb => toXhtml(x = node, sb = sb))
-
- /**
- * Convenience function: amounts to calling toXhtml(node) on each
- * node in the sequence.
- *
- * @param nodeSeq the node sequence
- */
- def toXhtml(nodeSeq: NodeSeq): String = sbToString(sb => sequenceToXML(nodeSeq: Seq[Node], sb = sb))
-
- /** Elements which we believe are safe to minimize if minimizeTags is true.
- * See http://www.w3.org/TR/xhtml1/guidelines.html#C_3
- */
- private val minimizableElements =
- List("base", "meta", "link", "hr", "br", "param", "img", "area", "input", "col")
-
- def toXhtml(
- x: Node,
- pscope: NamespaceBinding = TopScope,
- sb: StringBuilder = new StringBuilder,
- stripComments: Boolean = false,
- decodeEntities: Boolean = false,
- preserveWhitespace: Boolean = false,
- minimizeTags: Boolean = true): Unit =
- {
- def decode(er: EntityRef) = XhtmlEntities.entMap.get(er.entityName) match {
- case Some(chr) if chr.toInt >= 128 => sb.append(chr)
- case _ => er.buildString(sb)
- }
- def shortForm =
- minimizeTags &&
- (x.child == null || x.child.length == 0) &&
- (minimizableElements contains x.label)
-
- x match {
- case c: Comment => if (!stripComments) c buildString sb
- case er: EntityRef if decodeEntities => decode(er)
- case x: SpecialNode => x buildString sb
- case g: Group =>
- g.nodes foreach { toXhtml(_, x.scope, sb, stripComments, decodeEntities, preserveWhitespace, minimizeTags) }
-
- case _ =>
- sb.append('<')
- x.nameToString(sb)
- if (x.attributes ne null) x.attributes.buildString(sb)
- x.scope.buildString(sb, pscope)
-
- if (shortForm) sb.append(" />")
- else {
- sb.append('>')
- sequenceToXML(x.child, x.scope, sb, stripComments, decodeEntities, preserveWhitespace, minimizeTags)
- sb.append("</")
- x.nameToString(sb)
- sb.append('>')
- }
- }
- }
-
- /**
- * Amounts to calling toXhtml(node, ...) with the given parameters on each node.
- */
- def sequenceToXML(
- children: Seq[Node],
- pscope: NamespaceBinding = TopScope,
- sb: StringBuilder = new StringBuilder,
- stripComments: Boolean = false,
- decodeEntities: Boolean = false,
- preserveWhitespace: Boolean = false,
- minimizeTags: Boolean = true): Unit =
- {
- if (children.isEmpty)
- return
-
- val doSpaces = children forall isAtomAndNotText // interleave spaces
- for (c <- children.take(children.length - 1)) {
- toXhtml(c, pscope, sb, stripComments, decodeEntities, preserveWhitespace, minimizeTags)
- if (doSpaces) sb append ' '
- }
- toXhtml(children.last, pscope, sb, stripComments, decodeEntities, preserveWhitespace, minimizeTags)
- }
-}
diff --git a/src/xml/scala/xml/dtd/ContentModel.scala b/src/xml/scala/xml/dtd/ContentModel.scala
deleted file mode 100644
index 4007985dce..0000000000
--- a/src/xml/scala/xml/dtd/ContentModel.scala
+++ /dev/null
@@ -1,118 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package xml
-package dtd
-
-import scala.xml.dtd.impl._
-import scala.xml.Utility.sbToString
-import PartialFunction._
-
-object ContentModel extends WordExp {
- type _labelT = ElemName
- type _regexpT = RegExp
-
- object Translator extends WordBerrySethi {
- override val lang: ContentModel.this.type = ContentModel.this
- }
-
- case class ElemName(name: String) extends Label {
- override def toString() = """ElemName("%s")""" format name
- }
-
- def isMixed(cm: ContentModel) = cond(cm) { case _: MIXED => true }
- def containsText(cm: ContentModel) = (cm == PCDATA) || isMixed(cm)
- def parse(s: String): ContentModel = ContentModelParser.parse(s)
-
- def getLabels(r: RegExp): Set[String] = {
- def traverse(r: RegExp): Set[String] = r match { // !!! check for match translation problem
- case Letter(ElemName(name)) => Set(name)
- case Star( x @ _ ) => traverse( x ) // bug if x@_*
- case Sequ( xs @ _* ) => Set(xs flatMap traverse: _*)
- case Alt( xs @ _* ) => Set(xs flatMap traverse: _*)
- }
-
- traverse(r)
- }
-
- def buildString(r: RegExp): String = sbToString(buildString(r, _))
-
- /* precond: rs.length >= 1 */
- private def buildString(rs: Seq[RegExp], sb: StringBuilder, sep: Char) {
- buildString(rs.head, sb)
- for (z <- rs.tail) {
- sb append sep
- buildString(z, sb)
- }
- }
-
- def buildString(c: ContentModel, sb: StringBuilder): StringBuilder = c match {
- case ANY => sb append "ANY"
- case EMPTY => sb append "EMPTY"
- case PCDATA => sb append "(#PCDATA)"
- case ELEMENTS(_) | MIXED(_) => c buildString sb
- }
-
- def buildString(r: RegExp, sb: StringBuilder): StringBuilder =
- r match { // !!! check for match translation problem
- case Eps =>
- sb
- case Sequ(rs @ _*) =>
- sb.append( '(' ); buildString(rs, sb, ','); sb.append( ')' )
- case Alt(rs @ _*) =>
- sb.append( '(' ); buildString(rs, sb, '|'); sb.append( ')' )
- case Star(r: RegExp) =>
- sb.append( '(' ); buildString(r, sb); sb.append( ")*" )
- case Letter(ElemName(name)) =>
- sb.append(name)
- }
-
-}
-
-sealed abstract class ContentModel
-{
- override def toString(): String = sbToString(buildString)
- def buildString(sb: StringBuilder): StringBuilder
-}
-
-case object PCDATA extends ContentModel {
- override def buildString(sb: StringBuilder): StringBuilder = sb.append("(#PCDATA)")
-}
-case object EMPTY extends ContentModel {
- override def buildString(sb: StringBuilder): StringBuilder = sb.append("EMPTY")
-}
-case object ANY extends ContentModel {
- override def buildString(sb: StringBuilder): StringBuilder = sb.append("ANY")
-}
-sealed abstract class DFAContentModel extends ContentModel {
- import ContentModel.{ ElemName, Translator }
- def r: ContentModel.RegExp
-
- lazy val dfa: DetWordAutom[ElemName] = {
- val nfa = Translator.automatonFrom(r, 1)
- new SubsetConstruction(nfa).determinize
- }
-}
-
-case class MIXED(r: ContentModel.RegExp) extends DFAContentModel {
- import ContentModel.{ Alt, RegExp }
-
- override def buildString(sb: StringBuilder): StringBuilder = {
- val newAlt = r match { case Alt(rs @ _*) => Alt(rs drop 1: _*) }
-
- sb append "(#PCDATA|"
- ContentModel.buildString(newAlt: RegExp, sb)
- sb append ")*"
- }
-}
-
-case class ELEMENTS(r: ContentModel.RegExp) extends DFAContentModel {
- override def buildString(sb: StringBuilder): StringBuilder =
- ContentModel.buildString(r, sb)
-}
diff --git a/src/xml/scala/xml/dtd/ContentModelParser.scala b/src/xml/scala/xml/dtd/ContentModelParser.scala
deleted file mode 100644
index 71b391c422..0000000000
--- a/src/xml/scala/xml/dtd/ContentModelParser.scala
+++ /dev/null
@@ -1,129 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package xml
-package dtd
-
-/** Parser for regexps (content models in DTD element declarations) */
-
-object ContentModelParser extends Scanner { // a bit too permissive concerning #PCDATA
- import ContentModel._
-
- /** parses the argument to a regexp */
- def parse(s: String): ContentModel = { initScanner(s); contentspec }
-
- def accept(tok: Int) = {
- if (token != tok) {
- if ((tok == STAR) && (token == END)) // common mistake
- scala.sys.error("in DTDs, \n"+
- "mixed content models must be like (#PCDATA|Name|Name|...)*")
- else
- scala.sys.error("expected "+token2string(tok)+
- ", got unexpected token:"+token2string(token))
- }
- nextToken()
- }
-
- // s [ '+' | '*' | '?' ]
- def maybeSuffix(s: RegExp) = token match {
- case STAR => nextToken(); Star(s)
- case PLUS => nextToken(); Sequ(s, Star(s))
- case OPT => nextToken(); Alt(Eps, s)
- case _ => s
- }
-
- // contentspec ::= EMPTY | ANY | (#PCDATA) | "(#PCDATA|"regexp)
-
- def contentspec: ContentModel = token match {
-
- case NAME => value match {
- case "ANY" => ANY
- case "EMPTY" => EMPTY
- case _ => scala.sys.error("expected ANY, EMPTY or '(' instead of " + value )
- }
- case LPAREN =>
-
- nextToken()
- sOpt()
- if (token != TOKEN_PCDATA)
- ELEMENTS(regexp)
- else {
- nextToken()
- token match {
- case RPAREN =>
- PCDATA
- case CHOICE =>
- val res = MIXED(choiceRest(Eps))
- sOpt()
- accept( RPAREN )
- accept( STAR )
- res
- case _ =>
- scala.sys.error("unexpected token:" + token2string(token) )
- }
- }
-
- case _ =>
- scala.sys.error("unexpected token:" + token2string(token) )
- }
- // sopt ::= S?
- def sOpt() = if( token == S ) nextToken()
-
- // (' S? mixed ::= '#PCDATA' S? ')'
- // | '#PCDATA' (S? '|' S? atom)* S? ')*'
-
- // '(' S? regexp ::= cp S? [seqRest|choiceRest] ')' [ '+' | '*' | '?' ]
- def regexp: RegExp = {
- val p = particle
- sOpt()
- maybeSuffix(token match {
- case RPAREN => nextToken(); p
- case CHOICE => val q = choiceRest( p );accept( RPAREN ); q
- case COMMA => val q = seqRest( p ); accept( RPAREN ); q
- })
- }
-
- // seqRest ::= (',' S? cp S?)+
- def seqRest(p: RegExp) = {
- var k = List(p)
- while( token == COMMA ) {
- nextToken()
- sOpt()
- k = particle::k
- sOpt()
- }
- Sequ( k.reverse:_* )
- }
-
- // choiceRest ::= ('|' S? cp S?)+
- def choiceRest( p:RegExp ) = {
- var k = List( p )
- while( token == CHOICE ) {
- nextToken()
- sOpt()
- k = particle::k
- sOpt()
- }
- Alt( k.reverse:_* )
- }
-
- // particle ::= '(' S? regexp
- // | name [ '+' | '*' | '?' ]
- def particle = token match {
- case LPAREN => nextToken(); sOpt(); regexp
- case NAME => val a = Letter(ElemName(value)); nextToken(); maybeSuffix(a)
- case _ => scala.sys.error("expected '(' or Name, got:"+token2string(token))
- }
-
- // atom ::= name
- def atom = token match {
- case NAME => val a = Letter(ElemName(value)); nextToken(); a
- case _ => scala.sys.error("expected Name, got:"+token2string(token))
- }
-}
diff --git a/src/xml/scala/xml/dtd/DTD.scala b/src/xml/scala/xml/dtd/DTD.scala
deleted file mode 100644
index 16a824fe2c..0000000000
--- a/src/xml/scala/xml/dtd/DTD.scala
+++ /dev/null
@@ -1,35 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala
-package xml
-package dtd
-
-import scala.collection.mutable
-
-/** A document type declaration.
- *
- * @author Burak Emir
- */
-abstract class DTD {
- var externalID: ExternalID = null
- var decls: List[Decl] = Nil
- def notations: Seq[NotationDecl] = Nil
- def unparsedEntities: Seq[EntityDecl] = Nil
-
- var elem: mutable.Map[String, ElemDecl] = new mutable.HashMap[String, ElemDecl]()
- var attr: mutable.Map[String, AttListDecl] = new mutable.HashMap[String, AttListDecl]()
- var ent: mutable.Map[String, EntityDecl] = new mutable.HashMap[String, EntityDecl]()
-
- override def toString() =
- "DTD [\n%s%s]".format(
- Option(externalID) getOrElse "",
- decls.mkString("", "\n", "\n")
- )
-}
diff --git a/src/xml/scala/xml/dtd/Decl.scala b/src/xml/scala/xml/dtd/Decl.scala
deleted file mode 100644
index 8bf859c460..0000000000
--- a/src/xml/scala/xml/dtd/Decl.scala
+++ /dev/null
@@ -1,157 +0,0 @@
-/* __ *\
- ** ________ ___ / / ___ Scala API **
- ** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
- ** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
- ** /____/\___/_/ |_/____/_/ | | **
- ** |/ **
- \* */
-
-package scala
-package xml
-package dtd
-
-import Utility.sbToString
-
-sealed abstract class Decl
-
-sealed abstract class MarkupDecl extends Decl {
- def buildString(sb: StringBuilder): StringBuilder
-}
-
-/** an element declaration
- */
-case class ElemDecl(name: String, contentModel: ContentModel)
-extends MarkupDecl {
- override def buildString(sb: StringBuilder): StringBuilder = {
- sb append "<!ELEMENT " append name append ' '
-
- ContentModel.buildString(contentModel, sb)
- sb append '>'
- }
-}
-
-case class AttListDecl(name: String, attrs:List[AttrDecl])
-extends MarkupDecl {
- override def buildString(sb: StringBuilder): StringBuilder = {
- sb append "<!ATTLIST " append name append '\n' append attrs.mkString("","\n",">")
- }
-}
-
-/** an attribute declaration. at this point, the tpe is a string. Future
- * versions might provide a way to access the attribute types more
- * directly.
- */
-case class AttrDecl(name: String, tpe: String, default: DefaultDecl) {
- override def toString(): String = sbToString(buildString)
-
- def buildString(sb: StringBuilder): StringBuilder = {
- sb append " " append name append ' ' append tpe append ' '
- default buildString sb
- }
-
-}
-
-/** an entity declaration */
-sealed abstract class EntityDecl extends MarkupDecl
-
-/** a parsed general entity declaration */
-case class ParsedEntityDecl(name: String, entdef: EntityDef) extends EntityDecl {
- override def buildString(sb: StringBuilder): StringBuilder = {
- sb append "<!ENTITY " append name append ' '
- entdef buildString sb append '>'
- }
-}
-
-/** a parameter entity declaration */
-case class ParameterEntityDecl(name: String, entdef: EntityDef) extends EntityDecl {
- override def buildString(sb: StringBuilder): StringBuilder = {
- sb append "<!ENTITY % " append name append ' '
- entdef buildString sb append '>'
- }
-}
-
-/** an unparsed entity declaration */
-case class UnparsedEntityDecl( name:String, extID:ExternalID, notation:String ) extends EntityDecl {
- override def buildString(sb: StringBuilder): StringBuilder = {
- sb append "<!ENTITY " append name append ' '
- extID buildString sb append " NDATA " append notation append '>'
- }
-}
-/** a notation declaration */
-case class NotationDecl( name:String, extID:ExternalID ) extends MarkupDecl {
- override def buildString(sb: StringBuilder): StringBuilder = {
- sb append "<!NOTATION " append name append ' '
- extID buildString sb
- }
-}
-
-sealed abstract class EntityDef {
- def buildString(sb: StringBuilder): StringBuilder
-}
-
-case class IntDef(value:String) extends EntityDef {
- private def validateValue() {
- var tmp = value
- var ix = tmp indexOf '%'
- while (ix != -1) {
- val iz = tmp.indexOf(';', ix)
- if(iz == -1 && iz == ix + 1)
- throw new IllegalArgumentException("no % allowed in entity value, except for parameter-entity-references")
- else {
- val n = tmp.substring(ix, iz)
-
- if (!Utility.isName(n))
- throw new IllegalArgumentException("internal entity def: \""+n+"\" must be an XML Name")
-
- tmp = tmp.substring(iz+1, tmp.length)
- ix = tmp indexOf '%'
- }
- }
- }
- validateValue()
-
- override def buildString(sb: StringBuilder): StringBuilder =
- Utility.appendQuoted(value, sb)
-
-}
-
-case class ExtDef(extID:ExternalID) extends EntityDef {
- override def buildString(sb: StringBuilder): StringBuilder =
- extID buildString sb
-}
-
-
-/** a parsed entity reference */
-case class PEReference(ent:String) extends MarkupDecl {
- if( !Utility.isName( ent ))
- throw new IllegalArgumentException("ent must be an XML Name")
-
- override def buildString(sb: StringBuilder): StringBuilder =
- sb append '%' append ent append ';'
-}
-
-
-// default declarations for attributes
-
-sealed abstract class DefaultDecl {
- override def toString(): String
- def buildString(sb: StringBuilder): StringBuilder
-}
-
-case object REQUIRED extends DefaultDecl {
- override def toString(): String = "#REQUIRED"
- override def buildString(sb: StringBuilder) = sb append "#REQUIRED"
-}
-
-case object IMPLIED extends DefaultDecl {
- override def toString(): String = "#IMPLIED"
- override def buildString(sb: StringBuilder) = sb append "#IMPLIED"
-}
-
-case class DEFAULT(fixed: Boolean, attValue: String) extends DefaultDecl {
- override def toString(): String = sbToString(buildString)
- override def buildString(sb: StringBuilder): StringBuilder = {
- if (fixed) sb append "#FIXED "
- Utility.appendEscapedQuoted(attValue, sb)
- }
-}
diff --git a/src/xml/scala/xml/dtd/DocType.scala b/src/xml/scala/xml/dtd/DocType.scala
deleted file mode 100644
index 849d560cc9..0000000000
--- a/src/xml/scala/xml/dtd/DocType.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala
-package xml
-package dtd
-
-/** An XML node for document type declaration.
- *
- * @author Burak Emir
- *
- * @param name name of this DOCTYPE
- * @param extID NoExternalID or the external ID of this doctype
- * @param intSubset sequence of internal subset declarations
- */
-case class DocType(name: String, extID: ExternalID, intSubset: Seq[dtd.Decl]) {
- if (!Utility.isName(name))
- throw new IllegalArgumentException(name+" must be an XML Name")
-
- /** returns "&lt;!DOCTYPE + name + extID? + ("["+intSubSet+"]")? >" */
- final override def toString() = {
- def intString =
- if (intSubset.isEmpty) ""
- else intSubset.mkString("[", "", "]")
-
- """<!DOCTYPE %s %s%s>""".format(name, extID.toString(), intString)
- }
-}
-
-object DocType {
- /** Creates a doctype with no external id, nor internal subset declarations. */
- def apply(name: String): DocType = apply(name, NoExternalID, Nil)
-}
diff --git a/src/xml/scala/xml/dtd/ElementValidator.scala b/src/xml/scala/xml/dtd/ElementValidator.scala
deleted file mode 100644
index 4830769a7d..0000000000
--- a/src/xml/scala/xml/dtd/ElementValidator.scala
+++ /dev/null
@@ -1,132 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala
-package xml
-package dtd
-
-import PartialFunction._
-import scala.collection.mutable
-
-import ContentModel.ElemName
-import MakeValidationException._ // @todo other exceptions
-
-import impl._
-
-/** validate children and/or attributes of an element
- * exceptions are created but not thrown.
- */
-class ElementValidator() extends Function1[Node,Boolean] {
-
- private var exc: List[ValidationException] = Nil
-
- protected var contentModel: ContentModel = _
- protected var dfa: DetWordAutom[ElemName] = _
- protected var adecls: List[AttrDecl] = _
-
- /** set content model, enabling element validation */
- def setContentModel(cm: ContentModel) = {
- contentModel = cm
- cm match {
- case ELEMENTS(r) =>
- val nfa = ContentModel.Translator.automatonFrom(r, 1)
- dfa = new SubsetConstruction(nfa).determinize
- case _ =>
- dfa = null
- }
- }
-
- def getContentModel = contentModel
-
- /** set meta data, enabling attribute validation */
- def setMetaData(adecls: List[AttrDecl]) { this.adecls = adecls }
-
- def getIterable(nodes: Seq[Node], skipPCDATA: Boolean): Iterable[ElemName] = {
- def isAllWhitespace(a: Atom[_]) = cond(a.data) { case s: String if s.trim == "" => true }
-
- nodes.filter {
- case y: SpecialNode => y match {
- case a: Atom[_] if isAllWhitespace(a) => false // always skip all-whitespace nodes
- case _ => !skipPCDATA
- }
- case x => x.namespace eq null
- } . map (x => ElemName(x.label))
- }
-
- /** check attributes, return true if md corresponds to attribute declarations in adecls.
- */
- def check(md: MetaData): Boolean = {
- val len: Int = exc.length
- val ok = new mutable.BitSet(adecls.length)
-
- for (attr <- md) {
- def attrStr = attr.value.toString
- def find(Key: String): Option[AttrDecl] = {
- adecls.zipWithIndex find {
- case (a @ AttrDecl(Key, _, _), j) => ok += j ; return Some(a)
- case _ => false
- }
- None
- }
-
- find(attr.key) match {
- case None =>
- exc ::= fromUndefinedAttribute(attr.key)
-
- case Some(AttrDecl(_, tpe, DEFAULT(true, fixedValue))) if attrStr != fixedValue =>
- exc ::= fromFixedAttribute(attr.key, fixedValue, attrStr)
-
- case _ =>
- }
- }
-
- adecls.zipWithIndex foreach {
- case (AttrDecl(key, tpe, REQUIRED), j) if !ok(j) => exc ::= fromMissingAttribute(key, tpe)
- case _ =>
- }
-
- exc.length == len //- true if no new exception
- }
-
- /** check children, return true if conform to content model
- * @note contentModel != null
- */
- def check(nodes: Seq[Node]): Boolean = contentModel match {
- case ANY => true
- case EMPTY => getIterable(nodes, skipPCDATA = false).isEmpty
- case PCDATA => getIterable(nodes, skipPCDATA = true).isEmpty
- case MIXED(ContentModel.Alt(branches @ _*)) => // @todo
- val j = exc.length
- def find(Key: String): Boolean =
- branches exists { case ContentModel.Letter(ElemName(Key)) => true ; case _ => false }
-
- getIterable(nodes, skipPCDATA = true) map (_.name) filterNot find foreach {
- exc ::= MakeValidationException fromUndefinedElement _
- }
- (exc.length == j) // - true if no new exception
-
- case _: ELEMENTS =>
- dfa isFinal {
- getIterable(nodes, skipPCDATA = false).foldLeft(0) { (q, e) =>
- (dfa delta q).getOrElse(e, throw ValidationException("element %s not allowed here" format e))
- }
- }
- case _ => false
- }
-
- /** applies various validations - accumulates error messages in exc
- * @todo fail on first error, ignore other errors (rearranging conditions)
- */
- def apply(n: Node): Boolean =
- //- ? check children
- ((contentModel == null) || check(n.child)) &&
- //- ? check attributes
- ((adecls == null) || check(n.attributes))
-}
diff --git a/src/xml/scala/xml/dtd/ExternalID.scala b/src/xml/scala/xml/dtd/ExternalID.scala
deleted file mode 100644
index 880633d860..0000000000
--- a/src/xml/scala/xml/dtd/ExternalID.scala
+++ /dev/null
@@ -1,86 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala
-package xml
-package dtd
-
-/** an ExternalIDs - either PublicID or SystemID
- *
- * @author Burak Emir
- */
-sealed abstract class ExternalID extends parsing.TokenTests {
- def quoted(s: String) = {
- val c = if (s contains '"') '\'' else '"'
- c + s + c
- }
-
- // public != null: PUBLIC " " publicLiteral " " [systemLiteral]
- // public == null: SYSTEM " " systemLiteral
- override def toString(): String = {
- lazy val quotedSystemLiteral = quoted(systemId)
- lazy val quotedPublicLiteral = quoted(publicId)
-
- if (publicId == null) "SYSTEM " + quotedSystemLiteral
- else "PUBLIC " + quotedPublicLiteral +
- (if (systemId == null) "" else " " + quotedSystemLiteral)
- }
- def buildString(sb: StringBuilder): StringBuilder =
- sb.append(this.toString())
-
- def systemId: String
- def publicId: String
-}
-
-/** a system identifier
- *
- * @author Burak Emir
- * @param systemId the system identifier literal
- */
-case class SystemID(systemId: String) extends ExternalID {
- val publicId = null
-
- if (!checkSysID(systemId))
- throw new IllegalArgumentException("can't use both \" and ' in systemId")
-}
-
-
-/** a public identifier (see http://www.w3.org/QA/2002/04/valid-dtd-list.html).
- *
- * @author Burak Emir
- * @param publicId the public identifier literal
- * @param systemId (can be null for notation pubIDs) the system identifier literal
- */
-case class PublicID(publicId: String, systemId: String) extends ExternalID {
- if (!checkPubID(publicId))
- throw new IllegalArgumentException("publicId must consist of PubidChars")
-
- if (systemId != null && !checkSysID(systemId))
- throw new IllegalArgumentException("can't use both \" and ' in systemId")
-
- /** the constant "#PI" */
- def label = "#PI"
-
- /** always empty */
- def attribute = Node.NoAttributes
-
- /** always empty */
- def child = Nil
-}
-
-/** A marker used when a `DocType` contains no external id.
- *
- * @author Michael Bayne
- */
-object NoExternalID extends ExternalID {
- val publicId = null
- val systemId = null
-
- override def toString = ""
-}
diff --git a/src/xml/scala/xml/dtd/Scanner.scala b/src/xml/scala/xml/dtd/Scanner.scala
deleted file mode 100644
index 5f9d1ccaed..0000000000
--- a/src/xml/scala/xml/dtd/Scanner.scala
+++ /dev/null
@@ -1,79 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala
-package xml
-package dtd
-
-/** Scanner for regexps (content models in DTD element declarations)
- * todo: cleanup
- */
-class Scanner extends Tokens with parsing.TokenTests {
-
- final val ENDCH = '\u0000'
-
- var token:Int = END
- var value:String = _
-
- private var it: Iterator[Char] = null
- private var c: Char = 'z'
-
- /** initializes the scanner on input s */
- final def initScanner(s: String) {
- value = ""
- it = (s).iterator
- token = 1+END
- next()
- nextToken()
- }
-
- /** scans the next token */
- final def nextToken() {
- if (token != END) token = readToken
- }
-
- // todo: see XML specification... probably isLetter,isDigit is fine
- final def isIdentChar = ( ('a' <= c && c <= 'z')
- || ('A' <= c && c <= 'Z'))
-
- final def next() = if (it.hasNext) c = it.next() else c = ENDCH
-
- final def acc(d: Char) {
- if (c == d) next() else scala.sys.error("expected '"+d+"' found '"+c+"' !")
- }
-
- final def accS(ds: Seq[Char]) { ds foreach acc }
-
- final def readToken: Int =
- if (isSpace(c)) {
- while (isSpace(c)) c = it.next()
- S
- } else c match {
- case '(' => next(); LPAREN
- case ')' => next(); RPAREN
- case ',' => next(); COMMA
- case '*' => next(); STAR
- case '+' => next(); PLUS
- case '?' => next(); OPT
- case '|' => next(); CHOICE
- case '#' => next(); accS( "PCDATA" ); TOKEN_PCDATA
- case ENDCH => END
- case _ =>
- if (isNameStart(c)) name; // NAME
- else scala.sys.error("unexpected character:" + c)
- }
-
- final def name = {
- val sb = new StringBuilder()
- do { sb.append(c); next() } while (isNameChar(c))
- value = sb.toString()
- NAME
- }
-
-}
diff --git a/src/xml/scala/xml/dtd/Tokens.scala b/src/xml/scala/xml/dtd/Tokens.scala
deleted file mode 100644
index 07e888e77a..0000000000
--- a/src/xml/scala/xml/dtd/Tokens.scala
+++ /dev/null
@@ -1,45 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala
-package xml
-package dtd
-
-
-class Tokens {
-
- // Tokens
-
- final val TOKEN_PCDATA = 0
- final val NAME = 1
- final val LPAREN = 3
- final val RPAREN = 4
- final val COMMA = 5
- final val STAR = 6
- final val PLUS = 7
- final val OPT = 8
- final val CHOICE = 9
- final val END = 10
- final val S = 13
-
- final def token2string(i: Int): String = i match {
- case 0 => "#PCDATA"
- case 1 => "NAME"
- case 3 => "("
- case 4 => ")"
- case 5 => ","
- case 6 => "*"
- case 7 => "+"
- case 8 => "?"
- case 9 => "|"
- case 10 => "END"
- case 13 => " "
- }
-}
diff --git a/src/xml/scala/xml/dtd/ValidationException.scala b/src/xml/scala/xml/dtd/ValidationException.scala
deleted file mode 100644
index 1bfae55286..0000000000
--- a/src/xml/scala/xml/dtd/ValidationException.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala
-package xml
-package dtd
-
-
-case class ValidationException(e: String) extends Exception(e)
-
-/**
- * @author Burak Emir
- */
-object MakeValidationException {
- def fromFixedAttribute(k: String, value: String, actual: String) =
- ValidationException("value of attribute " + k + " FIXED to \""+
- value+"\", but document tries \""+actual+"\"")
-
- def fromNonEmptyElement() =
- new ValidationException("element should be *empty*")
-
- def fromUndefinedElement(label: String) =
- new ValidationException("element \""+ label +"\" not allowed here")
-
- def fromUndefinedAttribute(key: String) =
- new ValidationException("attribute " + key +" not allowed here")
-
- def fromMissingAttribute(allKeys: Set[String]) = {
- val sb = new StringBuilder("missing value for REQUIRED attribute")
- if (allKeys.size > 1) sb.append('s')
- allKeys foreach (k => sb append "'%s'".format(k))
- new ValidationException(sb.toString())
- }
-
- def fromMissingAttribute(key: String, tpe: String) =
- new ValidationException("missing value for REQUIRED attribute %s of type %s".format(key, tpe))
-}
diff --git a/src/xml/scala/xml/dtd/impl/Base.scala b/src/xml/scala/xml/dtd/impl/Base.scala
deleted file mode 100644
index 91ff03a93a..0000000000
--- a/src/xml/scala/xml/dtd/impl/Base.scala
+++ /dev/null
@@ -1,67 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala
-package xml.dtd.impl
-
-/** Basic regular expressions.
- *
- * @author Burak Emir
- * @version 1.0
- */
-
-@deprecated("This class will be removed", "2.10.0")
-private[dtd] abstract class Base {
- type _regexpT <: RegExp
-
- abstract class RegExp {
- val isNullable: Boolean
- }
-
- object Alt {
- /** `Alt( R,R,R* )`. */
- def apply(rs: _regexpT*) =
- if (rs.size < 2) throw new SyntaxError("need at least 2 branches in Alt")
- else new Alt(rs: _*)
- // Can't enforce that statically without changing the interface
- // def apply(r1: _regexpT, r2: _regexpT, rs: _regexpT*) = new Alt(Seq(r1, r2) ++ rs: _*)
- def unapplySeq(x: Alt) = Some(x.rs)
- }
-
- class Alt private (val rs: _regexpT*) extends RegExp {
- final val isNullable = rs exists (_.isNullable)
- }
-
- object Sequ {
- /** Sequ( R,R* ) */
- def apply(rs: _regexpT*) = if (rs.isEmpty) Eps else new Sequ(rs: _*)
- def unapplySeq(x: Sequ) = Some(x.rs)
- }
-
- class Sequ private (val rs: _regexpT*) extends RegExp {
- final val isNullable = rs forall (_.isNullable)
- }
-
- case class Star(r: _regexpT) extends RegExp {
- final lazy val isNullable = true
- }
-
- // The empty Sequ.
- case object Eps extends RegExp {
- final lazy val isNullable = true
- override def toString() = "Eps"
- }
-
- /** this class can be used to add meta information to regexps. */
- class Meta(r1: _regexpT) extends RegExp {
- final val isNullable = r1.isNullable
- def r = r1
- }
-}
diff --git a/src/xml/scala/xml/dtd/impl/BaseBerrySethi.scala b/src/xml/scala/xml/dtd/impl/BaseBerrySethi.scala
deleted file mode 100644
index f30309b037..0000000000
--- a/src/xml/scala/xml/dtd/impl/BaseBerrySethi.scala
+++ /dev/null
@@ -1,98 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package xml.dtd.impl
-
-import scala.collection.{ mutable, immutable }
-
-// todo: replace global variable pos with acc
-
-/** This class turns a regular expression over `A` into a
- * [[scala.util.automata.NondetWordAutom]] over `A` using the celebrated
- * position automata construction (also called ''Berry-Sethi'' or ''Glushkov'').
- */
-@deprecated("This class will be removed", "2.10.0")
-private[dtd] abstract class BaseBerrySethi {
- val lang: Base
- import lang.{ Alt, Eps, Meta, RegExp, Sequ, Star }
-
- protected var pos = 0
-
- // results which hold all info for the NondetWordAutomaton
- protected var follow: mutable.HashMap[Int, Set[Int]] = _
-
- protected var finalTag: Int = _
-
- protected var finals: immutable.Map[Int, Int] = _ // final states
-
- // constants --------------------------
-
- final val emptySet: Set[Int] = Set()
-
- private def doComp(r: RegExp, compFunction: RegExp => Set[Int]) = r match {
- case x: Alt => (x.rs map compFirst).foldLeft(emptySet)(_ ++ _)
- case Eps => emptySet
- case x: Meta => compFunction(x.r)
- case x: Sequ =>
- val (l1, l2) = x.rs span (_.isNullable)
- ((l1 ++ (l2 take 1)) map compFunction).foldLeft(emptySet)(_ ++ _)
- case Star(t) => compFunction(t)
- case _ => throw new IllegalArgumentException("unexpected pattern " + r.getClass)
- }
-
- /** Computes `first(r)` for the word regexp `r`. */
- protected def compFirst(r: RegExp): Set[Int] = doComp(r, compFirst)
-
- /** Computes `last(r)` for the regexp `r`. */
- protected def compLast(r: RegExp): Set[Int] = doComp(r, compLast)
-
- /** Starts from the right-to-left
- * precondition: pos is final
- * pats are successor patterns of a Sequence node
- */
- protected def compFollow(rs: Seq[RegExp]): Set[Int] = {
- follow(0) =
- if (rs.isEmpty) emptySet
- else rs.foldRight(Set(pos))((p, fol) => {
- val first = compFollow1(fol, p)
-
- if (p.isNullable) fol ++ first
- else first
- })
-
- follow(0)
- }
-
- /** Returns the first set of an expression, setting the follow set along the way.
- */
- protected def compFollow1(fol1: Set[Int], r: RegExp): Set[Int] = r match {
- case x: Alt => Set((x.rs reverseMap (compFollow1(fol1, _))).flatten: _*)
- case x: Meta => compFollow1(fol1, x.r)
- case x: Star => compFollow1(fol1 ++ compFirst(x.r), x.r)
- case x: Sequ =>
- x.rs.foldRight(fol1) { (p, fol) =>
- val first = compFollow1(fol, p)
-
- if (p.isNullable) fol ++ first
- else first
- }
- case _ => throw new IllegalArgumentException("unexpected pattern: " + r.getClass)
- }
-
- /** Returns the "Sethi-length" of a pattern, creating the set of position along the way.
- */
- protected def traverse(r: RegExp): Unit = r match {
- // (is tree automaton stuff, more than Berry-Sethi)
- case x: Alt => x.rs foreach traverse
- case x: Sequ => x.rs foreach traverse
- case x: Meta => traverse(x.r)
- case Star(t) => traverse(t)
- case _ => throw new IllegalArgumentException("unexp pattern " + r.getClass)
- }
-}
diff --git a/src/xml/scala/xml/dtd/impl/DetWordAutom.scala b/src/xml/scala/xml/dtd/impl/DetWordAutom.scala
deleted file mode 100644
index 6f8ba4de72..0000000000
--- a/src/xml/scala/xml/dtd/impl/DetWordAutom.scala
+++ /dev/null
@@ -1,50 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package xml.dtd.impl
-
-import scala.collection.{ mutable, immutable }
-
-/** A deterministic automaton. States are integers, where
- * 0 is always the only initial state. Transitions are represented
- * in the delta function. A default transitions is one that
- * is taken when no other transition can be taken.
- * All states are reachable. Accepting states are those for which
- * the partial function 'finals' is defined.
- *
- * @author Burak Emir
- * @version 1.0
- */
-@deprecated("This class will be removed", "2.10.0")
-private[dtd] abstract class DetWordAutom[T <: AnyRef] {
- val nstates: Int
- val finals: Array[Int]
- val delta: Array[mutable.Map[T, Int]]
- val default: Array[Int]
-
- def isFinal(q: Int) = finals(q) != 0
- def isSink(q: Int) = delta(q).isEmpty && default(q) == q
- def next(q: Int, label: T) = delta(q).getOrElse(label, default(q))
-
- override def toString() = {
- val sb = new StringBuilder("[DetWordAutom nstates=")
- sb.append(nstates)
- sb.append(" finals=")
- val map = Map(finals.zipWithIndex map (_.swap): _*)
- sb.append(map.toString())
- sb.append(" delta=\n")
-
- for (i <- 0 until nstates) {
- sb append "%d->%s\n".format(i, delta(i))
- if (i < default.length)
- sb append "_>%s\n".format(default(i))
- }
- sb.toString
- }
-}
diff --git a/src/xml/scala/xml/dtd/impl/Inclusion.scala b/src/xml/scala/xml/dtd/impl/Inclusion.scala
deleted file mode 100644
index 07b6afaeba..0000000000
--- a/src/xml/scala/xml/dtd/impl/Inclusion.scala
+++ /dev/null
@@ -1,70 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala
-package xml.dtd.impl
-
-
-/** A fast test of language inclusion between minimal automata.
- * inspired by the ''AMoRE automata library''.
- *
- * @author Burak Emir
- * @version 1.0
- */
-@deprecated("This class will be removed", "2.10.0")
-private[dtd] trait Inclusion[A <: AnyRef] {
-
- val labels: Seq[A]
-
- /** Returns true if `dfa1` is included in `dfa2`.
- */
- def inclusion(dfa1: DetWordAutom[A], dfa2: DetWordAutom[A]) = {
-
- def encode(q1: Int, q2: Int) = 1 + q1 + q2 * dfa1.nstates
- def decode2(c: Int) = (c-1) / (dfa1.nstates) //integer division
- def decode1(c: Int) = (c-1) % (dfa1.nstates)
-
- var q1 = 0 //dfa1.initstate; // == 0
- var q2 = 0 //dfa2.initstate; // == 0
-
- val max = 1 + dfa1.nstates * dfa2.nstates
- val mark = new Array[Int](max)
-
- var result = true
- var current = encode(q1, q2)
- var last = current
- mark(last) = max // mark (q1,q2)
- while (current != 0 && result) {
- //Console.println("current = [["+q1+" "+q2+"]] = "+current);
- for (letter <- labels) {
- val r1 = dfa1.next(q1,letter)
- val r2 = dfa2.next(q2,letter)
- if (dfa1.isFinal(r1) && !dfa2.isFinal(r2))
- result = false
- val test = encode(r1, r2)
- //Console.println("test = [["+r1+" "+r2+"]] = "+test);
- if (mark(test) == 0) {
- mark(last) = test
- mark(test) = max
- last = test
- }
- }
- val ncurrent = mark(current)
- if( ncurrent != max ) {
- q1 = decode1(ncurrent)
- q2 = decode2(ncurrent)
- current = ncurrent
- } else {
- current = 0
- }
- }
- result
- }
-}
diff --git a/src/xml/scala/xml/dtd/impl/NondetWordAutom.scala b/src/xml/scala/xml/dtd/impl/NondetWordAutom.scala
deleted file mode 100644
index 0bb19a7e3e..0000000000
--- a/src/xml/scala/xml/dtd/impl/NondetWordAutom.scala
+++ /dev/null
@@ -1,60 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package xml.dtd.impl
-
-import scala.collection.{ immutable, mutable }
-
-/** A nondeterministic automaton. States are integers, where
- * 0 is always the only initial state. Transitions are represented
- * in the delta function. Default transitions are transitions that
- * are taken when no other transitions can be applied.
- * All states are reachable. Accepting states are those for which
- * the partial function `finals` is defined.
- */
-@deprecated("This class will be removed", "2.10.0")
-private[dtd] abstract class NondetWordAutom[T <: AnyRef] {
- val nstates: Int
- val labels: Seq[T]
- val finals: Array[Int] // 0 means not final
- val delta: Array[mutable.Map[T, immutable.BitSet]]
- val default: Array[immutable.BitSet]
-
- /** @return true if the state is final */
- final def isFinal(state: Int) = finals(state) > 0
-
- /** @return tag of final state */
- final def finalTag(state: Int) = finals(state)
-
- /** @return true if the set of states contains at least one final state */
- final def containsFinal(Q: immutable.BitSet): Boolean = Q exists isFinal
-
- /** @return true if there are no accepting states */
- final def isEmpty = (0 until nstates) forall (x => !isFinal(x))
-
- /** @return an immutable.BitSet with the next states for given state and label */
- def next(q: Int, a: T): immutable.BitSet = delta(q).getOrElse(a, default(q))
-
- /** @return an immutable.BitSet with the next states for given state and label */
- def next(Q: immutable.BitSet, a: T): immutable.BitSet = next(Q, next(_, a))
- def nextDefault(Q: immutable.BitSet): immutable.BitSet = next(Q, default)
-
- private def next(Q: immutable.BitSet, f: (Int) => immutable.BitSet): immutable.BitSet =
- (Q map f).foldLeft(immutable.BitSet.empty)(_ ++ _)
-
- private def finalStates = 0 until nstates filter isFinal
- override def toString = {
-
- val finalString = Map(finalStates map (j => j -> finals(j)) : _*).toString
- val deltaString = (0 until nstates)
- .map(i => " %d->%s\n _>%s\n".format(i, delta(i), default(i))).mkString
-
- "[NondetWordAutom nstates=%d finals=%s delta=\n%s".format(nstates, finalString, deltaString)
- }
-}
diff --git a/src/xml/scala/xml/dtd/impl/PointedHedgeExp.scala b/src/xml/scala/xml/dtd/impl/PointedHedgeExp.scala
deleted file mode 100644
index 1720604132..0000000000
--- a/src/xml/scala/xml/dtd/impl/PointedHedgeExp.scala
+++ /dev/null
@@ -1,37 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala
-package xml.dtd.impl
-
-/** Pointed regular hedge expressions, a useful subclass of regular hedge expressions.
- *
- * @author Burak Emir
- * @version 1.0
- */
-@deprecated("This class will be removed", "2.10.0")
-private[dtd] abstract class PointedHedgeExp extends Base {
-
- type _regexpT <: RegExp
- type _labelT
-
- case class Node(label: _labelT, r: _regexpT) extends RegExp {
- final val isNullable = false
- }
-
- case class TopIter(r1: _regexpT, r2: _regexpT) extends RegExp {
- final val isNullable = r1.isNullable && r2.isNullable //?
- }
-
- case object Point extends RegExp {
- final val isNullable = false
- }
-
-}
diff --git a/src/xml/scala/xml/dtd/impl/SubsetConstruction.scala b/src/xml/scala/xml/dtd/impl/SubsetConstruction.scala
deleted file mode 100644
index 632ca1eb18..0000000000
--- a/src/xml/scala/xml/dtd/impl/SubsetConstruction.scala
+++ /dev/null
@@ -1,108 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package xml.dtd.impl
-
-import scala.collection.{ mutable, immutable }
-
-@deprecated("This class will be removed", "2.10.0")
-private[dtd] class SubsetConstruction[T <: AnyRef](val nfa: NondetWordAutom[T]) {
- import nfa.labels
-
- def selectTag(Q: immutable.BitSet, finals: Array[Int]) =
- (Q map finals filter (_ > 0)).min
-
- def determinize: DetWordAutom[T] = {
- // for assigning numbers to bitsets
- var indexMap = scala.collection.Map[immutable.BitSet, Int]()
- var invIndexMap = scala.collection.Map[Int, immutable.BitSet]()
- var ix = 0
-
- // we compute the dfa with states = bitsets
- val q0 = immutable.BitSet(0) // the set { 0 }
- val sink = immutable.BitSet.empty // the set { }
-
- var states = Set(q0, sink) // initial set of sets
- val delta = new mutable.HashMap[immutable.BitSet, mutable.HashMap[T, immutable.BitSet]]
- var deftrans = mutable.Map(q0 -> sink, sink -> sink) // initial transitions
- var finals: mutable.Map[immutable.BitSet, Int] = mutable.Map()
- val rest = new mutable.Stack[immutable.BitSet]
-
- rest.push(sink, q0)
-
- def addFinal(q: immutable.BitSet) {
- if (nfa containsFinal q)
- finals = finals.updated(q, selectTag(q, nfa.finals))
- }
- def add(Q: immutable.BitSet) {
- if (!states(Q)) {
- states += Q
- rest push Q
- addFinal(Q)
- }
- }
-
- addFinal(q0) // initial state may also be a final state
-
- while (!rest.isEmpty) {
- val P = rest.pop()
- // assign a number to this bitset
- indexMap = indexMap.updated(P, ix)
- invIndexMap = invIndexMap.updated(ix, P)
- ix += 1
-
- // make transition map
- val Pdelta = new mutable.HashMap[T, immutable.BitSet]
- delta.update(P, Pdelta)
-
- labels foreach { label =>
- val Q = nfa.next(P, label)
- Pdelta.update(label, Q)
- add(Q)
- }
-
- // collect default transitions
- val Pdef = nfa nextDefault P
- deftrans = deftrans.updated(P, Pdef)
- add(Pdef)
- }
-
- // create DetWordAutom, using indices instead of sets
- val nstatesR = states.size
- val deltaR = new Array[mutable.Map[T, Int]](nstatesR)
- val defaultR = new Array[Int](nstatesR)
- val finalsR = new Array[Int](nstatesR)
-
- for (Q <- states) {
- val q = indexMap(Q)
- val trans = delta(Q)
- val transDef = deftrans(Q)
- val qDef = indexMap(transDef)
- val ntrans = new mutable.HashMap[T, Int]()
-
- for ((label, value) <- trans) {
- val p = indexMap(value)
- if (p != qDef)
- ntrans.update(label, p)
- }
-
- deltaR(q) = ntrans
- defaultR(q) = qDef
- }
-
- finals foreach { case (k,v) => finalsR(indexMap(k)) = v }
-
- new DetWordAutom [T] {
- val nstates = nstatesR
- val delta = deltaR
- val default = defaultR
- val finals = finalsR
- }
- }
-}
diff --git a/src/xml/scala/xml/dtd/impl/SyntaxError.scala b/src/xml/scala/xml/dtd/impl/SyntaxError.scala
deleted file mode 100644
index a5b8a5aba0..0000000000
--- a/src/xml/scala/xml/dtd/impl/SyntaxError.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala
-package xml.dtd.impl
-
-/** This runtime exception is thrown if an attempt to instantiate a
- * syntactically incorrect expression is detected.
- *
- * @author Burak Emir
- * @version 1.0
- */
-@deprecated("This class will be removed", "2.10.0")
-private[dtd] class SyntaxError(e: String) extends RuntimeException(e)
diff --git a/src/xml/scala/xml/dtd/impl/WordBerrySethi.scala b/src/xml/scala/xml/dtd/impl/WordBerrySethi.scala
deleted file mode 100644
index 9bf3fa518b..0000000000
--- a/src/xml/scala/xml/dtd/impl/WordBerrySethi.scala
+++ /dev/null
@@ -1,162 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package xml.dtd.impl
-
-import scala.collection.{ immutable, mutable }
-
-/** This class turns a regular expression into a [[scala.util.automata.NondetWordAutom]]
- * celebrated position automata construction (also called ''Berry-Sethi'' or ''Glushkov'').
- *
- * @author Burak Emir
- * @version 1.0
- */
-@deprecated("This class will be removed", "2.10.0")
-private[dtd] abstract class WordBerrySethi extends BaseBerrySethi {
- override val lang: WordExp
-
- import lang.{ Alt, Eps, Letter, RegExp, Sequ, Star, _labelT }
-
- protected var labels: mutable.HashSet[_labelT] = _
- // don't let this fool you, only labelAt is a real, surjective mapping
- protected var labelAt: Map[Int, _labelT] = _ // new alphabet "gamma"
- protected var deltaq: Array[mutable.HashMap[_labelT, List[Int]]] = _ // delta
- protected var defaultq: Array[List[Int]] = _ // default transitions
- protected var initials: Set[Int] = _
-
- /** Computes `first(r)` where the word regexp `r`.
- *
- * @param r the regular expression
- * @return the computed set `first(r)`
- */
- protected override def compFirst(r: RegExp): Set[Int] = r match {
- case x: Letter => Set(x.pos)
- case _ => super.compFirst(r)
- }
-
- /** Computes `last(r)` where the word regexp `r`.
- *
- * @param r the regular expression
- * @return the computed set `last(r)`
- */
- protected override def compLast(r: RegExp): Set[Int] = r match {
- case x: Letter => Set(x.pos)
- case _ => super.compLast(r)
- }
-
- /** Returns the first set of an expression, setting the follow set along
- * the way.
- *
- * @param r the regular expression
- * @return the computed set
- */
- protected override def compFollow1(fol1: Set[Int], r: RegExp): Set[Int] = r match {
- case x: Letter => follow(x.pos) = fol1 ; Set(x.pos)
- case Eps => emptySet
- case _ => super.compFollow1(fol1, r)
- }
-
- /** Returns "Sethi-length" of a pattern, creating the set of position
- * along the way
- */
-
- /** Called at the leaves of the regexp */
- protected def seenLabel(r: RegExp, i: Int, label: _labelT) {
- labelAt = labelAt.updated(i, label)
- this.labels += label
- }
-
- // overridden in BindingBerrySethi
- protected def seenLabel(r: RegExp, label: _labelT): Int = {
- pos += 1
- seenLabel(r, pos, label)
- pos
- }
-
- // todo: replace global variable pos with acc
- override def traverse(r: RegExp): Unit = r match {
- case a @ Letter(label) => a.pos = seenLabel(r, label)
- case Eps => // ignore
- case _ => super.traverse(r)
- }
-
-
- protected def makeTransition(src: Int, dest: Int, label: _labelT) {
- val q = deltaq(src)
- q.update(label, dest :: q.getOrElse(label, Nil))
- }
-
- protected def initialize(subexpr: Seq[RegExp]): Unit = {
- this.labelAt = immutable.Map()
- this.follow = mutable.HashMap()
- this.labels = mutable.HashSet()
- this.pos = 0
-
- // determine "Sethi-length" of the regexp
- subexpr foreach traverse
-
- this.initials = Set(0)
- }
-
- protected def initializeAutom() {
- finals = immutable.Map.empty[Int, Int] // final states
- deltaq = new Array[mutable.HashMap[_labelT, List[Int]]](pos) // delta
- defaultq = new Array[List[Int]](pos) // default transitions
-
- for (j <- 0 until pos) {
- deltaq(j) = mutable.HashMap[_labelT, List[Int]]()
- defaultq(j) = Nil
- }
- }
-
- protected def collectTransitions(): Unit = // make transitions
- for (j <- 0 until pos ; fol = follow(j) ; k <- fol) {
- if (pos == k) finals = finals.updated(j, finalTag)
- else makeTransition(j, k, labelAt(k))
- }
-
- def automatonFrom(pat: RegExp, finalTag: Int): NondetWordAutom[_labelT] = {
- this.finalTag = finalTag
-
- pat match {
- case x: Sequ =>
- // (1,2) compute follow + first
- initialize(x.rs)
- pos += 1
- compFollow(x.rs) // this used to be assigned to var globalFirst and then never used.
-
- // (3) make automaton from follow sets
- initializeAutom()
- collectTransitions()
-
- if (x.isNullable) // initial state is final
- finals = finals.updated(0, finalTag)
-
- val delta1 = immutable.Map(deltaq.zipWithIndex map (_.swap): _*)
- val finalsArr = (0 until pos map (k => finals.getOrElse(k, 0))).toArray // 0 == not final
-
- val deltaArr: Array[mutable.Map[_labelT, immutable.BitSet]] =
- (0 until pos map { x =>
- mutable.HashMap(delta1(x).toSeq map { case (k, v) => k -> immutable.BitSet(v: _*) } : _*)
- }).toArray
-
- val defaultArr = (0 until pos map (k => immutable.BitSet(defaultq(k): _*))).toArray
-
- new NondetWordAutom[_labelT] {
- val nstates = pos
- val labels = WordBerrySethi.this.labels.toList
- val finals = finalsArr
- val delta = deltaArr
- val default = defaultArr
- }
- case z =>
- automatonFrom(Sequ(z.asInstanceOf[this.lang._regexpT]), finalTag)
- }
- }
-}
diff --git a/src/xml/scala/xml/dtd/impl/WordExp.scala b/src/xml/scala/xml/dtd/impl/WordExp.scala
deleted file mode 100644
index a4bb54c1ea..0000000000
--- a/src/xml/scala/xml/dtd/impl/WordExp.scala
+++ /dev/null
@@ -1,59 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala
-package xml.dtd.impl
-
-/**
- * The class `WordExp` provides regular word expressions.
- *
- * Users have to instantiate type member `_regexpT <;: RegExp`
- * (from class `Base`) and a type member `_labelT <;: Label`.
- *
- * Here is a short example:
- * {{{
- * import scala.util.regexp._
- * import scala.util.automata._
- * object MyLang extends WordExp {
- * type _regexpT = RegExp
- * type _labelT = MyChar
- *
- * case class MyChar(c:Char) extends Label
- * }
- * import MyLang._
- * // (a* | b)*
- * val rex = Star(Alt(Star(Letter(MyChar('a'))),Letter(MyChar('b'))))
- * object MyBerriSethi extends WordBerrySethi {
- * override val lang = MyLang
- * }
- * val nfa = MyBerriSethi.automatonFrom(Sequ(rex), 1)
- * }}}
- *
- * @author Burak Emir
- * @version 1.0
- */
-@deprecated("This class will be removed", "2.10.0")
-private[dtd] abstract class WordExp extends Base {
-
- abstract class Label
-
- type _regexpT <: RegExp
- type _labelT <: Label
-
- case class Letter(a: _labelT) extends RegExp {
- final lazy val isNullable = false
- var pos = -1
- }
-
- case class Wildcard() extends RegExp {
- final lazy val isNullable = false
- var pos = -1
- }
-}
diff --git a/src/xml/scala/xml/factory/Binder.scala b/src/xml/scala/xml/factory/Binder.scala
deleted file mode 100755
index 947f99e6a4..0000000000
--- a/src/xml/scala/xml/factory/Binder.scala
+++ /dev/null
@@ -1,61 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala
-package xml
-package factory
-
-import parsing.ValidatingMarkupHandler
-
-/**
- * @author Burak Emir
- */
-abstract class Binder(val preserveWS: Boolean) extends ValidatingMarkupHandler {
-
- var result: NodeBuffer = new NodeBuffer()
-
- def reportSyntaxError(pos:Int, str:String) = {}
-
- final def procInstr(pos: Int, target: String, txt: String) =
- ProcInstr(target, txt)
-
- final def comment(pos: Int, txt: String) =
- Comment(txt)
-
- final def entityRef(pos: Int, n: String) =
- EntityRef(n)
-
- final def text(pos: Int, txt: String) =
- Text(txt)
-
- final def traverse(n:Node): Unit = n match {
- case x:ProcInstr =>
- result &+ procInstr(0, x.target, x.text)
- case x:Comment =>
- result &+ comment(0, x.text)
- case x:Text =>
- result &+ text(0, x.data)
- case x:EntityRef =>
- result &+ entityRef(0, x.entityName)
- case x:Elem =>
- elemStart(0, x.prefix, x.label, x.attributes, x.scope)
- val old = result
- result = new NodeBuffer()
- for (m <- x.child) traverse(m)
- result = old &+ elem(0, x.prefix, x.label, x.attributes, x.scope, x.minimizeEmpty, NodeSeq.fromSeq(result)).toList
- elemEnd(0, x.prefix, x.label)
- }
-
- final def validate(n: Node): Node = {
- this.rootLabel = n.label
- traverse(n)
- result(0)
- }
-}
diff --git a/src/xml/scala/xml/factory/LoggedNodeFactory.scala b/src/xml/scala/xml/factory/LoggedNodeFactory.scala
deleted file mode 100644
index bc074bfc83..0000000000
--- a/src/xml/scala/xml/factory/LoggedNodeFactory.scala
+++ /dev/null
@@ -1,90 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package xml
-package factory
-
-/** This class logs what the nodefactory is actually doing.
- * If you want to see what happens during loading, use it like this:
-{{{
-object testLogged extends App {
- val x = new scala.xml.parsing.NoBindingFactoryAdapter
- with scala.xml.factory.LoggedNodeFactory[scala.xml.Elem] {
- override def log(s: String) = println(s)
- }
-
- Console.println("Start")
- val doc = x.load(new java.net.URL("http://example.com/file.xml"))
- Console.println("End")
- Console.println(doc)
-}
-}}}
- *
- * @author Burak Emir
- * @version 1.0
- */
-@deprecated("This trait will be removed.", "2.11")
-trait LoggedNodeFactory[A <: Node] extends NodeFactory[A] {
- // configuration values
- val logNode = true
- val logText = false
- val logComment = false
- val logProcInstr = false
-
- final val NONE = 0
- final val CACHE = 1
- final val FULL = 2
- /** 0 = no logging, 1 = cache hits, 2 = detail */
- val logCompressLevel = 1
-
- // methods of NodeFactory
-
- /** logged version of makeNode method */
- override def makeNode(pre: String, label: String, attrSeq: MetaData,
- scope: NamespaceBinding, children: Seq[Node]): A = {
- if (logNode)
- log("[makeNode for "+label+"]")
-
- val hash = Utility.hashCode(pre, label, attrSeq.##, scope.##, children)
-
- /*
- if(logCompressLevel >= FULL) {
- log("[hashcode total:"+hash);
- log(" elem name "+uname+" hash "+ ? ));
- log(" attrs "+attrSeq+" hash "+attrSeq.hashCode());
- log(" children :"+children+" hash "+children.hashCode());
- }
- */
- if (!cache.get( hash ).isEmpty && (logCompressLevel >= CACHE))
- log("[cache hit !]")
-
- super.makeNode(pre, label, attrSeq, scope, children)
- }
-
- override def makeText(s: String) = {
- if (logText)
- log("[makeText:\""+s+"\"]")
- super.makeText(s)
- }
-
- override def makeComment(s: String): Seq[Comment] = {
- if (logComment)
- log("[makeComment:\""+s+"\"]")
- super.makeComment(s)
- }
-
- override def makeProcInstr(t: String, s: String): Seq[ProcInstr] = {
- if (logProcInstr)
- log("[makeProcInstr:\""+t+" "+ s+"\"]")
- super.makeProcInstr(t, s)
- }
-
- @deprecated("This method and its usages will be removed. Use a debugger to debug code.", "2.11")
- def log(msg: String): Unit = {}
-}
diff --git a/src/xml/scala/xml/factory/NodeFactory.scala b/src/xml/scala/xml/factory/NodeFactory.scala
deleted file mode 100644
index 94801bb554..0000000000
--- a/src/xml/scala/xml/factory/NodeFactory.scala
+++ /dev/null
@@ -1,61 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala
-package xml
-package factory
-
-import parsing.{ FactoryAdapter, NoBindingFactoryAdapter }
-import java.io.{ InputStream, Reader, StringReader, File, FileDescriptor, FileInputStream }
-
-trait NodeFactory[A <: Node] {
- val ignoreComments = false
- val ignoreProcInstr = false
-
- /* default behaviour is to use hash-consing */
- val cache = new scala.collection.mutable.HashMap[Int, List[A]]
-
- protected def create(pre: String, name: String, attrs: MetaData, scope: NamespaceBinding, children:Seq[Node]): A
-
- protected def construct(hash: Int, old:List[A], pre: String, name: String, attrSeq:MetaData, scope: NamespaceBinding, children:Seq[Node]): A = {
- val el = create(pre, name, attrSeq, scope, children)
- cache.update(hash, el :: old)
- el
- }
-
- def eqElements(ch1: Seq[Node], ch2: Seq[Node]): Boolean =
- ch1.view.zipAll(ch2.view, null, null) forall { case (x,y) => x eq y }
-
- def nodeEquals(n: Node, pre: String, name: String, attrSeq:MetaData, scope: NamespaceBinding, children: Seq[Node]) =
- n.prefix == pre &&
- n.label == name &&
- n.attributes == attrSeq &&
- // scope?
- eqElements(n.child, children)
-
- def makeNode(pre: String, name: String, attrSeq: MetaData, scope: NamespaceBinding, children: Seq[Node]): A = {
- val hash = Utility.hashCode( pre, name, attrSeq.##, scope.##, children)
- def cons(old: List[A]) = construct(hash, old, pre, name, attrSeq, scope, children)
-
- (cache get hash) match {
- case Some(list) => // find structurally equal
- list.find(nodeEquals(_, pre, name, attrSeq, scope, children)) match {
- case Some(x) => x
- case _ => cons(list)
- }
- case None => cons(Nil)
- }
- }
-
- def makeText(s: String) = Text(s)
- def makeComment(s: String): Seq[Comment] =
- if (ignoreComments) Nil else List(Comment(s))
- def makeProcInstr(t: String, s: String): Seq[ProcInstr] =
- if (ignoreProcInstr) Nil else List(ProcInstr(t, s))
-}
diff --git a/src/xml/scala/xml/factory/XMLLoader.scala b/src/xml/scala/xml/factory/XMLLoader.scala
deleted file mode 100644
index b69f187039..0000000000
--- a/src/xml/scala/xml/factory/XMLLoader.scala
+++ /dev/null
@@ -1,61 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala
-package xml
-package factory
-
-import javax.xml.parsers.SAXParserFactory
-import parsing.{ FactoryAdapter, NoBindingFactoryAdapter }
-import java.io.{ InputStream, Reader, File, FileDescriptor }
-import java.net.URL
-
-/** Presents collection of XML loading methods which use the parser
- * created by "def parser".
- */
-trait XMLLoader[T <: Node]
-{
- import scala.xml.Source._
- def adapter: FactoryAdapter = new NoBindingFactoryAdapter()
-
- /* Override this to use a different SAXParser. */
- def parser: SAXParser = {
- val f = SAXParserFactory.newInstance()
- f.setNamespaceAware(false)
- f.newSAXParser()
- }
-
- /** Loads XML from the given InputSource, using the supplied parser.
- * The methods available in scala.xml.XML use the XML parser in the JDK.
- */
- def loadXML(source: InputSource, parser: SAXParser): T = {
- val newAdapter = adapter
-
- newAdapter.scopeStack push TopScope
- parser.parse(source, newAdapter)
- newAdapter.scopeStack.pop()
-
- newAdapter.rootElem.asInstanceOf[T]
- }
-
- /** Loads XML from the given file, file descriptor, or filename. */
- def loadFile(file: File): T = loadXML(fromFile(file), parser)
- def loadFile(fd: FileDescriptor): T = loadXML(fromFile(fd), parser)
- def loadFile(name: String): T = loadXML(fromFile(name), parser)
-
- /** loads XML from given InputStream, Reader, sysID, InputSource, or URL. */
- def load(is: InputStream): T = loadXML(fromInputStream(is), parser)
- def load(reader: Reader): T = loadXML(fromReader(reader), parser)
- def load(sysID: String): T = loadXML(fromSysId(sysID), parser)
- def load(source: InputSource): T = loadXML(source, parser)
- def load(url: URL): T = loadXML(fromInputStream(url.openStream()), parser)
-
- /** Loads XML from the given String. */
- def loadString(string: String): T = loadXML(fromString(string), parser)
-}
diff --git a/src/xml/scala/xml/include/CircularIncludeException.scala b/src/xml/scala/xml/include/CircularIncludeException.scala
deleted file mode 100644
index 351f403008..0000000000
--- a/src/xml/scala/xml/include/CircularIncludeException.scala
+++ /dev/null
@@ -1,25 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package xml
-package include
-
-/**
- * A `CircularIncludeException` is thrown when an included document attempts
- * to include itself or one of its ancestor documents.
- */
-class CircularIncludeException(message: String) extends XIncludeException {
-
- /**
- * Constructs a `CircularIncludeException` with `'''null'''`.
- * as its error detail message.
- */
- def this() = this(null)
-
-}
diff --git a/src/xml/scala/xml/include/UnavailableResourceException.scala b/src/xml/scala/xml/include/UnavailableResourceException.scala
deleted file mode 100644
index 47b176e0f3..0000000000
--- a/src/xml/scala/xml/include/UnavailableResourceException.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package xml
-package include
-
-/**
- * An `UnavailableResourceException` is thrown when an included document
- * cannot be found or loaded.
- */
-class UnavailableResourceException(message: String)
-extends XIncludeException(message) {
- def this() = this(null)
-}
diff --git a/src/xml/scala/xml/include/XIncludeException.scala b/src/xml/scala/xml/include/XIncludeException.scala
deleted file mode 100644
index 11e1644d83..0000000000
--- a/src/xml/scala/xml/include/XIncludeException.scala
+++ /dev/null
@@ -1,58 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package xml
-package include
-
-/**
- * `XIncludeException` is the generic superclass for all checked exceptions
- * that may be thrown as a result of a violation of XInclude's rules.
- *
- * Constructs an `XIncludeException` with the specified detail message.
- * The error message string `message` can later be retrieved by the
- * `{@link java.lang.Throwable#getMessage}`
- * method of class `java.lang.Throwable`.
- *
- * @param message the detail message.
- */
-class XIncludeException(message: String) extends Exception(message) {
-
- /**
- * uses `'''null'''` as its error detail message.
- */
- def this() = this(null)
-
- private var rootCause: Throwable = null
-
- /**
- * When an `IOException`, `MalformedURLException` or other generic
- * exception is thrown while processing an XML document for XIncludes,
- * it is customarily replaced by some form of `XIncludeException`.
- * This method allows you to store the original exception.
- *
- * @param nestedException the underlying exception which
- * caused the XIncludeException to be thrown
- */
- def setRootCause(nestedException: Throwable ) {
- this.rootCause = nestedException
- }
-
- /**
- * When an `IOException`, `MalformedURLException` or other generic
- * exception is thrown while processing an XML document for XIncludes,
- * it is customarily replaced by some form of `XIncludeException`.
- * This method allows you to retrieve the original exception.
- * It returns null if no such exception caused this `XIncludeException`.
- *
- * @return Throwable the underlying exception which caused the
- * `XIncludeException` to be thrown
- */
- def getRootCause(): Throwable = this.rootCause
-
-}
diff --git a/src/xml/scala/xml/include/sax/EncodingHeuristics.scala b/src/xml/scala/xml/include/sax/EncodingHeuristics.scala
deleted file mode 100644
index 57ab5ed91c..0000000000
--- a/src/xml/scala/xml/include/sax/EncodingHeuristics.scala
+++ /dev/null
@@ -1,98 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package xml
-package include.sax
-
-import java.io.InputStream
-import scala.util.matching.Regex
-
-/** `EncodingHeuristics` reads from a stream
- * (which should be buffered) and attempts to guess
- * what the encoding of the text in the stream is.
- * If it fails to determine the type of the encoding,
- * it returns the default UTF-8.
- *
- * @author Burak Emir
- * @author Paul Phillips
- */
-object EncodingHeuristics
-{
- object EncodingNames {
- // UCS-4 isn't yet implemented in java releases anyway...
- val bigUCS4 = "UCS-4"
- val littleUCS4 = "UCS-4"
- val unusualUCS4 = "UCS-4"
- val bigUTF16 = "UTF-16BE"
- val littleUTF16 = "UTF-16LE"
- val utf8 = "UTF-8"
- val default = utf8
- }
- import EncodingNames._
-
- /** This utility method attempts to determine the XML character encoding
- * by examining the input stream, as specified at
- * [[http://www.w3.org/TR/xml/#sec-guessing w3]].
- *
- * @param in `InputStream` to read from.
- * @throws IOException if the stream cannot be reset
- * @return the name of the encoding.
- */
- def readEncodingFromStream(in: InputStream): String = {
- var ret: String = null
- val bytesToRead = 1024 // enough to read most XML encoding declarations
- def resetAndRet = { in.reset ; ret }
-
- // This may fail if there are a lot of space characters before the end
- // of the encoding declaration
- in mark bytesToRead
- val bytes = (in.read, in.read, in.read, in.read)
-
- // first look for byte order mark
- ret = bytes match {
- case (0x00, 0x00, 0xFE, 0xFF) => bigUCS4
- case (0xFF, 0xFE, 0x00, 0x00) => littleUCS4
- case (0x00, 0x00, 0xFF, 0xFE) => unusualUCS4
- case (0xFE, 0xFF, 0x00, 0x00) => unusualUCS4
- case (0xFE, 0xFF, _ , _ ) => bigUTF16
- case (0xFF, 0xFE, _ , _ ) => littleUTF16
- case (0xEF, 0xBB, 0xBF, _ ) => utf8
- case _ => null
- }
- if (ret != null)
- return resetAndRet
-
- def readASCIIEncoding: String = {
- val data = new Array[Byte](bytesToRead - 4)
- val length = in.read(data, 0, bytesToRead - 4)
-
- // Use Latin-1 (ISO-8859-1) because all byte sequences are legal.
- val declaration = new String(data, 0, length, "ISO-8859-1")
- val regexp = """(?m).*?encoding\s*=\s*["'](.+?)['"]""".r
- (regexp findFirstMatchIn declaration) match {
- case None => default
- case Some(md) => md.subgroups(0)
- }
- }
-
- // no byte order mark present; first character must be '<' or whitespace
- ret = bytes match {
- case (0x00, 0x00, 0x00, '<' ) => bigUCS4
- case ('<' , 0x00, 0x00, 0x00) => littleUCS4
- case (0x00, 0x00, '<' , 0x00) => unusualUCS4
- case (0x00, '<' , 0x00, 0x00) => unusualUCS4
- case (0x00, '<' , 0x00, '?' ) => bigUTF16 // XXX must read encoding
- case ('<' , 0x00, '?' , 0x00) => littleUTF16 // XXX must read encoding
- case ('<' , '?' , 'x' , 'm' ) => readASCIIEncoding
- case (0x4C, 0x6F, 0xA7, 0x94) => utf8 // XXX EBCDIC
- case _ => utf8 // no XML or text declaration present
- }
- resetAndRet
- }
-}
diff --git a/src/xml/scala/xml/include/sax/XIncludeFilter.scala b/src/xml/scala/xml/include/sax/XIncludeFilter.scala
deleted file mode 100644
index 3fa3beefb0..0000000000
--- a/src/xml/scala/xml/include/sax/XIncludeFilter.scala
+++ /dev/null
@@ -1,373 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package xml
-package include.sax
-
-import scala.xml.include._
-
-import org.xml.sax.{ Attributes, XMLReader, Locator }
-import org.xml.sax.helpers.{ XMLReaderFactory, XMLFilterImpl, NamespaceSupport, AttributesImpl }
-
-import java.io.{ InputStream, BufferedInputStream, InputStreamReader, IOException, UnsupportedEncodingException }
-import java.util.Stack
-import java.net.{ URL, MalformedURLException }
-
-/** This is a SAX filter which resolves all XInclude include elements before
- * passing them on to the client application. Currently this class has the
- * following known deviation from the XInclude specification:
- *
- * 1. XPointer is not supported.
- *
- * Furthermore, I would definitely use a new instance of this class for each
- * document you want to process. I doubt it can be used successfully on
- * multiple documents. Furthermore, I can virtually guarantee that this
- * class is not thread safe. You have been warned.
- *
- * Since this class is not designed to be subclassed, and since I have not
- * yet considered how that might affect the methods herein or what other
- * protected methods might be needed to support subclasses, I have declared
- * this class final. I may remove this restriction later, though the use-case
- * for subclassing is weak. This class is designed to have its functionality
- * extended via a horizontal chain of filters, not a vertical hierarchy of
- * sub and superclasses.
- *
- * To use this class:
- *
- * - Construct an `XIncludeFilter` object with a known base URL
- * - Pass the `XMLReader` object from which the raw document will be read to
- * the `setParent()` method of this object.
- * - Pass your own `ContentHandler` object to the `setContentHandler()`
- * method of this object. This is the object which will receive events
- * from the parsed and included document.
- * - Optional: if you wish to receive comments, set your own `LexicalHandler`
- * object as the value of this object's
- * `http://xml.org/sax/properties/lexical-handler` property.
- * Also make sure your `LexicalHandler` asks this object for the status of
- * each comment using `insideIncludeElement` before doing anything with the
- * comment.
- * - Pass the URL of the document to read to this object's `parse()` method
- *
- * e.g.
- * {{{
- * val includer = new XIncludeFilter(base)
- * includer setParent parser
- * includer setContentHandler new SAXXIncluder(System.out)
- * includer parse args(i)
- * }}}
- * translated from Elliotte Rusty Harold's Java source.
- *
- * @author Burak Emir
- */
-class XIncludeFilter extends XMLFilterImpl {
-
- final val XINCLUDE_NAMESPACE = "http://www.w3.org/2001/XInclude"
-
- private val bases = new Stack[URL]()
- private val locators = new Stack[Locator]()
-
-/* private EntityResolver resolver;
-
- public XIncludeFilter() {
- this(null);
- }
-
- public XIncludeFilter(EntityResolver resolver) {
- this.resolver = resolver;
- } */
-
-
- // what if this isn't called????
- // do I need to check this in startDocument() and push something
- // there????
- override def setDocumentLocator(locator: Locator) {
- locators push locator
- val base = locator.getSystemId()
- try {
- bases.push(new URL(base))
- }
- catch {
- case e:MalformedURLException =>
- throw new UnsupportedOperationException("Unrecognized SYSTEM ID: " + base)
- }
- super.setDocumentLocator(locator)
- }
-
-
- // necessary to throw away contents of non-empty XInclude elements
- private var level = 0
-
- /** This utility method returns true if and only if this reader is
- * currently inside a non-empty include element. (This is '''not''' the
- * same as being inside the node set which replaces the include element.)
- * This is primarily needed for comments inside include elements.
- * It must be checked by the actual `LexicalHandler` to see whether
- * a comment is passed or not.
- *
- * @return boolean
- */
- def insideIncludeElement(): Boolean = level != 0
-
- override def startElement(uri: String, localName: String, qName: String, atts1: Attributes) {
- var atts = atts1
- if (level == 0) { // We're not inside an xi:include element
-
- // Adjust bases stack by pushing either the new
- // value of xml:base or the base of the parent
- val base = atts.getValue(NamespaceSupport.XMLNS, "base")
- val parentBase = bases.peek().asInstanceOf[URL]
- var currentBase = parentBase
- if (base != null) {
- try {
- currentBase = new URL(parentBase, base)
- }
- catch {
- case e: MalformedURLException =>
- throw new SAXException("Malformed base URL: "
- + currentBase, e)
- }
- }
- bases push currentBase
-
- if (uri.equals(XINCLUDE_NAMESPACE) && localName.equals("include")) {
- // include external document
- val href = atts.getValue("href")
- // Verify that there is an href attribute
- if (href == null) {
- throw new SAXException("Missing href attribute")
- }
-
- var parse = atts getValue "parse"
- if (parse == null) parse = "xml"
-
- if (parse equals "text") {
- val encoding = atts getValue "encoding"
- includeTextDocument(href, encoding)
- }
- else if (parse equals "xml") {
- includeXMLDocument(href)
- }
- // Need to check this also in DOM and JDOM????
- else {
- throw new SAXException(
- "Illegal value for parse attribute: " + parse)
- }
- level += 1
- }
- else {
- if (atRoot) {
- // add xml:base attribute if necessary
- val attsImpl = new AttributesImpl(atts)
- attsImpl.addAttribute(NamespaceSupport.XMLNS, "base",
- "xml:base", "CDATA", currentBase.toExternalForm())
- atts = attsImpl
- atRoot = false
- }
- super.startElement(uri, localName, qName, atts)
- }
- }
- }
-
- override def endElement(uri: String, localName: String, qName: String) {
- if (uri.equals(XINCLUDE_NAMESPACE)
- && localName.equals("include")) {
- level -= 1
- }
- else if (level == 0) {
- bases.pop()
- super.endElement(uri, localName, qName)
- }
- }
-
- private var depth = 0
-
- override def startDocument() {
- level = 0
- if (depth == 0) super.startDocument()
- depth += 1
- }
-
- override def endDocument() {
- locators.pop()
- bases.pop() // pop the URL for the document itself
- depth -= 1
- if (depth == 0) super.endDocument()
- }
-
- // how do prefix mappings move across documents????
- override def startPrefixMapping(prefix: String , uri: String) {
- if (level == 0) super.startPrefixMapping(prefix, uri)
- }
-
- override def endPrefixMapping(prefix: String) {
- if (level == 0) super.endPrefixMapping(prefix)
- }
-
- override def characters(ch: Array[Char], start: Int, length: Int) {
- if (level == 0) super.characters(ch, start, length)
- }
-
- override def ignorableWhitespace(ch: Array[Char], start: Int, length: Int) {
- if (level == 0) super.ignorableWhitespace(ch, start, length)
- }
-
- override def processingInstruction(target: String, data: String) {
- if (level == 0) super.processingInstruction(target, data)
- }
-
- override def skippedEntity(name: String) {
- if (level == 0) super.skippedEntity(name)
- }
-
- // convenience method for error messages
- private def getLocation(): String = {
- var locationString = ""
- val locator = locators.peek().asInstanceOf[Locator]
- var publicID = ""
- var systemID = ""
- var column = -1
- var line = -1
- if (locator != null) {
- publicID = locator.getPublicId()
- systemID = locator.getSystemId()
- line = locator.getLineNumber()
- column = locator.getColumnNumber()
- }
- locationString = (" in document included from " + publicID
- + " at " + systemID
- + " at line " + line + ", column " + column)
-
- locationString
- }
-
- /** This utility method reads a document at a specified URL and fires off
- * calls to `characters()`. It's used to include files with `parse="text"`.
- *
- * @param url URL of the document that will be read
- * @param encoding1 Encoding of the document; e.g. UTF-8,
- * ISO-8859-1, etc.
- * @return void
- * @throws SAXException if the requested document cannot
- be downloaded from the specified URL
- or if the encoding is not recognized
- */
- private def includeTextDocument(url: String, encoding1: String) {
- var encoding = encoding1
- if (encoding == null || encoding.trim().equals("")) encoding = "UTF-8"
- var source: URL = null
- try {
- val base = bases.peek().asInstanceOf[URL]
- source = new URL(base, url)
- }
- catch {
- case e: MalformedURLException =>
- val ex = new UnavailableResourceException("Unresolvable URL " + url
- + getLocation())
- ex.setRootCause(e)
- throw new SAXException("Unresolvable URL " + url + getLocation(), ex)
- }
-
- try {
- val uc = source.openConnection()
- val in = new BufferedInputStream(uc.getInputStream())
- val encodingFromHeader = uc.getContentEncoding()
- var contentType = uc.getContentType()
- if (encodingFromHeader != null)
- encoding = encodingFromHeader
- else {
- // What if file does not have a MIME type but name ends in .xml????
- // MIME types are case-insensitive
- // Java may be picking this up from file URL
- if (contentType != null) {
- contentType = contentType.toLowerCase()
- if (contentType.equals("text/xml")
- || contentType.equals("application/xml")
- || (contentType.startsWith("text/") && contentType.endsWith("+xml") )
- || (contentType.startsWith("application/") && contentType.endsWith("+xml"))) {
- encoding = EncodingHeuristics.readEncodingFromStream(in)
- }
- }
- }
- val reader = new InputStreamReader(in, encoding)
- val c = new Array[Char](1024)
- var charsRead: Int = 0 // bogus init value
- do {
- charsRead = reader.read(c, 0, 1024)
- if (charsRead > 0) this.characters(c, 0, charsRead)
- } while (charsRead != -1)
- }
- catch {
- case e: UnsupportedEncodingException =>
- throw new SAXException("Unsupported encoding: "
- + encoding + getLocation(), e)
- case e: IOException =>
- throw new SAXException("Document not found: "
- + source.toExternalForm() + getLocation(), e)
- }
-
- }
-
- private var atRoot = false
-
- /** This utility method reads a document at a specified URL
- * and fires off calls to various `ContentHandler` methods.
- * It's used to include files with `parse="xml"`.
- *
- * @param url URL of the document that will be read
- * @return void
- * @throws SAXException if the requested document cannot
- be downloaded from the specified URL.
- */
- private def includeXMLDocument(url: String) {
- val source =
- try new URL(bases.peek(), url)
- catch {
- case e: MalformedURLException =>
- val ex = new UnavailableResourceException("Unresolvable URL " + url + getLocation())
- ex setRootCause e
- throw new SAXException("Unresolvable URL " + url + getLocation(), ex)
- }
-
- try {
- val parser: XMLReader =
- try XMLReaderFactory.createXMLReader()
- catch {
- case e: SAXException =>
- try XMLReaderFactory.createXMLReader(XercesClassName)
- catch { case _: SAXException => return System.err.println("Could not find an XML parser") }
- }
-
- parser setContentHandler this
- val resolver = this.getEntityResolver()
- if (resolver != null)
- parser setEntityResolver resolver
-
- // save old level and base
- val previousLevel = level
- this.level = 0
- if (bases contains source)
- throw new SAXException(
- "Circular XInclude Reference",
- new CircularIncludeException("Circular XInclude Reference to " + source + getLocation())
- )
-
- bases push source
- atRoot = true
- parser parse source.toExternalForm()
-
- // restore old level and base
- this.level = previousLevel
- bases.pop()
- }
- catch {
- case e: IOException =>
- throw new SAXException("Document not found: " + source.toExternalForm() + getLocation(), e)
- }
- }
-}
diff --git a/src/xml/scala/xml/include/sax/XIncluder.scala b/src/xml/scala/xml/include/sax/XIncluder.scala
deleted file mode 100644
index 1939fa1875..0000000000
--- a/src/xml/scala/xml/include/sax/XIncluder.scala
+++ /dev/null
@@ -1,187 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package xml
-package include.sax
-
-import scala.collection.mutable
-import org.xml.sax.{ ContentHandler, XMLReader, Locator, Attributes }
-import org.xml.sax.ext.LexicalHandler
-import java.io.{ File, OutputStream, OutputStreamWriter, Writer, IOException }
-
-/** XIncluder is a SAX `ContentHandler` that writes its XML document onto
- * an output stream after resolving all `xinclude:include` elements.
- *
- * Based on Eliotte Rusty Harold's SAXXIncluder.
- */
-class XIncluder(outs: OutputStream, encoding: String) extends ContentHandler with LexicalHandler {
-
- var out = new OutputStreamWriter(outs, encoding)
-
- def setDocumentLocator(locator: Locator) {}
-
- def startDocument() {
- try {
- out.write("<?xml version='1.0' encoding='"
- + encoding + "'?>\r\n")
- }
- catch {
- case e:IOException =>
- throw new SAXException("Write failed", e)
- }
- }
-
- def endDocument() {
- try {
- out.flush()
- }
- catch {
- case e:IOException =>
- throw new SAXException("Flush failed", e)
- }
- }
-
- def startPrefixMapping(prefix: String , uri: String) {}
-
- def endPrefixMapping(prefix: String) {}
-
- def startElement(namespaceURI: String, localName: String, qualifiedName: String, atts: Attributes) = {
- try {
- out.write("<" + qualifiedName)
- var i = 0; while (i < atts.getLength()) {
- out.write(" ")
- out.write(atts.getQName(i))
- out.write("='")
- val value = atts.getValue(i)
- // @todo Need to use character references if the encoding
- // can't support the character
- out.write(scala.xml.Utility.escape(value))
- out.write("'")
- i += 1
- }
- out.write(">")
- }
- catch {
- case e:IOException =>
- throw new SAXException("Write failed", e)
- }
- }
-
- def endElement(namespaceURI: String, localName:String, qualifiedName: String) {
- try {
- out.write("</" + qualifiedName + ">")
- }
- catch {
- case e: IOException =>
- throw new SAXException("Write failed", e)
- }
- }
-
- // need to escape characters that are not in the given
- // encoding using character references????
- def characters(ch: Array[Char], start: Int, length: Int) {
- try {
- var i = 0; while (i < length) {
- val c = ch(start+i)
- if (c == '&') out.write("&amp;")
- else if (c == '<') out.write("&lt;")
- // This next fix is normally not necessary.
- // However, it is required if text contains ]]>
- // (The end CDATA section delimiter)
- else if (c == '>') out.write("&gt;")
- else out.write(c.toInt)
- i += 1
- }
- }
- catch {
- case e: IOException =>
- throw new SAXException("Write failed", e)
- }
- }
-
- def ignorableWhitespace(ch: Array[Char], start: Int , length: Int) {
- this.characters(ch, start, length)
- }
-
- // do I need to escape text in PI????
- def processingInstruction(target: String, data: String) {
- try {
- out.write("<?" + target + " " + data + "?>")
- }
- catch {
- case e:IOException =>
- throw new SAXException("Write failed", e)
- }
- }
-
- def skippedEntity(name: String) {
- try {
- out.write("&" + name + ";")
- }
- catch {
- case e:IOException =>
- throw new SAXException("Write failed", e)
- }
- }
-
- // LexicalHandler methods
- private var inDTD: Boolean = false
- private val entities = new mutable.Stack[String]()
-
- def startDTD(name: String, publicID: String, systemID: String) {
- inDTD = true
- // if this is the source document, output a DOCTYPE declaration
- if (entities.isEmpty) {
- var id = ""
- if (publicID != null) id = " PUBLIC \"" + publicID + "\" \"" + systemID + '"'
- else if (systemID != null) id = " SYSTEM \"" + systemID + '"'
- try {
- out.write("<!DOCTYPE " + name + id + ">\r\n")
- }
- catch {
- case e:IOException =>
- throw new SAXException("Error while writing DOCTYPE", e)
- }
- }
- }
- def endDTD() {}
-
- def startEntity(name: String) {
- entities push name
- }
-
- def endEntity(name: String) {
- entities.pop()
- }
-
- def startCDATA() {}
- def endCDATA() {}
-
- // Just need this reference so we can ask if a comment is
- // inside an include element or not
- private var filter: XIncludeFilter = null
-
- def setFilter(filter: XIncludeFilter) {
- this.filter = filter
- }
-
- def comment(ch: Array[Char], start: Int, length: Int) {
- if (!inDTD && !filter.insideIncludeElement()) {
- try {
- out.write("<!--")
- out.write(ch, start, length)
- out.write("-->")
- }
- catch {
- case e: IOException =>
- throw new SAXException("Write failed", e)
- }
- }
- }
-}
diff --git a/src/xml/scala/xml/package.scala b/src/xml/scala/xml/package.scala
deleted file mode 100644
index 4001cc5ffb..0000000000
--- a/src/xml/scala/xml/package.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-
-package object xml {
- val XercesClassName = "org.apache.xerces.parsers.SAXParser"
-
- type SAXException = org.xml.sax.SAXException
- type SAXParseException = org.xml.sax.SAXParseException
- type EntityResolver = org.xml.sax.EntityResolver
- type InputSource = org.xml.sax.InputSource
- type SAXParser = javax.xml.parsers.SAXParser
-}
diff --git a/src/xml/scala/xml/parsing/ConstructingHandler.scala b/src/xml/scala/xml/parsing/ConstructingHandler.scala
deleted file mode 100755
index ba416e4301..0000000000
--- a/src/xml/scala/xml/parsing/ConstructingHandler.scala
+++ /dev/null
@@ -1,34 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala
-package xml
-package parsing
-
-/** Implementation of MarkupHandler that constructs nodes.
- *
- * @author Burak Emir
- * @version 1.0
- */
-abstract class ConstructingHandler extends MarkupHandler
-{
- val preserveWS: Boolean
-
- def elem(pos: Int, pre: String, label: String, attrs: MetaData,
- pscope: NamespaceBinding, empty: Boolean, nodes: NodeSeq): NodeSeq =
- Elem(pre, label, attrs, pscope, empty, nodes:_*)
-
- def procInstr(pos: Int, target: String, txt: String) =
- ProcInstr(target, txt)
-
- def comment(pos: Int, txt: String) = Comment(txt)
- def entityRef(pos: Int, n: String) = EntityRef(n)
- def text(pos: Int, txt: String) = Text(txt)
-}
diff --git a/src/xml/scala/xml/parsing/ConstructingParser.scala b/src/xml/scala/xml/parsing/ConstructingParser.scala
deleted file mode 100644
index 3caeddabf4..0000000000
--- a/src/xml/scala/xml/parsing/ConstructingParser.scala
+++ /dev/null
@@ -1,55 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala
-package xml
-package parsing
-
-import java.io.File
-import scala.io.Source
-
-object ConstructingParser {
- def fromFile(inp: File, preserveWS: Boolean) =
- new ConstructingParser(Source.fromFile(inp), preserveWS).initialize
-
- def fromSource(inp: Source, preserveWS: Boolean) =
- new ConstructingParser(inp, preserveWS).initialize
-}
-
-/** An xml parser. parses XML and invokes callback methods of a MarkupHandler.
- * Don't forget to call next.ch on a freshly instantiated parser in order to
- * initialize it. If you get the parser from the object method, initialization
- * is already done for you.
- *
- * {{{
- * object parseFromURL {
- * def main(args: Array[String]) {
- * val url = args(0)
- * val src = scala.io.Source.fromURL(url)
- * val cpa = scala.xml.parsing.ConstructingParser.fromSource(src, false) // fromSource initializes automatically
- * val doc = cpa.document()
- *
- * // let's see what it is
- * val ppr = new scala.xml.PrettyPrinter(80, 5)
- * val ele = doc.docElem
- * println("finished parsing")
- * val out = ppr.format(ele)
- * println(out)
- * }
- * }
- * }}} */
-class ConstructingParser(val input: Source, val preserveWS: Boolean)
-extends ConstructingHandler
-with ExternalSources
-with MarkupParser {
-
- // default impl. of Logged
- override def log(msg: String): Unit = {}
-}
diff --git a/src/xml/scala/xml/parsing/DefaultMarkupHandler.scala b/src/xml/scala/xml/parsing/DefaultMarkupHandler.scala
deleted file mode 100755
index 6ec7474843..0000000000
--- a/src/xml/scala/xml/parsing/DefaultMarkupHandler.scala
+++ /dev/null
@@ -1,30 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala
-package xml
-package parsing
-
-
-/** Default implementation of markup handler always returns `NodeSeq.Empty` */
-abstract class DefaultMarkupHandler extends MarkupHandler {
-
- def elem(pos: Int, pre: String, label: String, attrs: MetaData,
- scope:NamespaceBinding, empty: Boolean, args: NodeSeq) = NodeSeq.Empty
-
- def procInstr(pos: Int, target: String, txt: String) = NodeSeq.Empty
-
- def comment(pos: Int, comment: String ): NodeSeq = NodeSeq.Empty
-
- def entityRef(pos: Int, n: String) = NodeSeq.Empty
-
- def text(pos: Int, txt:String) = NodeSeq.Empty
-
-}
diff --git a/src/xml/scala/xml/parsing/ExternalSources.scala b/src/xml/scala/xml/parsing/ExternalSources.scala
deleted file mode 100644
index bb939bca95..0000000000
--- a/src/xml/scala/xml/parsing/ExternalSources.scala
+++ /dev/null
@@ -1,38 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala
-package xml
-package parsing
-
-import java.net.URL
-import java.io.File.separator
-
-import scala.io.Source
-
-/**
- * @author Burak Emir
- * @version 1.0
- */
-trait ExternalSources {
- self: ExternalSources with MarkupParser with MarkupHandler =>
-
- def externalSource(systemId: String): Source = {
- if (systemId startsWith "http:")
- return Source fromURL new URL(systemId)
-
- val fileStr: String = input.descr match {
- case x if x startsWith "file:" => x drop 5
- case x => x take ((x lastIndexOf separator) + 1)
- }
-
- Source.fromFile(fileStr + systemId)
- }
-}
diff --git a/src/xml/scala/xml/parsing/FactoryAdapter.scala b/src/xml/scala/xml/parsing/FactoryAdapter.scala
deleted file mode 100644
index 2154bdf5ba..0000000000
--- a/src/xml/scala/xml/parsing/FactoryAdapter.scala
+++ /dev/null
@@ -1,187 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package xml
-package parsing
-
-import java.io.{ InputStream, Reader, File, FileDescriptor, FileInputStream }
-import scala.collection.{ mutable, Iterator }
-import org.xml.sax.Attributes
-import org.xml.sax.helpers.DefaultHandler
-
-// can be mixed into FactoryAdapter if desired
-trait ConsoleErrorHandler extends DefaultHandler {
- // ignore warning, crimson warns even for entity resolution!
- override def warning(ex: SAXParseException): Unit = { }
- override def error(ex: SAXParseException): Unit = printError("Error", ex)
- override def fatalError(ex: SAXParseException): Unit = printError("Fatal Error", ex)
-
- protected def printError(errtype: String, ex: SAXParseException): Unit =
- Console.withOut(Console.err) {
- val s = "[%s]:%d:%d: %s".format(
- errtype, ex.getLineNumber, ex.getColumnNumber, ex.getMessage)
- Console.println(s)
- Console.flush()
- }
-}
-
-/** SAX adapter class, for use with Java SAX parser. Keeps track of
- * namespace bindings, without relying on namespace handling of the
- * underlying SAX parser.
- */
-abstract class FactoryAdapter extends DefaultHandler with factory.XMLLoader[Node] {
- var rootElem: Node = null
-
- val buffer = new StringBuilder()
- val attribStack = new mutable.Stack[MetaData]
- val hStack = new mutable.Stack[Node] // [ element ] contains siblings
- val tagStack = new mutable.Stack[String]
- var scopeStack = new mutable.Stack[NamespaceBinding]
-
- var curTag : String = null
- var capture: Boolean = false
-
- // abstract methods
-
- /** Tests if an XML element contains text.
- * @return true if element named `localName` contains text.
- */
- def nodeContainsText(localName: String): Boolean // abstract
-
- /** creates an new non-text(tree) node.
- * @param elemName
- * @param attribs
- * @param chIter
- * @return a new XML element.
- */
- def createNode(pre: String, elemName: String, attribs: MetaData,
- scope: NamespaceBinding, chIter: List[Node]): Node // abstract
-
- /** creates a Text node.
- * @param text
- * @return a new Text node.
- */
- def createText(text: String): Text // abstract
-
- /** creates a new processing instruction node.
- */
- def createProcInstr(target: String, data: String): Seq[ProcInstr]
-
- //
- // ContentHandler methods
- //
-
- val normalizeWhitespace = false
-
- /** Characters.
- * @param ch
- * @param offset
- * @param length
- */
- override def characters(ch: Array[Char], offset: Int, length: Int): Unit = {
- if (!capture) return
- // compliant: report every character
- else if (!normalizeWhitespace) buffer.appendAll(ch, offset, length)
- // normalizing whitespace is not compliant, but useful
- else {
- var it = ch.slice(offset, offset + length).iterator
- while (it.hasNext) {
- val c = it.next()
- val isSpace = c.isWhitespace
- buffer append (if (isSpace) ' ' else c)
- if (isSpace)
- it = it dropWhile (_.isWhitespace)
- }
- }
- }
-
- private def splitName(s: String) = {
- val idx = s indexOf ':'
- if (idx < 0) (null, s)
- else (s take idx, s drop (idx + 1))
- }
-
- /* ContentHandler methods */
-
- /* Start element. */
- override def startElement(
- uri: String,
- _localName: String,
- qname: String,
- attributes: Attributes): Unit =
- {
- captureText()
- tagStack push curTag
- curTag = qname
-
- val localName = splitName(qname)._2
- capture = nodeContainsText(localName)
-
- hStack push null
- var m: MetaData = Null
- var scpe: NamespaceBinding =
- if (scopeStack.isEmpty) TopScope
- else scopeStack.top
-
- for (i <- 0 until attributes.getLength()) {
- val qname = attributes getQName i
- val value = attributes getValue i
- val (pre, key) = splitName(qname)
- def nullIfEmpty(s: String) = if (s == "") null else s
-
- if (pre == "xmlns" || (pre == null && qname == "xmlns")) {
- val arg = if (pre == null) null else key
- scpe = new NamespaceBinding(arg, nullIfEmpty(value), scpe)
- }
- else
- m = Attribute(Option(pre), key, Text(value), m)
- }
-
- scopeStack push scpe
- attribStack push m
- }
-
-
- /** captures text, possibly normalizing whitespace
- */
- def captureText(): Unit = {
- if (capture && buffer.length > 0)
- hStack push createText(buffer.toString)
-
- buffer.clear()
- }
-
- /** End element.
- * @param uri
- * @param _localName
- * @param qname
- * @throws org.xml.sax.SAXException if ..
- */
- override def endElement(uri: String , _localName: String, qname: String): Unit = {
- captureText()
- val metaData = attribStack.pop()
-
- // reverse order to get it right
- val v = (Iterator continually hStack.pop takeWhile (_ != null)).toList.reverse
- val (pre, localName) = splitName(qname)
- val scp = scopeStack.pop()
-
- // create element
- rootElem = createNode(pre, localName, metaData, scp, v)
- hStack push rootElem
- curTag = tagStack.pop()
- capture = curTag != null && nodeContainsText(curTag) // root level
- }
-
- /** Processing instruction.
- */
- override def processingInstruction(target: String, data: String) {
- hStack pushAll createProcInstr(target, data)
- }
-}
diff --git a/src/xml/scala/xml/parsing/FatalError.scala b/src/xml/scala/xml/parsing/FatalError.scala
deleted file mode 100644
index ab3cb2a74d..0000000000
--- a/src/xml/scala/xml/parsing/FatalError.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala
-package xml
-package parsing
-
-/** !!! This is poorly named, but I guess it's in the API.
- */
-case class FatalError(msg: String) extends java.lang.RuntimeException(msg)
diff --git a/src/xml/scala/xml/parsing/MarkupHandler.scala b/src/xml/scala/xml/parsing/MarkupHandler.scala
deleted file mode 100755
index 1ebffb9c90..0000000000
--- a/src/xml/scala/xml/parsing/MarkupHandler.scala
+++ /dev/null
@@ -1,127 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala
-package xml
-package parsing
-
-import scala.collection.mutable
-import scala.io.Source
-import scala.xml.dtd._
-
-/** class that handles markup - provides callback methods to MarkupParser.
- * the default is nonvalidating behaviour
- *
- * @author Burak Emir
- * @version 1.0
- *
- * @todo can we ignore more entity declarations (i.e. those with extIDs)?
- * @todo expanding entity references
- */
-abstract class MarkupHandler {
-
- /** returns true is this markup handler is validating */
- val isValidating: Boolean = false
-
- var decls: List[Decl] = Nil
- var ent: mutable.Map[String, EntityDecl] = new mutable.HashMap[String, EntityDecl]()
-
- def lookupElemDecl(Label: String): ElemDecl = {
- for (z @ ElemDecl(Label, _) <- decls)
- return z
-
- null
- }
-
- def replacementText(entityName: String): Source =
- Source fromString ((ent get entityName) match {
- case Some(ParsedEntityDecl(_, IntDef(value))) => value
- case Some(ParameterEntityDecl(_, IntDef(value))) => " %s " format value
- case Some(_) => "<!-- %s; -->" format entityName
- case None => "<!-- unknown entity %s; -->" format entityName
- })
-
- def endDTD(n: String): Unit = ()
-
- /** callback method invoked by MarkupParser after start-tag of element.
- *
- * @param pos the position in the sourcefile
- * @param pre the prefix
- * @param label the local name
- * @param attrs the attributes (metadata)
- */
- def elemStart(pos: Int, pre: String, label: String, attrs: MetaData, scope: NamespaceBinding): Unit = ()
-
- /** callback method invoked by MarkupParser after end-tag of element.
- *
- * @param pos the position in the source file
- * @param pre the prefix
- * @param label the local name
- */
- def elemEnd(pos: Int, pre: String, label: String): Unit = ()
-
- /** callback method invoked by MarkupParser after parsing an element,
- * between the elemStart and elemEnd callbacks
- *
- * @param pos the position in the source file
- * @param pre the prefix
- * @param label the local name
- * @param attrs the attributes (metadata)
- * @param empty `true` if the element was previously empty; `false` otherwise.
- * @param args the children of this element
- */
- def elem(pos: Int, pre: String, label: String, attrs: MetaData, scope: NamespaceBinding, empty: Boolean, args: NodeSeq): NodeSeq
-
- /** callback method invoked by MarkupParser after parsing PI.
- */
- def procInstr(pos: Int, target: String, txt: String): NodeSeq
-
- /** callback method invoked by MarkupParser after parsing comment.
- */
- def comment(pos: Int, comment: String): NodeSeq
-
- /** callback method invoked by MarkupParser after parsing entity ref.
- * @todo expanding entity references
- */
- def entityRef(pos: Int, n: String): NodeSeq
-
- /** callback method invoked by MarkupParser after parsing text.
- */
- def text(pos: Int, txt: String): NodeSeq
-
- // DTD handler methods
-
- def elemDecl(n: String, cmstr: String): Unit = ()
-
- def attListDecl(name: String, attList: List[AttrDecl]): Unit = ()
-
- private def someEntityDecl(name: String, edef: EntityDef, f: (String, EntityDef) => EntityDecl): Unit =
- edef match {
- case _: ExtDef if !isValidating => // ignore (cf REC-xml 4.4.1)
- case _ =>
- val y = f(name, edef)
- decls ::= y
- ent.update(name, y)
- }
-
- def parameterEntityDecl(name: String, edef: EntityDef): Unit =
- someEntityDecl(name, edef, ParameterEntityDecl.apply _)
-
- def parsedEntityDecl(name: String, edef: EntityDef): Unit =
- someEntityDecl(name, edef, ParsedEntityDecl.apply _)
-
- def peReference(name: String) { decls ::= PEReference(name) }
- def unparsedEntityDecl(name: String, extID: ExternalID, notat: String): Unit = ()
- def notationDecl(notat: String, extID: ExternalID): Unit = ()
- def reportSyntaxError(pos: Int, str: String): Unit
-
- @deprecated("This method and its usages will be removed. Use a debugger to debug code.", "2.11")
- def log(msg: String): Unit = {}
-}
diff --git a/src/xml/scala/xml/parsing/MarkupParser.scala b/src/xml/scala/xml/parsing/MarkupParser.scala
deleted file mode 100755
index 3bbd136b67..0000000000
--- a/src/xml/scala/xml/parsing/MarkupParser.scala
+++ /dev/null
@@ -1,938 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package xml
-package parsing
-
-import scala.io.Source
-import scala.xml.dtd._
-import Utility.Escapes.{ pairs => unescape }
-
-/**
- * An XML parser.
- *
- * Parses XML 1.0, invokes callback methods of a `MarkupHandler` and returns
- * whatever the markup handler returns. Use `ConstructingParser` if you just
- * want to parse XML to construct instances of `scala.xml.Node`.
- *
- * While XML elements are returned, DTD declarations - if handled - are
- * collected using side-effects.
- *
- * @author Burak Emir
- * @version 1.0
- */
-trait MarkupParser extends MarkupParserCommon with TokenTests
-{
- self: MarkupParser with MarkupHandler =>
-
- type PositionType = Int
- type InputType = Source
- type ElementType = NodeSeq
- type AttributesType = (MetaData, NamespaceBinding)
- type NamespaceType = NamespaceBinding
-
- def truncatedError(msg: String): Nothing = throw FatalError(msg)
- def errorNoEnd(tag: String) = throw FatalError("expected closing tag of " + tag)
-
- def xHandleError(that: Char, msg: String) = reportSyntaxError(msg)
-
- val input: Source
-
- /** if true, does not remove surplus whitespace */
- val preserveWS: Boolean
-
- def externalSource(systemLiteral: String): Source
-
- //
- // variables, values
- //
-
- protected var curInput: Source = input
-
- // See ticket #3720 for motivations.
- private class WithLookAhead(underlying: Source) extends Source {
- private val queue = scala.collection.mutable.Queue[Char]()
- def lookahead(): BufferedIterator[Char] = {
- val iter = queue.iterator ++ new Iterator[Char] {
- def hasNext = underlying.hasNext
- def next() = { val x = underlying.next(); queue += x; x }
- }
- iter.buffered
- }
- val iter = new Iterator[Char] {
- def hasNext = underlying.hasNext || !queue.isEmpty
- def next() = if (!queue.isEmpty) queue.dequeue() else underlying.next()
- }
- }
-
- def lookahead(): BufferedIterator[Char] = curInput match {
- case curInputWLA:WithLookAhead =>
- curInputWLA.lookahead()
- case _ =>
- val newInput = new WithLookAhead(curInput)
- curInput = newInput
- newInput.lookahead()
- }
-
-
- /** the handler of the markup, returns this */
- private val handle: MarkupHandler = this
-
- /** stack of inputs */
- var inpStack: List[Source] = Nil
-
- /** holds the position in the source file */
- var pos: Int = _
-
- /* used when reading external subset */
- var extIndex = -1
-
- /** holds temporary values of pos */
- var tmppos: Int = _
-
- /** holds the next character */
- var nextChNeeded: Boolean = false
- var reachedEof: Boolean = false
- var lastChRead: Char = _
- def ch: Char = {
- if (nextChNeeded) {
- if (curInput.hasNext) {
- lastChRead = curInput.next()
- pos = curInput.pos
- } else {
- val ilen = inpStack.length
- //Console.println(" ilen = "+ilen+ " extIndex = "+extIndex);
- if ((ilen != extIndex) && (ilen > 0)) {
- /* for external source, inpStack == Nil ! need notify of eof! */
- pop()
- } else {
- reachedEof = true
- lastChRead = 0.asInstanceOf[Char]
- }
- }
- nextChNeeded = false
- }
- lastChRead
- }
-
- /** character buffer, for names */
- protected val cbuf = new StringBuilder()
-
- var dtd: DTD = null
-
- protected var doc: Document = null
-
- def eof: Boolean = { ch; reachedEof }
-
- //
- // methods
- //
-
- /** {{{
- * <? prolog ::= xml S ... ?>
- * }}} */
- def xmlProcInstr(): MetaData = {
- xToken("xml")
- xSpace()
- val (md,scp) = xAttributes(TopScope)
- if (scp != TopScope)
- reportSyntaxError("no xmlns definitions here, please.")
- xToken('?')
- xToken('>')
- md
- }
-
- /** Factored out common code.
- */
- private def prologOrTextDecl(isProlog: Boolean): (Option[String], Option[String], Option[Boolean]) = {
- var info_ver: Option[String] = None
- var info_enc: Option[String] = None
- var info_stdl: Option[Boolean] = None
-
- val m = xmlProcInstr()
- var n = 0
-
- if (isProlog)
- xSpaceOpt()
-
- m("version") match {
- case null =>
- case Text("1.0") => info_ver = Some("1.0"); n += 1
- case _ => reportSyntaxError("cannot deal with versions != 1.0")
- }
-
- m("encoding") match {
- case null =>
- case Text(enc) =>
- if (!isValidIANAEncoding(enc))
- reportSyntaxError("\"" + enc + "\" is not a valid encoding")
- else {
- info_enc = Some(enc)
- n += 1
- }
- }
-
- if (isProlog) {
- m("standalone") match {
- case null =>
- case Text("yes") => info_stdl = Some(true); n += 1
- case Text("no") => info_stdl = Some(false); n += 1
- case _ => reportSyntaxError("either 'yes' or 'no' expected")
- }
- }
-
- if (m.length - n != 0) {
- val s = if (isProlog) "SDDecl? " else ""
- reportSyntaxError("VersionInfo EncodingDecl? %sor '?>' expected!" format s)
- }
-
- (info_ver, info_enc, info_stdl)
- }
-
- /** {{{
- * <? prolog ::= xml S?
- * // this is a bit more lenient than necessary...
- * }}} */
- def prolog(): (Option[String], Option[String], Option[Boolean]) =
- prologOrTextDecl(isProlog = true)
-
- /** prolog, but without standalone */
- def textDecl(): (Option[String], Option[String]) =
- prologOrTextDecl(isProlog = false) match { case (x1, x2, _) => (x1, x2) }
-
- /** {{{
- * [22] prolog ::= XMLDecl? Misc* (doctypedecl Misc*)?
- * [23] XMLDecl ::= '<?xml' VersionInfo EncodingDecl? SDDecl? S? '?>'
- * [24] VersionInfo ::= S 'version' Eq ("'" VersionNum "'" | '"' VersionNum '"')
- * [25] Eq ::= S? '=' S?
- * [26] VersionNum ::= '1.0'
- * [27] Misc ::= Comment | PI | S
- * }}} */
- def document(): Document = {
- doc = new Document()
-
- this.dtd = null
- var info_prolog: (Option[String], Option[String], Option[Boolean]) = (None, None, None)
- if ('<' != ch) {
- reportSyntaxError("< expected")
- return null
- }
-
- nextch() // is prolog ?
- var children: NodeSeq = null
- if ('?' == ch) {
- nextch()
- info_prolog = prolog()
- doc.version = info_prolog._1
- doc.encoding = info_prolog._2
- doc.standAlone = info_prolog._3
-
- children = content(TopScope) // DTD handled as side effect
- }
- else {
- val ts = new NodeBuffer()
- content1(TopScope, ts) // DTD handled as side effect
- ts &+ content(TopScope)
- children = NodeSeq.fromSeq(ts)
- }
- //println("[MarkupParser::document] children now: "+children.toList)
- var elemCount = 0
- var theNode: Node = null
- for (c <- children) c match {
- case _:ProcInstr =>
- case _:Comment =>
- case _:EntityRef => // todo: fix entities, shouldn't be "special"
- reportSyntaxError("no entity references allowed here")
- case s:SpecialNode =>
- if (s.toString.trim().length > 0) //non-empty text nodes not allowed
- elemCount += 2
- case m:Node =>
- elemCount += 1
- theNode = m
- }
- if (1 != elemCount) {
- reportSyntaxError("document must contain exactly one element")
- Console.println(children.toList)
- }
-
- doc.children = children
- doc.docElem = theNode
- doc
- }
-
- /** append Unicode character to name buffer*/
- protected def putChar(c: Char) = cbuf append c
-
- /** As the current code requires you to call nextch once manually
- * after construction, this method formalizes that suboptimal reality.
- */
- def initialize: this.type = {
- nextch()
- this
- }
-
- protected def ch_returning_nextch: Char = { val res = ch; nextch(); res }
-
- def mkAttributes(name: String, pscope: NamespaceBinding): AttributesType =
- if (isNameStart (ch)) xAttributes(pscope)
- else (Null, pscope)
-
- def mkProcInstr(position: Int, name: String, text: String): ElementType =
- handle.procInstr(position, name, text)
-
- /** this method tells ch to get the next character when next called */
- def nextch() {
- // Read current ch if needed
- ch
-
- // Mark next ch to be required
- nextChNeeded = true
- }
-
- /** parse attribute and create namespace scope, metadata
- * {{{
- * [41] Attributes ::= { S Name Eq AttValue }
- * }}}
- */
- def xAttributes(pscope: NamespaceBinding): (MetaData, NamespaceBinding) = {
- var scope: NamespaceBinding = pscope
- var aMap: MetaData = Null
- while (isNameStart(ch)) {
- val qname = xName
- xEQ() // side effect
- val value = xAttributeValue()
-
- Utility.prefix(qname) match {
- case Some("xmlns") =>
- val prefix = qname.substring(6 /*xmlns:*/ , qname.length)
- scope = new NamespaceBinding(prefix, value, scope)
-
- case Some(prefix) =>
- val key = qname.substring(prefix.length+1, qname.length)
- aMap = new PrefixedAttribute(prefix, key, Text(value), aMap)
-
- case _ =>
- if( qname == "xmlns" )
- scope = new NamespaceBinding(null, value, scope)
- else
- aMap = new UnprefixedAttribute(qname, Text(value), aMap)
- }
-
- if ((ch != '/') && (ch != '>') && ('?' != ch))
- xSpace()
- }
-
- if(!aMap.wellformed(scope))
- reportSyntaxError( "double attribute")
-
- (aMap,scope)
- }
-
- /** entity value, terminated by either ' or ". value may not contain &lt;.
- * {{{
- * AttValue ::= `'` { _ } `'`
- * | `"` { _ } `"`
- * }}}
- */
- def xEntityValue(): String = {
- val endch = ch
- nextch()
- while (ch != endch && !eof) {
- putChar(ch)
- nextch()
- }
- nextch()
- val str = cbuf.toString()
- cbuf.length = 0
- str
- }
-
- /** {{{
- * '<! CharData ::= [CDATA[ ( {char} - {char}"]]>"{char} ) ']]>'
- *
- * see [15]
- * }}} */
- def xCharData: NodeSeq = {
- xToken("[CDATA[")
- def mkResult(pos: Int, s: String): NodeSeq = {
- handle.text(pos, s)
- PCData(s)
- }
- xTakeUntil(mkResult, () => pos, "]]>")
- }
-
- /** {{{
- * Comment ::= '<!--' ((Char - '-') | ('-' (Char - '-')))* '-->'
- *
- * see [15]
- * }}} */
- def xComment: NodeSeq = {
- val sb: StringBuilder = new StringBuilder()
- xToken("--")
- while (true) {
- if (ch == '-' && { sb.append(ch); nextch(); ch == '-' }) {
- sb.length = sb.length - 1
- nextch()
- xToken('>')
- return handle.comment(pos, sb.toString())
- } else sb.append(ch)
- nextch()
- }
- throw FatalError("this cannot happen")
- }
-
- /* todo: move this into the NodeBuilder class */
- def appendText(pos: Int, ts: NodeBuffer, txt: String): Unit = {
- if (preserveWS)
- ts &+ handle.text(pos, txt)
- else
- for (t <- TextBuffer.fromString(txt).toText) {
- ts &+ handle.text(pos, t.text)
- }
- }
-
- /** {{{
- * '<' content1 ::= ...
- * }}} */
- def content1(pscope: NamespaceBinding, ts: NodeBuffer) {
- ch match {
- case '!' =>
- nextch()
- if ('[' == ch) // CDATA
- ts &+ xCharData
- else if ('D' == ch) // doctypedecl, parse DTD // @todo REMOVE HACK
- parseDTD()
- else // comment
- ts &+ xComment
- case '?' => // PI
- nextch()
- ts &+ xProcInstr
- case _ =>
- ts &+ element1(pscope) // child
- }
- }
-
- /** {{{
- * content1 ::= '<' content1 | '&' charref ...
- * }}} */
- def content(pscope: NamespaceBinding): NodeSeq = {
- val ts = new NodeBuffer
- var exit = eof
- // todo: optimize seq repr.
- def done = new NodeSeq { val theSeq = ts.toList }
-
- while (!exit) {
- tmppos = pos
- exit = eof
-
- if (eof)
- return done
-
- ch match {
- case '<' => // another tag
- nextch(); ch match {
- case '/' => exit = true // end tag
- case _ => content1(pscope, ts)
- }
-
- // postcond: xEmbeddedBlock == false!
- case '&' => // EntityRef or CharRef
- nextch(); ch match {
- case '#' => // CharacterRef
- nextch()
- val theChar = handle.text(tmppos, xCharRef(() => ch, () => nextch()))
- xToken(';')
- ts &+ theChar
- case _ => // EntityRef
- val n = xName
- xToken(';')
-
- if (unescape contains n) {
- handle.entityRef(tmppos, n)
- ts &+ unescape(n)
- } else push(n)
- }
- case _ => // text content
- appendText(tmppos, ts, xText)
- }
- }
- done
- } // content(NamespaceBinding)
-
- /** {{{
- * externalID ::= SYSTEM S syslit
- * PUBLIC S pubid S syslit
- * }}} */
- def externalID(): ExternalID = ch match {
- case 'S' =>
- nextch()
- xToken("YSTEM")
- xSpace()
- val sysID = systemLiteral()
- new SystemID(sysID)
- case 'P' =>
- nextch(); xToken("UBLIC")
- xSpace()
- val pubID = pubidLiteral()
- xSpace()
- val sysID = systemLiteral()
- new PublicID(pubID, sysID)
- }
-
-
- /** parses document type declaration and assigns it to instance variable
- * dtd.
- * {{{
- * <! parseDTD ::= DOCTYPE name ... >
- * }}} */
- def parseDTD() { // dirty but fast
- var extID: ExternalID = null
- if (this.dtd ne null)
- reportSyntaxError("unexpected character (DOCTYPE already defined")
- xToken("DOCTYPE")
- xSpace()
- val n = xName
- xSpace()
- //external ID
- if ('S' == ch || 'P' == ch) {
- extID = externalID()
- xSpaceOpt()
- }
-
- /* parse external subset of DTD
- */
-
- if ((null != extID) && isValidating) {
-
- pushExternal(extID.systemId)
- extIndex = inpStack.length
-
- extSubset()
- pop()
- extIndex = -1
- }
-
- if ('[' == ch) { // internal subset
- nextch()
- /* TODO */
- intSubset()
- // TODO: do the DTD parsing?? ?!?!?!?!!
- xToken(']')
- xSpaceOpt()
- }
- xToken('>')
- this.dtd = new DTD {
- /*override var*/ externalID = extID
- /*override val */decls = handle.decls.reverse
- }
- //this.dtd.initializeEntities();
- if (doc ne null)
- doc.dtd = this.dtd
-
- handle.endDTD(n)
- }
-
- def element(pscope: NamespaceBinding): NodeSeq = {
- xToken('<')
- element1(pscope)
- }
-
- /** {{{
- * '<' element ::= xmlTag1 '>' { xmlExpr | '{' simpleExpr '}' } ETag
- * | xmlTag1 '/' '>'
- * }}} */
- def element1(pscope: NamespaceBinding): NodeSeq = {
- val pos = this.pos
- val (qname, (aMap, scope)) = xTag(pscope)
- val (pre, local) = Utility.prefix(qname) match {
- case Some(p) => (p, qname drop p.length+1)
- case _ => (null, qname)
- }
- val ts = {
- if (ch == '/') { // empty element
- xToken("/>")
- handle.elemStart(pos, pre, local, aMap, scope)
- NodeSeq.Empty
- }
- else { // element with content
- xToken('>')
- handle.elemStart(pos, pre, local, aMap, scope)
- val tmp = content(scope)
- xEndTag(qname)
- tmp
- }
- }
- val res = handle.elem(pos, pre, local, aMap, scope, ts == NodeSeq.Empty, ts)
- handle.elemEnd(pos, pre, local)
- res
- }
-
- /** Parse character data.
- *
- * precondition: `xEmbeddedBlock == false` (we are not in a scala block)
- */
- private def xText: String = {
- var exit = false
- while (! exit) {
- putChar(ch)
- nextch()
-
- exit = eof || ( ch == '<' ) || ( ch == '&' )
- }
- val str = cbuf.toString
- cbuf.length = 0
- str
- }
-
- /** attribute value, terminated by either ' or ". value may not contain &lt;.
- * {{{
- * AttValue ::= `'` { _ } `'`
- * | `"` { _ } `"`
- * }}} */
- def systemLiteral(): String = {
- val endch = ch
- if (ch != '\'' && ch != '"')
- reportSyntaxError("quote ' or \" expected")
- nextch()
- while (ch != endch && !eof) {
- putChar(ch)
- nextch()
- }
- nextch()
- val str = cbuf.toString()
- cbuf.length = 0
- str
- }
-
- /** {{{
- * [12] PubidLiteral ::= '"' PubidChar* '"' | "'" (PubidChar - "'")* "'"
- * }}} */
- def pubidLiteral(): String = {
- val endch = ch
- if (ch!='\'' && ch != '"')
- reportSyntaxError("quote ' or \" expected")
- nextch()
- while (ch != endch && !eof) {
- putChar(ch)
- //println("hello '"+ch+"'"+isPubIDChar(ch))
- if (!isPubIDChar(ch))
- reportSyntaxError("char '"+ch+"' is not allowed in public id")
- nextch()
- }
- nextch()
- val str = cbuf.toString
- cbuf.length = 0
- str
- }
-
- //
- // dtd parsing
- //
-
- def extSubset(): Unit = {
- var textdecl: (Option[String],Option[String]) = null
- if (ch == '<') {
- nextch()
- if (ch == '?') {
- nextch()
- textdecl = textDecl()
- } else
- markupDecl1()
- }
- while (!eof)
- markupDecl()
- }
-
- def markupDecl1() = {
- def doInclude() = {
- xToken('['); while(']' != ch) markupDecl(); nextch() // ']'
- }
- def doIgnore() = {
- xToken('['); while(']' != ch) nextch(); nextch() // ']'
- }
- if ('?' == ch) {
- nextch()
- xProcInstr // simply ignore processing instructions!
- } else {
- xToken('!')
- ch match {
- case '-' =>
- xComment // ignore comments
-
- case 'E' =>
- nextch()
- if ('L' == ch) {
- nextch()
- elementDecl()
- } else
- entityDecl()
-
- case 'A' =>
- nextch()
- attrDecl()
-
- case 'N' =>
- nextch()
- notationDecl()
-
- case '[' if inpStack.length >= extIndex =>
- nextch()
- xSpaceOpt()
- ch match {
- case '%' =>
- nextch()
- val ent = xName
- xToken(';')
- xSpaceOpt()
-
- push(ent)
- xSpaceOpt()
- val stmt = xName
- xSpaceOpt()
-
- stmt match {
- // parameter entity
- case "INCLUDE" => doInclude()
- case "IGNORE" => doIgnore()
- }
- case 'I' =>
- nextch()
- ch match {
- case 'G' =>
- nextch()
- xToken("NORE")
- xSpaceOpt()
- doIgnore()
- case 'N' =>
- nextch()
- xToken("NCLUDE")
- doInclude()
- }
- }
- xToken(']')
- xToken('>')
-
- case _ =>
- curInput.reportError(pos, "unexpected character '"+ch+"', expected some markupdecl")
- while (ch!='>')
- nextch()
- }
- }
- }
-
- def markupDecl(): Unit = ch match {
- case '%' => // parameter entity reference
- nextch()
- val ent = xName
- xToken(';')
- if (!isValidating)
- handle.peReference(ent) // n-v: just create PE-reference
- else
- push(ent) // v: parse replacementText
-
- //peReference
- case '<' =>
- nextch()
- markupDecl1()
- case _ if isSpace(ch) =>
- xSpace()
- case _ =>
- reportSyntaxError("markupdecl: unexpected character '"+ch+"' #" + ch.toInt)
- nextch()
- }
-
- /** "rec-xml/#ExtSubset" pe references may not occur within markup declarations
- */
- def intSubset() {
- //Console.println("(DEBUG) intSubset()")
- xSpace()
- while (']' != ch)
- markupDecl()
- }
-
- /** &lt;! element := ELEMENT
- */
- def elementDecl() {
- xToken("EMENT")
- xSpace()
- val n = xName
- xSpace()
- while ('>' != ch) {
- //Console.println("["+ch+"]")
- putChar(ch)
- nextch()
- }
- //Console.println("END["+ch+"]")
- nextch()
- val cmstr = cbuf.toString()
- cbuf.length = 0
- handle.elemDecl(n, cmstr)
- }
-
- /** {{{
- * <! attlist := ATTLIST
- * }}} */
- def attrDecl() = {
- xToken("TTLIST")
- xSpace()
- val n = xName
- xSpace()
- var attList: List[AttrDecl] = Nil
-
- // later: find the elemDecl for n
- while ('>' != ch) {
- val aname = xName
- xSpace()
- // could be enumeration (foo,bar) parse this later :-/
- while ('"' != ch && '\'' != ch && '#' != ch && '<' != ch) {
- if (!isSpace(ch))
- cbuf.append(ch)
- nextch()
- }
- val atpe = cbuf.toString
- cbuf.length = 0
-
- val defdecl: DefaultDecl = ch match {
- case '\'' | '"' =>
- DEFAULT(fixed = false, xAttributeValue())
-
- case '#' =>
- nextch()
- xName match {
- case "FIXED" => xSpace() ; DEFAULT(fixed = true, xAttributeValue())
- case "IMPLIED" => IMPLIED
- case "REQUIRED" => REQUIRED
- }
- case _ =>
- null
- }
- xSpaceOpt()
-
- attList ::= AttrDecl(aname, atpe, defdecl)
- cbuf.length = 0
- }
- nextch()
- handle.attListDecl(n, attList.reverse)
- }
-
- /** {{{
- * <! element := ELEMENT
- * }}} */
- def entityDecl() = {
- var isParameterEntity = false
- xToken("NTITY")
- xSpace()
- if ('%' == ch) {
- nextch()
- isParameterEntity = true
- xSpace()
- }
- val n = xName
- xSpace()
- ch match {
- case 'S' | 'P' => //sy
- val extID = externalID()
- if (isParameterEntity) {
- xSpaceOpt()
- xToken('>')
- handle.parameterEntityDecl(n, ExtDef(extID))
- } else { // notation?
- xSpace()
- if ('>' != ch) {
- xToken("NDATA")
- xSpace()
- val notat = xName
- xSpaceOpt()
- xToken('>')
- handle.unparsedEntityDecl(n, extID, notat)
- } else {
- nextch()
- handle.parsedEntityDecl(n, ExtDef(extID))
- }
- }
-
- case '"' | '\'' =>
- val av = xEntityValue()
- xSpaceOpt()
- xToken('>')
- if (isParameterEntity)
- handle.parameterEntityDecl(n, IntDef(av))
- else
- handle.parsedEntityDecl(n, IntDef(av))
- }
- {}
- } // entityDecl
-
- /** {{{
- * 'N' notationDecl ::= "OTATION"
- * }}} */
- def notationDecl() {
- xToken("OTATION")
- xSpace()
- val notat = xName
- xSpace()
- val extID = if (ch == 'S') {
- externalID()
- }
- else if (ch == 'P') {
- /* PublicID (without system, only used in NOTATION) */
- nextch()
- xToken("UBLIC")
- xSpace()
- val pubID = pubidLiteral()
- xSpaceOpt()
- val sysID = if (ch != '>')
- systemLiteral()
- else
- null
- new PublicID(pubID, sysID)
- } else {
- reportSyntaxError("PUBLIC or SYSTEM expected")
- scala.sys.error("died parsing notationdecl")
- }
- xSpaceOpt()
- xToken('>')
- handle.notationDecl(notat, extID)
- }
-
- def reportSyntaxError(pos: Int, str: String) { curInput.reportError(pos, str) }
- def reportSyntaxError(str: String) { reportSyntaxError(pos, str) }
- def reportValidationError(pos: Int, str: String) { reportSyntaxError(pos, str) }
-
- def push(entityName: String) {
- if (!eof)
- inpStack = curInput :: inpStack
-
- // can't push before getting next character if needed
- ch
-
- curInput = replacementText(entityName)
- nextch()
- }
-
- def pushExternal(systemId: String) {
- if (!eof)
- inpStack = curInput :: inpStack
-
- // can't push before getting next character if needed
- ch
-
- curInput = externalSource(systemId)
- nextch()
- }
-
- def pop() {
- curInput = inpStack.head
- inpStack = inpStack.tail
- lastChRead = curInput.ch
- nextChNeeded = false
- pos = curInput.pos
- reachedEof = false // must be false, because of places where entity refs occur
- }
-}
diff --git a/src/xml/scala/xml/parsing/MarkupParserCommon.scala b/src/xml/scala/xml/parsing/MarkupParserCommon.scala
deleted file mode 100644
index 57c1651558..0000000000
--- a/src/xml/scala/xml/parsing/MarkupParserCommon.scala
+++ /dev/null
@@ -1,260 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package xml
-package parsing
-
-import scala.io.Source
-import scala.annotation.switch
-import Utility.Escapes.{ pairs => unescape }
-
-import Utility.SU
-
-/** This is not a public trait - it contains common code shared
- * between the library level XML parser and the compiler's.
- * All members should be accessed through those.
- */
-private[scala] trait MarkupParserCommon extends TokenTests {
- protected def unreachable = scala.sys.error("Cannot be reached.")
-
- // type HandleType // MarkupHandler, SymbolicXMLBuilder
- type InputType // Source, CharArrayReader
- type PositionType // Int, Position
- type ElementType // NodeSeq, Tree
- type NamespaceType // NamespaceBinding, Any
- type AttributesType // (MetaData, NamespaceBinding), mutable.Map[String, Tree]
-
- def mkAttributes(name: String, pscope: NamespaceType): AttributesType
- def mkProcInstr(position: PositionType, name: String, text: String): ElementType
-
- /** parse a start or empty tag.
- * [40] STag ::= '<' Name { S Attribute } [S]
- * [44] EmptyElemTag ::= '<' Name { S Attribute } [S]
- */
- protected def xTag(pscope: NamespaceType): (String, AttributesType) = {
- val name = xName
- xSpaceOpt()
-
- (name, mkAttributes(name, pscope))
- }
-
- /** '<?' ProcInstr ::= Name [S ({Char} - ({Char}'>?' {Char})]'?>'
- *
- * see [15]
- */
- def xProcInstr: ElementType = {
- val n = xName
- xSpaceOpt()
- xTakeUntil(mkProcInstr(_, n, _), () => tmppos, "?>")
- }
-
- /** attribute value, terminated by either `'` or `"`. value may not contain `<`.
- @param endCh either `'` or `"`
- */
- def xAttributeValue(endCh: Char): String = {
- val buf = new StringBuilder
- while (ch != endCh) {
- // well-formedness constraint
- if (ch == '<') return errorAndResult("'<' not allowed in attrib value", "")
- else if (ch == SU) truncatedError("")
- else buf append ch_returning_nextch
- }
- ch_returning_nextch
- // @todo: normalize attribute value
- buf.toString
- }
-
- def xAttributeValue(): String = {
- val str = xAttributeValue(ch_returning_nextch)
- // well-formedness constraint
- normalizeAttributeValue(str)
- }
-
- private def takeUntilChar(it: Iterator[Char], end: Char): String = {
- val buf = new StringBuilder
- while (it.hasNext) it.next() match {
- case `end` => return buf.toString
- case ch => buf append ch
- }
- scala.sys.error("Expected '%s'".format(end))
- }
-
- /** [42] '<' xmlEndTag ::= '<' '/' Name S? '>'
- */
- def xEndTag(startName: String) {
- xToken('/')
- if (xName != startName)
- errorNoEnd(startName)
-
- xSpaceOpt()
- xToken('>')
- }
-
- /** actually, Name ::= (Letter | '_' | ':') (NameChar)* but starting with ':' cannot happen
- * Name ::= (Letter | '_') (NameChar)*
- *
- * see [5] of XML 1.0 specification
- *
- * pre-condition: ch != ':' // assured by definition of XMLSTART token
- * post-condition: name does neither start, nor end in ':'
- */
- def xName: String = {
- if (ch == SU)
- truncatedError("")
- else if (!isNameStart(ch))
- return errorAndResult("name expected, but char '%s' cannot start a name" format ch, "")
-
- val buf = new StringBuilder
-
- do buf append ch_returning_nextch
- while (isNameChar(ch))
-
- if (buf.last == ':') {
- reportSyntaxError( "name cannot end in ':'" )
- buf.toString dropRight 1
- }
- else buf.toString
- }
-
- private def attr_unescape(s: String) = s match {
- case "lt" => "<"
- case "gt" => ">"
- case "amp" => "&"
- case "apos" => "'"
- case "quot" => "\""
- case "quote" => "\""
- case _ => "&" + s + ";"
- }
-
- /** Replaces only character references right now.
- * see spec 3.3.3
- */
- private def normalizeAttributeValue(attval: String): String = {
- val buf = new StringBuilder
- val it = attval.iterator.buffered
-
- while (it.hasNext) buf append (it.next() match {
- case ' ' | '\t' | '\n' | '\r' => " "
- case '&' if it.head == '#' => it.next() ; xCharRef(it)
- case '&' => attr_unescape(takeUntilChar(it, ';'))
- case c => c
- })
-
- buf.toString
- }
-
- /** CharRef ::= "&#" '0'..'9' {'0'..'9'} ";"
- * | "&#x" '0'..'9'|'A'..'F'|'a'..'f' { hexdigit } ";"
- *
- * see [66]
- */
- def xCharRef(ch: () => Char, nextch: () => Unit): String =
- Utility.parseCharRef(ch, nextch, reportSyntaxError _, truncatedError _)
-
- def xCharRef(it: Iterator[Char]): String = {
- var c = it.next()
- Utility.parseCharRef(() => c, () => { c = it.next() }, reportSyntaxError _, truncatedError _)
- }
-
- def xCharRef: String = xCharRef(() => ch, () => nextch())
-
- /** Create a lookahead reader which does not influence the input */
- def lookahead(): BufferedIterator[Char]
-
- /** The library and compiler parsers had the interesting distinction of
- * different behavior for nextch (a function for which there are a total
- * of two plausible behaviors, so we know the design space was fully
- * explored.) One of them returned the value of nextch before the increment
- * and one of them the new value. So to unify code we have to at least
- * temporarily abstract over the nextchs.
- */
- def ch: Char
- def nextch(): Unit
- protected def ch_returning_nextch: Char
- def eof: Boolean
-
- // def handle: HandleType
- var tmppos: PositionType
-
- def xHandleError(that: Char, msg: String): Unit
- def reportSyntaxError(str: String): Unit
- def reportSyntaxError(pos: Int, str: String): Unit
-
- def truncatedError(msg: String): Nothing
- def errorNoEnd(tag: String): Nothing
-
- protected def errorAndResult[T](msg: String, x: T): T = {
- reportSyntaxError(msg)
- x
- }
-
- def xToken(that: Char) {
- if (ch == that) nextch()
- else xHandleError(that, "'%s' expected instead of '%s'".format(that, ch))
- }
- def xToken(that: Seq[Char]) { that foreach xToken }
-
- /** scan [S] '=' [S]*/
- def xEQ() = { xSpaceOpt(); xToken('='); xSpaceOpt() }
-
- /** skip optional space S? */
- def xSpaceOpt() = while (isSpace(ch) && !eof) nextch()
-
- /** scan [3] S ::= (#x20 | #x9 | #xD | #xA)+ */
- def xSpace() =
- if (isSpace(ch)) { nextch(); xSpaceOpt() }
- else xHandleError(ch, "whitespace expected")
-
- /** Apply a function and return the passed value */
- def returning[T](x: T)(f: T => Unit): T = { f(x); x }
-
- /** Execute body with a variable saved and restored after execution */
- def saving[A, B](getter: A, setter: A => Unit)(body: => B): B = {
- val saved = getter
- try body
- finally setter(saved)
- }
-
- /** Take characters from input stream until given String "until"
- * is seen. Once seen, the accumulated characters are passed
- * along with the current Position to the supplied handler function.
- */
- protected def xTakeUntil[T](
- handler: (PositionType, String) => T,
- positioner: () => PositionType,
- until: String): T =
- {
- val sb = new StringBuilder
- val head = until.head
- val rest = until.tail
-
- while (true) {
- if (ch == head && peek(rest))
- return handler(positioner(), sb.toString)
- else if (ch == SU)
- truncatedError("") // throws TruncatedXMLControl in compiler
-
- sb append ch
- nextch()
- }
- unreachable
- }
-
- /** Create a non-destructive lookahead reader and see if the head
- * of the input would match the given String. If yes, return true
- * and drop the entire String from input; if no, return false
- * and leave input unchanged.
- */
- private def peek(lookingFor: String): Boolean =
- (lookahead() take lookingFor.length sameElements lookingFor.iterator) && {
- // drop the chars from the real reader (all lookahead + orig)
- (0 to lookingFor.length) foreach (_ => nextch())
- true
- }
-}
diff --git a/src/xml/scala/xml/parsing/NoBindingFactoryAdapter.scala b/src/xml/scala/xml/parsing/NoBindingFactoryAdapter.scala
deleted file mode 100644
index 56ac185f47..0000000000
--- a/src/xml/scala/xml/parsing/NoBindingFactoryAdapter.scala
+++ /dev/null
@@ -1,37 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala
-package xml
-package parsing
-
-import factory.NodeFactory
-
-/** nobinding adaptor providing callbacks to parser to create elements.
-* implements hash-consing
-*/
-class NoBindingFactoryAdapter extends FactoryAdapter with NodeFactory[Elem]
-{
- /** True. Every XML node may contain text that the application needs */
- def nodeContainsText(label: String) = true
-
- /** From NodeFactory. Constructs an instance of scala.xml.Elem */
- protected def create(pre: String, label: String, attrs: MetaData, scope: NamespaceBinding, children: Seq[Node]): Elem =
- Elem(pre, label, attrs, scope, children: _*)
-
- /** From FactoryAdapter. Creates a node. never creates the same node twice, using hash-consing. */
- def createNode(pre: String, label: String, attrs: MetaData, scope: NamespaceBinding, children: List[Node]): Elem =
- Elem(pre, label, attrs, scope, children: _*)
-
- /** Creates a text node. */
- def createText(text: String) = Text(text)
-
- /** Creates a processing instruction. */
- def createProcInstr(target: String, data: String) = makeProcInstr(target, data)
-}
diff --git a/src/xml/scala/xml/parsing/TokenTests.scala b/src/xml/scala/xml/parsing/TokenTests.scala
deleted file mode 100644
index 8dd9cdfaa3..0000000000
--- a/src/xml/scala/xml/parsing/TokenTests.scala
+++ /dev/null
@@ -1,101 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package xml
-package parsing
-
-/**
- * Helper functions for parsing XML fragments
- */
-trait TokenTests {
-
- /** {{{
- * (#x20 | #x9 | #xD | #xA)
- * }}} */
- final def isSpace(ch: Char): Boolean = ch match {
- case '\u0009' | '\u000A' | '\u000D' | '\u0020' => true
- case _ => false
- }
- /** {{{
- * (#x20 | #x9 | #xD | #xA)+
- * }}} */
- final def isSpace(cs: Seq[Char]): Boolean = cs.nonEmpty && (cs forall isSpace)
-
- /** These are 99% sure to be redundant but refactoring on the safe side. */
- def isAlpha(c: Char) = (c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z')
- def isAlphaDigit(c: Char) = isAlpha(c) || (c >= '0' && c <= '9')
-
- /** {{{
- * NameChar ::= Letter | Digit | '.' | '-' | '_' | ':'
- * | CombiningChar | Extender
- * }}}
- * See [4] and Appendix B of XML 1.0 specification.
- */
- def isNameChar(ch: Char) = {
- import java.lang.Character._
- // The constants represent groups Mc, Me, Mn, Lm, and Nd.
-
- isNameStart(ch) || (getType(ch).toByte match {
- case COMBINING_SPACING_MARK |
- ENCLOSING_MARK | NON_SPACING_MARK |
- MODIFIER_LETTER | DECIMAL_DIGIT_NUMBER => true
- case _ => ".-:" contains ch
- })
- }
-
- /** {{{
- * NameStart ::= ( Letter | '_' )
- * }}}
- * where Letter means in one of the Unicode general
- * categories `{ Ll, Lu, Lo, Lt, Nl }`.
- *
- * We do not allow a name to start with `:`.
- * See [3] and Appendix B of XML 1.0 specification
- */
- def isNameStart(ch: Char) = {
- import java.lang.Character._
-
- getType(ch).toByte match {
- case LOWERCASE_LETTER |
- UPPERCASE_LETTER | OTHER_LETTER |
- TITLECASE_LETTER | LETTER_NUMBER => true
- case _ => ch == '_'
- }
- }
-
- /** {{{
- * Name ::= ( Letter | '_' ) (NameChar)*
- * }}}
- * See [5] of XML 1.0 specification.
- */
- def isName(s: String) =
- s.nonEmpty && isNameStart(s.head) && (s.tail forall isNameChar)
-
- def isPubIDChar(ch: Char): Boolean =
- isAlphaDigit(ch) || (isSpace(ch) && ch != '\u0009') ||
- ("""-\()+,./:=?;!*#@$_%""" contains ch)
-
- /**
- * Returns `true` if the encoding name is a valid IANA encoding.
- * This method does not verify that there is a decoder available
- * for this encoding, only that the characters are valid for an
- * IANA encoding name.
- *
- * @param ianaEncoding The IANA encoding name.
- */
- def isValidIANAEncoding(ianaEncoding: Seq[Char]) = {
- def charOK(c: Char) = isAlphaDigit(c) || ("._-" contains c)
-
- ianaEncoding.nonEmpty && isAlpha(ianaEncoding.head) &&
- (ianaEncoding.tail forall charOK)
- }
-
- def checkSysID(s: String) = List('"', '\'') exists (c => !(s contains c))
- def checkPubID(s: String) = s forall isPubIDChar
-}
diff --git a/src/xml/scala/xml/parsing/ValidatingMarkupHandler.scala b/src/xml/scala/xml/parsing/ValidatingMarkupHandler.scala
deleted file mode 100644
index 1b20901249..0000000000
--- a/src/xml/scala/xml/parsing/ValidatingMarkupHandler.scala
+++ /dev/null
@@ -1,104 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala
-package xml
-package parsing
-
-import scala.xml.dtd._
-
-abstract class ValidatingMarkupHandler extends MarkupHandler {
-
- var rootLabel:String = _
- var qStack: List[Int] = Nil
- var qCurrent: Int = -1
-
- var declStack: List[ElemDecl] = Nil
- var declCurrent: ElemDecl = null
-
- final override val isValidating = true
-
- override def endDTD(n:String) = {
- rootLabel = n
- }
- override def elemStart(pos: Int, pre: String, label: String, attrs: MetaData, scope:NamespaceBinding) {
-
- def advanceDFA(dm:DFAContentModel) = {
- val trans = dm.dfa.delta(qCurrent)
- log("advanceDFA(dm): " + dm)
- log("advanceDFA(trans): " + trans)
- trans.get(ContentModel.ElemName(label)) match {
- case Some(qNew) => qCurrent = qNew
- case _ => reportValidationError(pos, "DTD says, wrong element, expected one of "+trans.keys)
- }
- }
- // advance in current automaton
- log("[qCurrent = "+qCurrent+" visiting "+label+"]")
-
- if (qCurrent == -1) { // root
- log(" checking root")
- if (label != rootLabel)
- reportValidationError(pos, "this element should be "+rootLabel)
- } else {
- log(" checking node")
- declCurrent.contentModel match {
- case ANY =>
- case EMPTY =>
- reportValidationError(pos, "DTD says, no elems, no text allowed here")
- case PCDATA =>
- reportValidationError(pos, "DTD says, no elements allowed here")
- case m @ MIXED(r) =>
- advanceDFA(m)
- case e @ ELEMENTS(r) =>
- advanceDFA(e)
- }
- }
- // push state, decl
- qStack = qCurrent :: qStack
- declStack = declCurrent :: declStack
-
- declCurrent = lookupElemDecl(label)
- qCurrent = 0
- log(" done now")
- }
-
- override def elemEnd(pos: Int, pre: String, label: String) {
- log(" elemEnd")
- qCurrent = qStack.head
- qStack = qStack.tail
- declCurrent = declStack.head
- declStack = declStack.tail
- log(" qCurrent now" + qCurrent)
- log(" declCurrent now" + declCurrent)
- }
-
- final override def elemDecl(name: String, cmstr: String) {
- decls = ElemDecl(name, ContentModel.parse(cmstr)) :: decls
- }
-
- final override def attListDecl(name: String, attList: List[AttrDecl]) {
- decls = AttListDecl(name, attList) :: decls
- }
-
- final override def unparsedEntityDecl(name: String, extID: ExternalID, notat: String) {
- decls = UnparsedEntityDecl(name, extID, notat) :: decls
- }
-
- final override def notationDecl(notat: String, extID: ExternalID) {
- decls = NotationDecl(notat, extID) :: decls
- }
-
- final override def peReference(name: String) {
- decls = PEReference(name) :: decls
- }
-
- /** report a syntax error */
- def reportValidationError(pos: Int, str: String): Unit
-}
diff --git a/src/xml/scala/xml/parsing/XhtmlEntities.scala b/src/xml/scala/xml/parsing/XhtmlEntities.scala
deleted file mode 100644
index 3683af202c..0000000000
--- a/src/xml/scala/xml/parsing/XhtmlEntities.scala
+++ /dev/null
@@ -1,54 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package xml
-package parsing
-
-import scala.xml.dtd.{ IntDef, ParsedEntityDecl }
-
-/**
- * @author (c) David Pollak 2007 WorldWide Conferencing, LLC.
- *
- */
-object XhtmlEntities {
- val entList = List(("quot",34), ("amp",38), ("lt",60), ("gt",62), ("nbsp",160), ("iexcl",161), ("cent",162), ("pound",163), ("curren",164), ("yen",165),
- ("euro",8364), ("brvbar",166), ("sect",167), ("uml",168), ("copy",169), ("ordf",170), ("laquo",171), ("shy",173), ("reg",174), ("trade",8482),
- ("macr",175), ("deg",176), ("plusmn",177), ("sup2",178), ("sup3",179), ("acute",180), ("micro",181), ("para",182), ("middot",183), ("cedil",184),
- ("sup1",185), ("ordm",186), ("raquo",187), ("frac14",188), ("frac12",189), ("frac34",190), ("iquest",191), ("times",215), ("divide",247),
- ("Agrave",192), ("Aacute",193), ("Acirc",194), ("Atilde",195), ("Auml",196), ("Aring",197), ("AElig",198), ("Ccedil",199), ("Egrave",200),
- ("Eacute",201), ("Ecirc",202), ("Euml",203), ("Igrave",204), ("Iacute",205), ("Icirc",206), ("Iuml",207), ("ETH",208), ("Ntilde",209),
- ("Ograve",210), ("Oacute",211), ("Ocirc",212), ("Otilde",213), ("Ouml",214), ("Oslash",216), ("Ugrave",217), ("Uacute",218), ("Ucirc",219),
- ("Uuml",220), ("Yacute",221), ("THORN",222), ("szlig",223), ("agrave",224), ("aacute",225), ("acirc",226), ("atilde",227), ("auml",228),
- ("aring",229), ("aelig",230), ("ccedil",231), ("egrave",232), ("eacute",233), ("ecirc",234), ("euml",235), ("igrave",236), ("iacute",237),
- ("icirc",238), ("iuml",239), ("eth",240), ("ntilde",241), ("ograve",242), ("oacute",243), ("ocirc",244), ("otilde",245), ("ouml",246),
- ("oslash",248), ("ugrave",249), ("uacute",250), ("ucirc",251), ("uuml",252), ("yacute",253), ("thorn",254), ("yuml",255), ("OElig",338),
- ("oelig",339), ("Scaron",352), ("scaron",353), ("Yuml",376), ("circ",710), ("ensp",8194), ("emsp",8195), ("zwnj",204), ("zwj",8205), ("lrm",8206),
- ("rlm",8207), ("ndash",8211), ("mdash",8212), ("lsquo",8216), ("rsquo",8217), ("sbquo",8218), ("ldquo",8220), ("rdquo",8221), ("bdquo",8222),
- ("dagger",8224), ("Dagger",8225), ("permil",8240), ("lsaquo",8249), ("rsaquo",8250), ("fnof",402), ("bull",8226), ("hellip",8230), ("prime",8242),
- ("Prime",8243), ("oline",8254), ("frasl",8260), ("weierp",8472), ("image",8465), ("real",8476), ("alefsym",8501), ("larr",8592), ("uarr",8593),
- ("rarr",8594), ("darr",8495), ("harr",8596), ("crarr",8629), ("lArr",8656), ("uArr",8657), ("rArr",8658), ("dArr",8659), ("hArr",8660),
- ("forall",8704), ("part",8706), ("exist",8707), ("empty",8709), ("nabla",8711), ("isin",8712), ("notin",8713), ("ni",8715), ("prod",8719),
- ("sum",8721), ("minus",8722), ("lowast",8727), ("radic",8730), ("prop",8733), ("infin",8734), ("ang",8736), ("and",8743), ("or",8744),
- ("cap",8745), ("cup",8746), ("int",8747), ("there4",8756), ("sim",8764), ("cong",8773), ("asymp",8776), ("ne",8800), ("equiv",8801), ("le",8804),
- ("ge",8805), ("sub",8834), ("sup",8835), ("nsub",8836), ("sube",8838), ("supe",8839), ("oplus",8853), ("otimes",8855), ("perp",8869), ("sdot",8901),
- ("lceil",8968), ("rceil",8969), ("lfloor",8970), ("rfloor",8971), ("lang",9001), ("rang",9002), ("loz",9674), ("spades",9824), ("clubs",9827),
- ("hearts",9829), ("diams",9830), ("Alpha",913), ("Beta",914), ("Gamma",915), ("Delta",916), ("Epsilon",917), ("Zeta",918), ("Eta",919),
- ("Theta",920), ("Iota",921), ("Kappa",922), ("Lambda",923), ("Mu",924), ("Nu",925), ("Xi",926), ("Omicron",927), ("Pi",928), ("Rho",929),
- ("Sigma",931), ("Tau",932), ("Upsilon",933), ("Phi",934), ("Chi",935), ("Psi",936), ("Omega",937), ("alpha",945), ("beta",946), ("gamma",947),
- ("delta",948), ("epsilon",949), ("zeta",950), ("eta",951), ("theta",952), ("iota",953), ("kappa",954), ("lambda",955), ("mu",956), ("nu",957),
- ("xi",958), ("omicron",959), ("pi",960), ("rho",961), ("sigmaf",962), ("sigma",963), ("tau",964), ("upsilon",965), ("phi",966), ("chi",967),
- ("psi",968), ("omega",969), ("thetasym",977), ("upsih",978), ("piv",982))
-
- val entMap: Map[String, Char] = Map.empty[String, Char] ++ entList.map { case (name, value) => (name, value.toChar)}
-
- val entities = entList.
- map { case (name, value) => (name, new ParsedEntityDecl(name, new IntDef(value.toChar.toString)))}
-
- def apply() = entities
-}
diff --git a/src/xml/scala/xml/parsing/XhtmlParser.scala b/src/xml/scala/xml/parsing/XhtmlParser.scala
deleted file mode 100644
index 6ce5bec8d0..0000000000
--- a/src/xml/scala/xml/parsing/XhtmlParser.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package xml
-package parsing
-
-import scala.io.Source
-
-/** An XML Parser that preserves `CDATA` blocks and knows about
- * [[scala.xml.parsing.XhtmlEntities]].
- *
- * @author (c) David Pollak, 2007 WorldWide Conferencing, LLC.
- */
-class XhtmlParser(val input: Source) extends ConstructingHandler with MarkupParser with ExternalSources {
- val preserveWS = true
- ent ++= XhtmlEntities()
-}
-
-/** Convenience method that instantiates, initializes and runs an `XhtmlParser`.
- *
- * @author Burak Emir
- */
-object XhtmlParser {
- def apply(source: Source): NodeSeq = new XhtmlParser(source).initialize.document()
-}
diff --git a/src/xml/scala/xml/persistent/CachedFileStorage.scala b/src/xml/scala/xml/persistent/CachedFileStorage.scala
deleted file mode 100644
index a1489ef3f4..0000000000
--- a/src/xml/scala/xml/persistent/CachedFileStorage.scala
+++ /dev/null
@@ -1,129 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package xml
-package persistent
-
-import java.io.{ File, FileOutputStream }
-import java.nio.ByteBuffer
-import java.nio.channels.Channels
-import java.lang.Thread
-
-import scala.collection.Iterator
-
-/** Mutable storage of immutable xml trees. Everything is kept in memory,
- * with a thread periodically checking for changes and writing to file.
- *
- * To ensure atomicity, two files are used, `filename1` and `'$'+filename1`.
- * The implementation switches between the two, deleting the older one
- * after a complete dump of the database has been written.
- *
- * @author Burak Emir
- */
-abstract class CachedFileStorage(private val file1: File) extends Thread {
-
- private val file2 = new File(file1.getParent, file1.getName+"$")
-
- /** Either equals `file1` or `file2`, references the next file in which
- * updates will be stored.
- */
- private var theFile: File = null
-
- private def switch() = { theFile = if (theFile == file1) file2 else file1; }
-
- /** this storage modified since last modification check */
- protected var dirty = false
-
- /** period between modification checks, in milliseconds */
- protected val interval = 1000
-
- /** finds and loads the storage file. subclasses should call this method
- * prior to any other, but only once, to obtain the initial sequence of nodes.
- */
- protected def initialNodes: Iterator[Node] = (file1.exists, file2.exists) match {
- case (false,false) =>
- theFile = file1
- Iterator.empty
- case (true, true ) if (file1.lastModified < file2.lastModified) =>
- theFile = file2
- load
- case (true, _ ) =>
- theFile = file1
- load
- case _ =>
- theFile = file2
- load
- }
-
- /** returns an iterator over the nodes in this storage */
- def nodes: Iterator[Node]
-
- /** adds a node, setting this.dirty to true as a side effect */
- def += (e: Node): Unit
-
- /** removes a tree, setting this.dirty to true as a side effect */
- def -= (e: Node): Unit
-
- /* loads and parses XML from file */
- private def load: Iterator[Node] = {
- import scala.io.Source
- import scala.xml.parsing.ConstructingParser
- log("[load]\nloading "+theFile)
- val src = Source.fromFile(theFile)
- log("parsing "+theFile)
- val res = ConstructingParser.fromSource(src,preserveWS = false).document.docElem(0)
- switch()
- log("[load done]")
- res.child.iterator
- }
-
- /** saves the XML to file */
- private def save() = if (this.dirty) {
- log("[save]\ndeleting "+theFile)
- theFile.delete()
- log("creating new "+theFile)
- theFile.createNewFile()
- val fos = new FileOutputStream(theFile)
- val c = fos.getChannel()
-
- // @todo: optimize
- val storageNode = <nodes>{ nodes.toList }</nodes>
- val w = Channels.newWriter(c, "utf-8")
- XML.write(w, storageNode, "utf-8", xmlDecl = true, doctype = null)
-
- log("writing to "+theFile)
-
- w.close
- c.close
- fos.close
- dirty = false
- switch()
- log("[save done]")
- }
-
- /** Run method of the thread. remember to use `start()` to start a thread,
- * not `run`. */
- override def run = {
- log("[run]\nstarting storage thread, checking every "+interval+" ms")
- while (true) {
- Thread.sleep( this.interval.toLong )
- save()
- }
- }
-
- /** Force writing of contents to the file, even if there has not been any
- * update. */
- def flush() = {
- this.dirty = true
- save()
- }
-
- @deprecated("This method and its usages will be removed. Use a debugger to debug code.", "2.11")
- def log(msg: String): Unit = {}
-}
diff --git a/src/xml/scala/xml/persistent/Index.scala b/src/xml/scala/xml/persistent/Index.scala
deleted file mode 100644
index 9ee45e7086..0000000000
--- a/src/xml/scala/xml/persistent/Index.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala
-package xml
-package persistent
-
-/** an Index returns some unique key that is part of a node
- */
-abstract class Index[A] extends Function1[Node,A] {}
diff --git a/src/xml/scala/xml/persistent/SetStorage.scala b/src/xml/scala/xml/persistent/SetStorage.scala
deleted file mode 100644
index 8db56a2e71..0000000000
--- a/src/xml/scala/xml/persistent/SetStorage.scala
+++ /dev/null
@@ -1,42 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala
-package xml
-package persistent
-
-import scala.collection.mutable
-import java.io.File
-
-/** A persistent store with set semantics. This class allows to add and remove
- * trees, but never contains two structurally equal trees.
- *
- * @author Burak Emir
- */
-class SetStorage(file: File) extends CachedFileStorage(file) {
-
- private val theSet = mutable.HashSet[Node]()
-
- // initialize
-
- {
- val it = super.initialNodes
- dirty = it.hasNext
- theSet ++= it
- }
-
- /* forwarding methods to hashset*/
-
- def += (e: Node): Unit = synchronized { this.dirty = true; theSet += e }
-
- def -= (e: Node): Unit = synchronized { this.dirty = true; theSet -= e }
-
- def nodes = synchronized { theSet.iterator }
-
-}
diff --git a/src/xml/scala/xml/pull/XMLEvent.scala b/src/xml/scala/xml/pull/XMLEvent.scala
deleted file mode 100644
index 3beb3648e7..0000000000
--- a/src/xml/scala/xml/pull/XMLEvent.scala
+++ /dev/null
@@ -1,60 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala
-package xml
-package pull
-
-/** An XML event for pull parsing. All events received during
- * parsing will be one of the subclasses of this trait.
- */
-trait XMLEvent
-
-/**
- * An Element's start tag was encountered.
- * @param pre prefix, if any, on the element. This is the `xs` in `<xs:string>foo</xs:string>`.
- * @param label the name of the element, not including the prefix
- * @param attrs any attributes on the element
- */
-case class EvElemStart(pre: String, label: String, attrs: MetaData, scope: NamespaceBinding) extends XMLEvent
-
-/**
- * An Element's end tag was encountered.
- * @param pre prefix, if any, on the element. This is the `xs` in `<xs:string>foo</xs:string>`.
- * @param label the name of the element, not including the prefix
- */
-case class EvElemEnd(pre: String, label: String) extends XMLEvent
-
-/**
- * A text node was encountered.
- * @param text the text that was found
- */
-case class EvText(text: String) extends XMLEvent
-
-/** An entity reference was encountered.
- * @param entity the name of the entity, e.g. `gt` when encountering the entity `&gt;`
- */
-case class EvEntityRef(entity: String) extends XMLEvent
-
-/**
- * A processing instruction was encountered.
- * @param target the "PITarget" of the processing instruction. For the instruction `<?foo bar="baz"?>`, the target would
- * be `foo`
- * @param text the remainder of the instruction. For the instruction `<?foo bar="baz"?>`, the text would
- * be `bar="baz"`
- * @see [[http://www.w3.org/TR/REC-xml/#sec-pi]]
- */
-case class EvProcInstr(target: String, text: String) extends XMLEvent
-
-/**
- * A comment was encountered
- * @param text the text of the comment
- */
-case class EvComment(text: String) extends XMLEvent
diff --git a/src/xml/scala/xml/pull/XMLEventReader.scala b/src/xml/scala/xml/pull/XMLEventReader.scala
deleted file mode 100755
index 76e51e17fd..0000000000
--- a/src/xml/scala/xml/pull/XMLEventReader.scala
+++ /dev/null
@@ -1,157 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package xml
-package pull
-
-import scala.io.Source
-import java.lang.Thread
-import java.util.concurrent.LinkedBlockingQueue
-import java.nio.channels.ClosedChannelException
-import scala.xml.parsing.{ ExternalSources, MarkupHandler, MarkupParser }
-
-/**
- * Main entry point into creating an event-based XML parser. Treating this
- * as a [[scala.collection.Iterator]] will provide access to the generated events.
- * @param src A [[scala.io.Source]] for XML data to parse
- *
- * @author Burak Emir
- * @author Paul Phillips
- */
-class XMLEventReader(src: Source)
-extends scala.collection.AbstractIterator[XMLEvent]
- with ProducerConsumerIterator[XMLEvent] {
-
- // We implement a pull parser as an iterator, but since we may be operating on
- // a stream (e.g. XML over a network) there may be arbitrarily long periods when
- // the queue is empty. Fortunately the ProducerConsumerIterator is ideally
- // suited to this task, possibly because it was written for use by this class.
-
- // to override as necessary
- val preserveWS = true
-
- override val MaxQueueSize = 1000
- protected case object POISON extends XMLEvent
- val EndOfStream = POISON
-
- // thread machinery
- private[this] val parser = new Parser(src)
- private[this] val parserThread = new Thread(parser, "XMLEventReader")
- parserThread.start
- // enqueueing the poison object is the reliable way to cause the
- // iterator to terminate; hasNext will return false once it sees it.
- // Calling interrupt() on the parserThread is the only way we can get
- // it to stop producing tokens since it's lost deep in document() -
- // we cross our fingers the interrupt() gets to its target, but if it
- // fails for whatever reason the iterator correctness is not impacted,
- // only performance (because it will finish the entire XML document,
- // or at least as much as it can fit in the queue.)
- def stop() = {
- produce(POISON)
- parserThread.interrupt()
- }
-
- private class Parser(val input: Source) extends MarkupHandler with MarkupParser with ExternalSources with Runnable {
- val preserveWS = XMLEventReader.this.preserveWS
- // track level for elem memory usage optimization
- private var level = 0
-
- // this is Parser's way to add to the queue - the odd return type
- // is to conform to MarkupHandler's interface
- def setEvent(es: XMLEvent*): NodeSeq = {
- es foreach produce
- NodeSeq.Empty
- }
-
- override def elemStart(pos: Int, pre: String, label: String, attrs: MetaData, scope: NamespaceBinding) {
- level += 1
- setEvent(EvElemStart(pre, label, attrs, scope))
- }
- override def elemEnd(pos: Int, pre: String, label: String) {
- setEvent(EvElemEnd(pre, label))
- level -= 1
- }
-
- // this is a dummy to satisfy MarkupHandler's API
- // memory usage optimization return one <ignore/> for top level to satisfy
- // MarkupParser.document() otherwise NodeSeq.Empty
- private var ignoreWritten = false
- final def elem(pos: Int, pre: String, label: String, attrs: MetaData, pscope: NamespaceBinding, empty: Boolean, nodes: NodeSeq): NodeSeq =
- if (level == 1 && !ignoreWritten) {ignoreWritten = true; <ignore/> } else NodeSeq.Empty
-
- def procInstr(pos: Int, target: String, txt: String) = setEvent(EvProcInstr(target, txt))
- def comment(pos: Int, txt: String) = setEvent(EvComment(txt))
- def entityRef(pos: Int, n: String) = setEvent(EvEntityRef(n))
- def text(pos: Int, txt:String) = setEvent(EvText(txt))
-
- override def run() {
- curInput = input
- interruptibly { this.initialize.document() }
- setEvent(POISON)
- }
- }
-}
-
-// An iterator designed for one or more producers to generate
-// elements, and a single consumer to iterate. Iteration will continue
-// until closeIterator() is called, after which point producers
-// calling produce() will receive interruptions.
-//
-// Since hasNext may block indefinitely if nobody is producing,
-// there is also an available() method which will return true if
-// the next call hasNext is guaranteed not to block.
-//
-// This is not thread-safe for multiple consumers!
-trait ProducerConsumerIterator[T >: Null] extends Iterator[T] {
- // abstract - iterator-specific distinguished object for marking eos
- val EndOfStream: T
-
- // defaults to unbounded - override to positive Int if desired
- val MaxQueueSize = -1
-
- def interruptibly[T](body: => T): Option[T] = try Some(body) catch {
- case _: InterruptedException => Thread.currentThread.interrupt(); None
- case _: ClosedChannelException => None
- }
-
- private[this] lazy val queue =
- if (MaxQueueSize < 0) new LinkedBlockingQueue[T]()
- else new LinkedBlockingQueue[T](MaxQueueSize)
- private[this] var buffer: T = _
- private def fillBuffer() = {
- buffer = interruptibly(queue.take) getOrElse EndOfStream
- isElement(buffer)
- }
- private def isElement(x: T) = x != null && x != EndOfStream
- private def eos() = buffer == EndOfStream
-
- // public producer interface - this is the only method producers call, so
- // LinkedBlockingQueue's synchronization is all we need.
- def produce(x: T): Unit = if (!eos) interruptibly(queue put x)
-
- // consumer/iterator interface - we need not synchronize access to buffer
- // because we required there to be only one consumer.
- def hasNext = !eos && (buffer != null || fillBuffer)
-
- def next() = {
- if (eos()) throw new NoSuchElementException("ProducerConsumerIterator")
- if (buffer == null) fillBuffer()
-
- drainBuffer()
- }
-
- def available() = isElement(buffer) || isElement(queue.peek)
-
- private def drainBuffer() = {
- assert(!eos)
- val res = buffer
- buffer = null
- res
- }
-}
diff --git a/src/xml/scala/xml/pull/package.scala b/src/xml/scala/xml/pull/package.scala
deleted file mode 100644
index 0e3019446b..0000000000
--- a/src/xml/scala/xml/pull/package.scala
+++ /dev/null
@@ -1,42 +0,0 @@
-package scala
-package xml
-
-/**
- * Classes needed to view an XML document as a series of events. The document
- * is parsed by an [[scala.xml.pull.XMLEventReader]] instance. You can treat it as
- * an [[scala.collection.Iterator]] to retrieve the events, which are all
- * subclasses of [[scala.xml.pull.XMLEvent]].
- *
- * {{{
- * scala> val source = Source.fromString("""<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
- * <?instruction custom value="customvalue"?>
- * <!DOCTYPE foo [
- * <!ENTITY bar "BAR">
- * ]><foo>Hello<!-- this is a comment --><bar>&bar;</bar><bar>&gt;</bar></foo>""")
- *
- * source: scala.io.Source = non-empty iterator
- *
- * scala> val reader = new XMLEventReader(source)
- * reader: scala.xml.pull.XMLEventReader = non-empty iterator
- *
- * scala> reader.foreach{ println(_) }
- * EvProcInstr(instruction,custom value="customvalue")
- * EvText(
- * )
- * EvElemStart(null,foo,,)
- * EvText(Hello)
- * EvComment( this is a comment )
- * EvElemStart(null,bar,,)
- * EvText(BAR)
- * EvElemEnd(null,bar)
- * EvElemStart(null,bar,,)
- * EvEntityRef(gt)
- * EvElemEnd(null,bar)
- * EvElemEnd(null,foo)
- * EvText(
- *
- * )
- *
- * }}}
- */
-package object pull
diff --git a/src/xml/scala/xml/transform/BasicTransformer.scala b/src/xml/scala/xml/transform/BasicTransformer.scala
deleted file mode 100644
index c98339fd67..0000000000
--- a/src/xml/scala/xml/transform/BasicTransformer.scala
+++ /dev/null
@@ -1,60 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala
-package xml
-package transform
-
-/** A class for XML transformations.
- *
- * @author Burak Emir
- * @version 1.0
- */
-abstract class BasicTransformer extends Function1[Node,Node]
-{
- protected def unchanged(n: Node, ns: Seq[Node]) =
- ns.length == 1 && (ns.head == n)
-
- /** Call transform(Node) for each node in ns, append results
- * to NodeBuffer.
- */
- def transform(it: Iterator[Node], nb: NodeBuffer): Seq[Node] =
- it.foldLeft(nb)(_ ++= transform(_)).toSeq
-
- /** Call transform(Node) to each node in ns, yield ns if nothing changes,
- * otherwise a new sequence of concatenated results.
- */
- def transform(ns: Seq[Node]): Seq[Node] = {
- val (xs1, xs2) = ns span (n => unchanged(n, transform(n)))
-
- if (xs2.isEmpty) ns
- else xs1 ++ transform(xs2.head) ++ transform(xs2.tail)
- }
-
- def transform(n: Node): Seq[Node] = {
- if (n.doTransform) n match {
- case Group(xs) => Group(transform(xs)) // un-group the hack Group tag
- case _ =>
- val ch = n.child
- val nch = transform(ch)
-
- if (ch eq nch) n
- else Elem(n.prefix, n.label, n.attributes, n.scope, nch: _*)
- }
- else n
- }
-
- def apply(n: Node): Node = {
- val seq = transform(n)
- if (seq.length > 1)
- throw new UnsupportedOperationException("transform must return single node for root")
- else seq.head
- }
-}
diff --git a/src/xml/scala/xml/transform/RewriteRule.scala b/src/xml/scala/xml/transform/RewriteRule.scala
deleted file mode 100644
index 1399ee538d..0000000000
--- a/src/xml/scala/xml/transform/RewriteRule.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala
-package xml
-package transform
-
-/** A RewriteRule, when applied to a term, yields either
- * the result of rewriting the term or the term itself if the rule
- * is not applied.
- *
- * @author Burak Emir
- * @version 1.0
- */
-abstract class RewriteRule extends BasicTransformer {
- /** a name for this rewrite rule */
- val name = this.toString()
- override def transform(ns: Seq[Node]): Seq[Node] = super.transform(ns)
- override def transform(n: Node): Seq[Node] = n
-}
-
diff --git a/src/xml/scala/xml/transform/RuleTransformer.scala b/src/xml/scala/xml/transform/RuleTransformer.scala
deleted file mode 100644
index 3a222ba759..0000000000
--- a/src/xml/scala/xml/transform/RuleTransformer.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-package xml
-package transform
-
-class RuleTransformer(rules: RewriteRule*) extends BasicTransformer {
- override def transform(n: Node): Seq[Node] =
- rules.foldLeft(super.transform(n)) { (res, rule) => rule transform res }
-}
diff --git a/test/files/jvm/backendBugUnapply.scala b/test/files/jvm/backendBugUnapply.scala
deleted file mode 100644
index 45ee6f7d4f..0000000000
--- a/test/files/jvm/backendBugUnapply.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-object Test {
- import scala.xml.{Node,UnprefixedAttribute}
-
- def domatch(x:Node) =
- x match {
- case Node("foo", UnprefixedAttribute("bar", z, _), _*) => z
- case _ => null
- }
-
- def main(args: Array[String]): Unit = {
- println(domatch(<foo bar="baz"><hi/></foo>))
- println(domatch(<foo bingo="donkey"><hi/></foo>))
- //
- // assert(domatch(<foo bar="baz"><hi/></foo>).toString == "baz")
- // assert(domatch(<foo bar="baz2"><hi/></foo>) == null)//, domatch(<foo bar="baz2"><hi/></foo>))
- }
-}
diff --git a/test/files/jvm/serialization-new.check b/test/files/jvm/serialization-new.check
index f886cfe29c..e2d2e4aee6 100644
--- a/test/files/jvm/serialization-new.check
+++ b/test/files/jvm/serialization-new.check
@@ -220,60 +220,6 @@ x = TrieMap(1 -> one, 2 -> two, 3 -> three)
y = TrieMap(1 -> one, 2 -> two, 3 -> three)
x equals y: true, y equals x: true
-x = xml:src="hello"
-y = xml:src="hello"
-x equals y: true, y equals x: true
-
-x = <title></title>
-y = <title></title>
-x equals y: true, y equals x: true
-
-x = <html><title>title</title><body></body></html>
-y = <html><title>title</title><body></body></html>
-x equals y: true, y equals x: true
-
-x = <html>
- <body>
- <table cellpadding="2" cellspacing="0">
- <tr>
- <th>Last Name</th>
- <th>First Name</th>
- </tr>
- <tr>
- <td> Tom </td>
- <td> 20 </td>
- </tr><tr>
- <td> Bob </td>
- <td> 22 </td>
- </tr><tr>
- <td> James </td>
- <td> 19 </td>
- </tr>
- </table>
- </body>
- </html>
-y = <html>
- <body>
- <table cellpadding="2" cellspacing="0">
- <tr>
- <th>Last Name</th>
- <th>First Name</th>
- </tr>
- <tr>
- <td> Tom </td>
- <td> 20 </td>
- </tr><tr>
- <td> Bob </td>
- <td> 22 </td>
- </tr><tr>
- <td> James </td>
- <td> 19 </td>
- </tr>
- </table>
- </body>
- </html>
-x equals y: true, y equals x: true
-
x = Tim
y = Tim
x equals y: true, y equals x: true
diff --git a/test/files/jvm/serialization-new.scala b/test/files/jvm/serialization-new.scala
index 1522fc8e27..1b5e856645 100644
--- a/test/files/jvm/serialization-new.scala
+++ b/test/files/jvm/serialization-new.scala
@@ -419,70 +419,6 @@ object Test3_mutable {
}
}
-
-//############################################################################
-// Test classes in package "scala.xml"
-
-object Test4_xml {
- import scala.xml.{Attribute, Document, Elem, Null, PrefixedAttribute, Text}
-
- case class Person(name: String, age: Int)
-
- try {
- // Attribute
- val a1 = new PrefixedAttribute("xml", "src", Text("hello"), Null)
- val _a1: Attribute = read(write(a1))
- check(a1, _a1)
-
- // Document
- val d1 = new Document
- d1.docElem = <title></title>
- d1.encoding = Some("UTF-8")
- val _d1: Document = read(write(d1))
- check(d1, _d1)
-
- // Elem
- val e1 = <html><title>title</title><body></body></html>;
- val _e1: Elem = read(write(e1))
- check(e1, _e1)
-
- class AddressBook(a: Person*) {
- private val people: List[Person] = a.toList
- def toXHTML =
- <table cellpadding="2" cellspacing="0">
- <tr>
- <th>Last Name</th>
- <th>First Name</th>
- </tr>
- { for (p <- people) yield
- <tr>
- <td> { p.name } </td>
- <td> { p.age.toString() } </td>
- </tr> }
- </table>;
- }
-
- val people = new AddressBook(
- Person("Tom", 20),
- Person("Bob", 22),
- Person("James", 19))
-
- val e2 =
- <html>
- <body>
- { people.toXHTML }
- </body>
- </html>;
- val _e2: Elem = read(write(e2))
- check(e2, _e2)
- }
- catch {
- case e: Exception =>
- println("Error in Test4_xml: " + e)
- throw e
- }
-}
-
//############################################################################
// Test user-defined classes WITHOUT nesting
@@ -594,11 +530,10 @@ object Test8 {
// Test code
object Test {
- def main(args: Array[String]) {
+ def main(args: Array[String]): Unit = {
Test1_scala
Test2_immutable
Test3_mutable
- Test4_xml
Test5
Test6
Test7
diff --git a/test/files/jvm/serialization.check b/test/files/jvm/serialization.check
index f886cfe29c..e2d2e4aee6 100644
--- a/test/files/jvm/serialization.check
+++ b/test/files/jvm/serialization.check
@@ -220,60 +220,6 @@ x = TrieMap(1 -> one, 2 -> two, 3 -> three)
y = TrieMap(1 -> one, 2 -> two, 3 -> three)
x equals y: true, y equals x: true
-x = xml:src="hello"
-y = xml:src="hello"
-x equals y: true, y equals x: true
-
-x = <title></title>
-y = <title></title>
-x equals y: true, y equals x: true
-
-x = <html><title>title</title><body></body></html>
-y = <html><title>title</title><body></body></html>
-x equals y: true, y equals x: true
-
-x = <html>
- <body>
- <table cellpadding="2" cellspacing="0">
- <tr>
- <th>Last Name</th>
- <th>First Name</th>
- </tr>
- <tr>
- <td> Tom </td>
- <td> 20 </td>
- </tr><tr>
- <td> Bob </td>
- <td> 22 </td>
- </tr><tr>
- <td> James </td>
- <td> 19 </td>
- </tr>
- </table>
- </body>
- </html>
-y = <html>
- <body>
- <table cellpadding="2" cellspacing="0">
- <tr>
- <th>Last Name</th>
- <th>First Name</th>
- </tr>
- <tr>
- <td> Tom </td>
- <td> 20 </td>
- </tr><tr>
- <td> Bob </td>
- <td> 22 </td>
- </tr><tr>
- <td> James </td>
- <td> 19 </td>
- </tr>
- </table>
- </body>
- </html>
-x equals y: true, y equals x: true
-
x = Tim
y = Tim
x equals y: true, y equals x: true
diff --git a/test/files/jvm/serialization.scala b/test/files/jvm/serialization.scala
index a64b7115fa..2bb9db3f72 100644
--- a/test/files/jvm/serialization.scala
+++ b/test/files/jvm/serialization.scala
@@ -420,70 +420,6 @@ object Test3_mutable {
}
}
-
-//############################################################################
-// Test classes in package "scala.xml"
-
-object Test4_xml {
- import scala.xml.{Attribute, Document, Elem, Null, PrefixedAttribute, Text}
-
- case class Person(name: String, age: Int)
-
- try {
- // Attribute
- val a1 = new PrefixedAttribute("xml", "src", Text("hello"), Null)
- val _a1: Attribute = read(write(a1))
- check(a1, _a1)
-
- // Document
- val d1 = new Document
- d1.docElem = <title></title>
- d1.encoding = Some("UTF-8")
- val _d1: Document = read(write(d1))
- check(d1, _d1)
-
- // Elem
- val e1 = <html><title>title</title><body></body></html>;
- val _e1: Elem = read(write(e1))
- check(e1, _e1)
-
- class AddressBook(a: Person*) {
- private val people: List[Person] = a.toList
- def toXHTML =
- <table cellpadding="2" cellspacing="0">
- <tr>
- <th>Last Name</th>
- <th>First Name</th>
- </tr>
- { for (p <- people) yield
- <tr>
- <td> { p.name } </td>
- <td> { p.age.toString() } </td>
- </tr> }
- </table>;
- }
-
- val people = new AddressBook(
- Person("Tom", 20),
- Person("Bob", 22),
- Person("James", 19))
-
- val e2 =
- <html>
- <body>
- { people.toXHTML }
- </body>
- </html>;
- val _e2: Elem = read(write(e2))
- check(e2, _e2)
- }
- catch {
- case e: Exception =>
- println("Error in Test4_xml: " + e)
- throw e
- }
-}
-
//############################################################################
// Test user-defined classes WITHOUT nesting
@@ -600,7 +536,6 @@ object Test {
Test1_scala
Test2_immutable
Test3_mutable
- Test4_xml
Test5
Test6
Test7
diff --git a/test/files/jvm/t0632.check b/test/files/jvm/t0632.check
deleted file mode 100755
index 681bc9da92..0000000000
--- a/test/files/jvm/t0632.check
+++ /dev/null
@@ -1,12 +0,0 @@
-<foo x="&amp;"/>
-<foo x="&amp;"/>
-<foo x="&amp;"/>
-<foo x="&amp;"/>
-<foo x="&amp;amp;"/>
-<foo x="&amp;amp;"/>
-<foo x="&amp;amp;"/>
-<foo x="&amp;amp;"/>
-<foo x="&amp;&amp;"/>
-<foo x="&amp;&amp;"/>
-<foo x="&amp;&amp;"/>
-<foo x="&amp;&amp;"/>
diff --git a/test/files/jvm/t0632.scala b/test/files/jvm/t0632.scala
deleted file mode 100644
index a2bb5aa7f4..0000000000
--- a/test/files/jvm/t0632.scala
+++ /dev/null
@@ -1,22 +0,0 @@
-object Test {
-import scala.io.Source.fromString
-import scala.xml.parsing.ConstructingParser.fromSource
-import scala.xml.TopScope
- def parse(s:String) = fromSource(fromString(s), false).element(TopScope)
- def main(argv : Array[String]) : Unit = {
-
- println(parse("<foo x='&amp;'/>"))
- println(xml.XML.loadString("<foo x='&amp;'/>"))
- println(<foo x="&amp;"/>)
- println(<foo x={ "&" }/>)
-
- println(xml.XML.loadString("<foo x='&amp;amp;'/>"))
- println(parse("<foo x='&amp;amp;'/>"))
- println(<foo x="&amp;amp;"/>)
- println(<foo x={ "&amp;" }/>)
- println(xml.XML.loadString("<foo x='&amp;&amp;'/>"))
- println(parse("<foo x='&amp;&amp;'/>"))
- println(<foo x="&amp;&amp;"/>)
- println(<foo x={ "&&" }/>)
- }
-}
diff --git a/test/files/jvm/t1118.check b/test/files/jvm/t1118.check
deleted file mode 100755
index d676b413c9..0000000000
--- a/test/files/jvm/t1118.check
+++ /dev/null
@@ -1,11 +0,0 @@
-
-<hi/> <!-- literal short -->
-<there></there> <!-- literal long -->
-<guys who="you all"></guys> <!-- literal long with attribute-->
-<hows it="going"/> <!-- literal short with attribute -->
-<this>is pretty cool</this> <!-- literal not empty -->
-
-<emptiness></emptiness> <!--programmatic long-->
-<vide/> <!--programmatic short-->
-<elem attr="value"/> <!--programmatic short with attribute-->
-<elem2 attr2="value2"></elem2> <!--programmatic long with attribute-->
diff --git a/test/files/jvm/t1118.scala b/test/files/jvm/t1118.scala
deleted file mode 100755
index 3c86547241..0000000000
--- a/test/files/jvm/t1118.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-import scala.xml._
-
-object Test {
- def main(args: Array[String]) {
- println(<xml:group>
-<hi/> <!-- literal short -->
-<there></there> <!-- literal long -->
-<guys who="you all"></guys> <!-- literal long with attribute-->
-<hows it="going"/> <!-- literal short with attribute -->
-<this>is pretty cool</this> <!-- literal not empty -->
-</xml:group>)
-
- println(Elem(null, "emptiness", Null, TopScope, false) ++ Text(" ") ++ Comment("programmatic long"))
-
- println(Elem(null, "vide", Null, TopScope, true) ++ Text(" ") ++ Comment("programmatic short"))
-
- println(Elem(null, "elem", Attribute("attr", Text("value"), Null), TopScope, true) ++ Text(" ") ++ Comment ("programmatic short with attribute"))
-
- println(Elem(null, "elem2", Attribute("attr2", Text("value2"), Null), TopScope, false) ++ Text(" ") ++ Comment ("programmatic long with attribute"))
- }
-} \ No newline at end of file
diff --git a/test/files/jvm/t560bis.check b/test/files/jvm/t560bis.check
deleted file mode 100644
index 91eb4c19a2..0000000000
--- a/test/files/jvm/t560bis.check
+++ /dev/null
@@ -1,2 +0,0 @@
-cool!
-cool!
diff --git a/test/files/jvm/t560bis.scala b/test/files/jvm/t560bis.scala
deleted file mode 100644
index 21eb8dde28..0000000000
--- a/test/files/jvm/t560bis.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-object Test {
-import scala.xml._;
-
- def bar(args: Seq[String]) = args match {
- case Seq(a,b,c,d @ _*) => Console.println("cool!")
- case _ => Console.println("bah")
- }
- def foo(args: List[String]) =
- Elem(null,"bla",Null, TopScope, minimizeEmpty = true, (args map {x => Text(x)}):_*) match {
- case Elem(_,_,_,_,Text("1"),_*) =>
- Console.println("cool!")
- case _ =>
- Console.println("bah")
- }
-
- def main(args: Array[String]) = {
- val li = List("1","2","3","4")
- bar(li)
- foo(li)
- }
-}
diff --git a/test/files/jvm/unittest_xml.scala b/test/files/jvm/unittest_xml.scala
deleted file mode 100644
index 106334e625..0000000000
--- a/test/files/jvm/unittest_xml.scala
+++ /dev/null
@@ -1,101 +0,0 @@
-import scala.xml.{ MetaData, Null, Utility, PrefixedAttribute, UnprefixedAttribute }
-
-object Test {
-
- def main(args:Array[String]) = {
- MetaDataTest.run()
- UtilityTest.run()
- }
-
- object MetaDataTest {
-
- import scala.xml.{ TopScope, NamespaceBinding, Node, Atom, Text }
-
- def domatch(x:Node): Node = {
- x match {
- case Node("foo", md @ UnprefixedAttribute(_, value, _), _*) if !value.isEmpty =>
- md("bar")(0)
- case _ => new Atom(3)
- }
- }
-
- def run() {
-
- var x: MetaData = Null
- var s: NamespaceBinding = TopScope
-
- // testing method def apply(uri:String, scp:NamespaceBinding, k:String): Seq[Node]
- // def apply(k:String): Seq[Node]
-
- assert(null == x("za://foo.com", s, "bar" ), "absent element (prefixed) 1")
- assert(null == x("bar"), "absent element (unprefix) 1")
-
- assert(None == x.get("za://foo.com", s, "bar" ), "absent element (prefixed) 2")
- assert(None == x.get("bar"), "absent element (unprefix) 2")
-
- x = new PrefixedAttribute("zo","bar", new Atom(42), x)
- s = new NamespaceBinding("zo","za://foo.com",s)
-
- assert(new Atom(42) == x("za://foo.com", s, "bar" ), "present element (prefixed) 3")
- assert(null == x("bar"), "present element (unprefix) 3")
-
- assert(Some(new Atom(42)) == x.get("za://foo.com", s, "bar" ), "present element (prefixed) 4")
- assert(None == x.get("bar"), "present element (unprefix) 4")
-
- x = new UnprefixedAttribute("bar","meaning", x)
-
- assert(null == x(null, s, "bar"), "present element (prefixed) 5")
- assert(Text("meaning") == x("bar"), "present element (unprefix) 5")
-
- assert(None == x.get(null, s, "bar" ), "present element (prefixed) 6")
- assert(Some(Text("meaning")) == x.get("bar"), "present element (unprefix) 6")
-
- val z = <foo bar="gar"/>
- val z2 = <foo/>
-
- assert(Text("gar") == domatch(z), "attribute extractor 1")
- assert(new Atom(3) == domatch(z2), "attribute extractor 2")
-
- }
- }
-
- object UtilityTest {
- def run() {
- assert(Utility.isNameStart('b'))
- assert(!Utility.isNameStart(':'))
-
- val x = <foo>
- <toomuchws/>
- </foo>
-
- val y = xml.Utility.trim(x)
-
- assert(1 == (y match { case <foo><toomuchws/></foo> => 1 }), "trim 1")
-
- val x2 = <foo>
- <toomuchws> a b b a </toomuchws>
- </foo>
-
- val y2 = xml.Utility.trim(x2)
-
- assert(2 == (y2 match { case <foo><toomuchws>a b b a</toomuchws></foo> => 2 }), "trim 2")
-
- val z = <bar>''</bar>
- val z1 = z.toString
-
- assert("<bar>''</bar>" == z1, "apos unescaped")
-
- val q = xml.Utility.sort(<a g='3' j='2' oo='2' a='2'/>)
- assert(" a=\"2\" g=\"3\" j=\"2\" oo=\"2\"" == xml.Utility.sort(q.attributes).toString)
-
- val pp = new xml.PrettyPrinter(80,5)
- assert("<a a=\"2\" g=\"3\" j=\"2\" oo=\"2\"/>" == pp.format(q))
-
- <hi>
- <there/>
- <guys/>
- </hi>.hashCode // Bug #777
- }
- }
-
-}
diff --git a/test/files/jvm/xml01.check b/test/files/jvm/xml01.check
deleted file mode 100755
index d78e6df410..0000000000
--- a/test/files/jvm/xml01.check
+++ /dev/null
@@ -1,8 +0,0 @@
-equality
-xpath \
-xpath \\ DESCENDANTS
-<book><author>Peter Buneman</author><author>Dan Suciu</author><title>Data on ze web</title></book>
--- group nodes
-<f><a/><b/><c/></f>
-<a/><f><a/><b/><c/></f><a/><b/><c/>
-attribute value normalization
diff --git a/test/files/jvm/xml01.scala b/test/files/jvm/xml01.scala
deleted file mode 100644
index 2b456f5ff5..0000000000
--- a/test/files/jvm/xml01.scala
+++ /dev/null
@@ -1,182 +0,0 @@
-import java.io.StringReader
-import org.xml.sax.InputSource
-
-import scala.xml._
-
-object Test extends App {
- def Elem(prefix: String, label: String, attributes: MetaData, scope: NamespaceBinding, child: Node*): Elem =
- scala.xml.Elem.apply(prefix, label, attributes, scope, minimizeEmpty = true, child: _*)
- val e: scala.xml.MetaData = Null //Node.NoAttributes
- val sc: scala.xml.NamespaceBinding = TopScope
-
- val xmlFile1 = "<hello><world/></hello>";
- val isrc1 = new InputSource(new StringReader(xmlFile1))
- val parsedxml1 = XML.load(isrc1)
- val isrc11 = new InputSource(new StringReader(xmlFile1))
- val parsedxml11 = XML.load(isrc11)
-
- val c = new Node {
- def label = "hello"
- override def hashCode() =
- Utility.hashCode(prefix, label, attributes.hashCode(), scope.hashCode(), child);
- def child = Elem(null, "world", e, sc);
- //def attributes = e;
- override def text = ""
- }
-
- println("equality")
- assert(c == parsedxml11)
- assert(parsedxml1 == parsedxml11)
- assert(List(parsedxml1) sameElements List(parsedxml11))
- assert(Array(parsedxml1).toList sameElements List(parsedxml11))
-
- val x2 = "<book><author>Peter Buneman</author><author>Dan Suciu</author><title>Data on ze web</title></book>";
-
- val i = new InputSource(new StringReader(x2))
- val x2p = XML.load(i)
-
- assert(x2p == Elem(null, "book" , e, sc,
- Elem(null, "author", e, sc,Text("Peter Buneman")),
- Elem(null, "author", e, sc,Text("Dan Suciu")),
- Elem(null, "title" , e, sc,Text("Data on ze web"))))
-
- val xmlFile2 = "<bib><book><author>Peter Buneman</author><author>Dan Suciu</author><title>Data on ze web</title></book><book><author>John Mitchell</author><title>Foundations of Programming Languages</title></book></bib>";
- val isrc2 = new InputSource(new StringReader(xmlFile2))
- val parsedxml2 = XML.load(isrc2)
-
- println("xpath \\")
-
- assert(parsedxml1 \ "_" sameElements List(Elem(null,"world", e, sc)))
-
- assert(parsedxml1 \ "world" sameElements List(Elem(null,"world", e, sc)))
-
- assert(
- (parsedxml2 \ "_") sameElements List(
- Elem(null,"book", e, sc,
- Elem(null,"author", e, sc, Text("Peter Buneman")),
- Elem(null,"author", e, sc, Text("Dan Suciu")),
- Elem(null,"title" , e, sc, Text("Data on ze web"))),
- Elem(null,"book",e,sc,
- Elem(null,"author",e,sc,Text("John Mitchell")),
- Elem(null,"title",e,sc,Text("Foundations of Programming Languages"))))
- )
- assert((parsedxml2 \ "author").isEmpty)
-
- assert(
- (parsedxml2 \ "book") sameElements List(
- Elem(null,"book",e,sc,
- Elem(null,"author", e, sc, Text("Peter Buneman")),
- Elem(null,"author", e, sc, Text("Dan Suciu")),
- Elem(null,"title" , e, sc, Text("Data on ze web"))),
- Elem(null,"book",e,sc,
- Elem(null,"author", e, sc, Text("John Mitchell")),
- Elem(null,"title" , e, sc, Text("Foundations of Programming Languages")))
- )
- )
-
- assert(
- (parsedxml2 \ "_" \ "_") sameElements List(
- Elem(null,"author", e, sc, Text("Peter Buneman")),
- Elem(null,"author", e, sc, Text("Dan Suciu")),
- Elem(null,"title" , e, sc, Text("Data on ze web")),
- Elem(null,"author", e, sc, Text("John Mitchell")),
- Elem(null,"title" , e, sc, Text("Foundations of Programming Languages"))
- )
- )
-
- assert(
- (parsedxml2 \ "_" \ "author") sameElements List(
- Elem(null,"author", e, sc, Text("Peter Buneman")),
- Elem(null,"author", e, sc, Text("Dan Suciu")),
- Elem(null,"author", e, sc, Text("John Mitchell"))
- )
- )
-
- assert((parsedxml2 \ "_" \ "_" \ "author").isEmpty)
-
- Console.println("xpath \\\\ DESCENDANTS");
-
- assert(
- (parsedxml2 \\ "author") sameElements List(
- Elem(null,"author", e, sc, Text("Peter Buneman")),
- Elem(null,"author", e, sc, Text("Dan Suciu")),
- Elem(null,"author", e, sc, Text("John Mitchell"))
- )
- )
-
- assert(
- (parsedxml2 \\ "title") sameElements List(
- Elem(null,"title", e, sc, Text("Data on ze web")),
- Elem(null,"title", e, sc, Text("Foundations of Programming Languages")))
- )
-
-
- println(
- (parsedxml2 \\ "book" ){ n:Node => (n \ "title") xml_== "Data on ze web" }
- )
-
- assert(
- ((new NodeSeq { val theSeq = List( parsedxml2 ) }) \\ "_") sameElements List(
- Elem(null,"bib",e,sc,
- Elem(null,"book",e,sc,
- Elem(null, "author", e, sc, Text("Peter Buneman")),
- Elem(null, "author", e, sc, Text("Dan Suciu")),
- Elem(null, "title" , e, sc, Text("Data on ze web"))),
- Elem(null,"book",e,sc,
- Elem(null,"author",e,sc,Text("John Mitchell")),
- Elem(null,"title",e,sc,Text("Foundations of Programming Languages")))),
- Elem(null,"book",e,sc,
- Elem(null,"author",e,sc,Text("Peter Buneman")),
- Elem(null,"author",e,sc,Text("Dan Suciu")),
- Elem(null,"title",e,sc,Text("Data on ze web"))),
- Elem(null,"author",e,sc,Text("Peter Buneman")),
- Elem(null,"author",e,sc,Text("Dan Suciu")),
- Elem(null,"title",e,sc,Text("Data on ze web")),
- Elem(null,"book",e,sc,
- Elem(null,"author",e,sc,Text("John Mitchell")),
- Elem(null,"title",e,sc,Text("Foundations of Programming Languages"))),
- Elem(null,"author",e,sc,Text("John Mitchell")),
- Elem(null,"title",e,sc,Text("Foundations of Programming Languages"))
- )
- )
-
- // test group node
- Console println "-- group nodes"
- val zx1: Node = Group { <a/><b/><c/> }
- val zy1 = <f>{zx1}</f>
- Console println zy1.toString()
-
- val zx2: Node = Group { List(<a/>,zy1,zx1) }
- Console println zx2.toString()
-
- val zz1 = <xml:group><a/><b/><c/></xml:group>
-
- assert(zx1 xml_== zz1)
- assert(zz1.length == 3)
-
- // unparsed
-
- println("attribute value normalization")
- val xmlAttrValueNorm = "<personne id='p0003' nom='&#x015e;ahingöz' />";
- {
- val isrcA = new InputSource( new StringReader(xmlAttrValueNorm) );
- val parsedxmlA = XML.load(isrcA);
- val c = (parsedxmlA \ "@nom").text.charAt(0);
- assert(c == '\u015e');
- }
- // buraq: if the following test fails with 'character x not allowed', it is
- // related to the mutable variable in a closures in MarkupParser.parsecharref
- {
- val isr = scala.io.Source.fromString(xmlAttrValueNorm);
- val pxmlB = scala.xml.parsing.ConstructingParser.fromSource(isr,false);
- val parsedxmlB = pxmlB.element(TopScope);
- val c = (parsedxmlB \ "@nom").text.charAt(0);
- assert(c == '\u015e');
- }
-
- // #60 test by round trip
-
- val p = scala.xml.parsing.ConstructingParser.fromSource(scala.io.Source.fromString("<foo bar:attr='&amp;'/>"),true)
- val n = p.element(new scala.xml.NamespaceBinding("bar","BAR",scala.xml.TopScope))(0)
- assert( n.attributes.get("BAR", n, "attr").nonEmpty)
-}
diff --git a/test/files/jvm/xml02.scala b/test/files/jvm/xml02.scala
deleted file mode 100644
index b830a0e694..0000000000
--- a/test/files/jvm/xml02.scala
+++ /dev/null
@@ -1,78 +0,0 @@
-object Test {
-
- def main(args: Array[String]) {
- XmlEx.run()
- XmlEy.run()
- XmlPat.run()
- DodgyNamespace.run()
- }
-
- import scala.xml.{NodeSeq, Utility}
- import NodeSeq.seqToNodeSeq
-
- val ax = <hello foo="bar" x:foo="baz" xmlns:x="the namespace from outer space">
- <world/>
- </hello>
-
- val cx = <z:hello foo="bar" xmlns:z="z" x:foo="baz" xmlns:x="the namespace from outer space">
- crazy text world
- </z:hello>
-
- val bx = <hello foo="bar&amp;x"></hello>
-
- object XmlEx {
-
- def run() {
- assert((ax \ "@foo") xml_== "bar") // uses NodeSeq.view!
- assert((ax \ "@foo") xml_== xml.Text("bar")) // dto.
- assert((bx \ "@foo") xml_== "bar&x") // dto.
- assert((bx \ "@foo") xml_sameElements List(xml.Text("bar&x")))
- assert("<hello foo=\"bar&amp;x\"></hello>" == bx.toString)
- }
- }
-
- object XmlEy {
- def run() {
- val z = ax \ "@{the namespace from outer space}foo"
- assert((ax \ "@{the namespace from outer space}foo") xml_== "baz")
- assert((cx \ "@{the namespace from outer space}foo") xml_== "baz")
-
- try {
- ax \ "@"
- assert(false)
- } catch {
- case _: IllegalArgumentException =>
- }
- try {
- ax \ "@{"
- assert(false)
- } catch {
- case _: IllegalArgumentException =>
- }
- try {
- ax \ "@{}"
- assert(false)
- } catch {
- case _: IllegalArgumentException =>
- }
-
- }
- }
-
- object XmlPat {
- def run() {
- assert(<hello/> match { case <hello/> => true; case _ => false; })
- assert(<x:ga xmlns:x="z"/> match { case <x:ga/> => true; case _ => false; });
- assert(Utility.trim(cx) match { case n @ <hello>crazy text world</hello> if (n \ "@foo") xml_== "bar" => true; })
- assert(Utility.trim(cx) match { case n @ <z:hello>crazy text world</z:hello> if (n \ "@foo") xml_== "bar" => true; })
- }
- }
-
- object DodgyNamespace {
- def run() {
- val x = <flog xmlns:ee="http://ee.com"><foo xmlns:dog="http://dog.com"><dog:cat/></foo></flog>
- assert(x.toString.matches(".*xmlns:dog=\"http://dog.com\".*"));
- }
- }
-
-}
diff --git a/test/files/jvm/xml03syntax.check b/test/files/jvm/xml03syntax.check
deleted file mode 100755
index 599cbad686..0000000000
--- a/test/files/jvm/xml03syntax.check
+++ /dev/null
@@ -1,27 +0,0 @@
-true
-true
-true
-<hello>world</hello>
-true
-<hello>1.5</hello>
-true
-<hello>5</hello>
-true
-<hello>true</hello>
-true
-<hello>5</hello>
-true
-<hello>27</hello>
-true
-<hello>1 2 3 4</hello>
-1
-2
-3
-4
-<hello>2 4</hello>
-2
-4
-
-node=<elem key="<b>hello</b>"/>, key=Some(<b>hello</b>)
-node=<elem/>, key=None
-<a>Š</a>
diff --git a/test/files/jvm/xml03syntax.scala b/test/files/jvm/xml03syntax.scala
deleted file mode 100644
index 41663681c7..0000000000
--- a/test/files/jvm/xml03syntax.scala
+++ /dev/null
@@ -1,97 +0,0 @@
-import scala.xml._
-
-object Test {
-
- private def handle[A](x: Node): A = {
- println(x)
- x.child(0).asInstanceOf[Atom[A]].data
- }
-
- def main(args: Array[String]) {
- test1()
- test2()
- test3()
- }
-
- private def test1() {
- val xNull = <hello>{null}</hello> // these used to be Atom(unit), changed to empty children
-
- println(xNull.child sameElements Nil)
-
- val x0 = <hello>{}</hello> // these used to be Atom(unit), changed to empty children
- val x00 = <hello>{ }</hello> // dto.
-
- val xa = <hello>{ "world" }</hello>
-
-
- println(x0.child sameElements Nil)
- println(x00.child sameElements Nil)
- println(handle[String](xa) == "world")
-
- val xb = <hello>{ 1.5 }</hello>
-
- println(handle[Double](xb) == 1.5)
-
- val xc = <hello>{ 5 }</hello>
-
- println(handle[Int](xc) == 5)
-
- val xd = <hello>{ true }</hello>
-
- println(handle[Boolean](xd) == true)
-
- val xe = <hello>{ 5:Short }</hello>
-
- println(handle[Short](xe) == (5:Short))
-
- val xf = <hello>{ val x = 27; x }</hello>
-
- println(handle[Int](xf) == 27)
-
- val xg = <hello>{ List(1,2,3,4) }</hello>
-
- println(xg)
- for (z <- xg.child) {
- println(z.toString() + {if (z.isInstanceOf[Text]) "(is text node ' ')" else ""})
- }
-
- val xh = <hello>{ for(x <- List(1,2,3,4) if x % 2 == 0) yield x }</hello>
-
- println(xh)
- for (z <- xh.child) {
- println(z.toString() + {if (z.isInstanceOf[Text]) "(is text node ' ')" else ""})
- }
- println
- }
-
- /** see SVN r13821 (emir): support for <elem key={x:Option[Seq[Node]]} />,
- * so that Options can be used for optional attributes.
- */
- private def test2() {
- val x1: Option[Seq[Node]] = Some(<b>hello</b>)
- val n1 = <elem key={x1} />;
- println("node="+n1+", key="+n1.attribute("key"))
-
- val x2: Option[Seq[Node]] = None
- val n2 = <elem key={x2} />;
- println("node="+n2+", key="+n2.attribute("key"))
- }
-
- private def test3() {
- // this demonstrates how to handle entities
- val s = io.Source.fromString("<a>&nbsp;</a>")
- object parser extends xml.parsing.ConstructingParser(s, false /*ignore ws*/) {
- override def replacementText(entityName: String): io.Source = {
- entityName match {
- case "nbsp" => io.Source.fromString("\u0160");
- case _ => super.replacementText(entityName);
- }
- }
- nextch; // !!important, to initialize the parser
- }
- val parsed = parser.element(TopScope) // parse the source as element
- // alternatively, we could call document()
- println(parsed)
- }
-
-}
diff --git a/test/files/jvm/xml04embed.check b/test/files/jvm/xml04embed.check
deleted file mode 100644
index e71e645149..0000000000
--- a/test/files/jvm/xml04embed.check
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-}
-{}{}{}
diff --git a/test/files/jvm/xml04embed.scala b/test/files/jvm/xml04embed.scala
deleted file mode 100644
index fa453e4295..0000000000
--- a/test/files/jvm/xml04embed.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-object Test {
- def main(args: Array[String]) {
- val ya = <x>{{</x>
- println(ya.text)
- val ua = <x>}}</x>
- println(ua.text)
- val za = <x>{{}}{{}}{{}}</x>
- println(za.text)
- }
-}
diff --git a/test/files/jvm/xmlattr.check b/test/files/jvm/xmlattr.check
deleted file mode 100644
index a87420d86c..0000000000
--- a/test/files/jvm/xmlattr.check
+++ /dev/null
@@ -1,18 +0,0 @@
-true
-true
-true
-true
-true
-true
-removal of duplicates for unprefixed attributes in append = 1
-true
-true
-true
-true
-true
-true
-true
-true
-true
-<b x="&amp;"/>
-<b x="&amp;"/>
diff --git a/test/files/jvm/xmlattr.scala b/test/files/jvm/xmlattr.scala
deleted file mode 100644
index 6423268ba7..0000000000
--- a/test/files/jvm/xmlattr.scala
+++ /dev/null
@@ -1,70 +0,0 @@
-import xml.{ NodeSeq, Null, Text, UnprefixedAttribute }
-
-object Test {
-
- def main(args: Array[String]) {
- UnprefixedAttributeTest()
- AttributeWithOptionTest()
- AttributeOutputTest()
- AttributeOperatorTest()
- }
-
- object UnprefixedAttributeTest {
- def apply() {
- val x = new UnprefixedAttribute("foo","bar", Null)
- println(Some(Text("bar")) == x.get("foo"))
- println(Text("bar") == x("foo"))
- println(None == x.get("no_foo"))
- println(null == x("no_foo"))
-
- val y = x.remove("foo")
- println(Null == y)
-
- val z = new UnprefixedAttribute("foo", null:NodeSeq, x)
- println(None == z.get("foo"))
-
- var appended = x append x append x append x
- var len = 0; while (appended ne Null) {
- appended = appended.next
- len = len + 1
- }
- println("removal of duplicates for unprefixed attributes in append = " + len)
- }
- }
-
- object AttributeWithOptionTest {
- def apply() {
- val x = new UnprefixedAttribute("foo", Some(Text("bar")), Null)
-
- println(Some(Text("bar")) == x.get("foo"))
- println(Text("bar") == x("foo"))
- println(None == x.get("no_foo"))
- println(null == x("no_foo"))
-
- val attr1 = Some(Text("foo value"))
- val attr2 = None
- val y = <b foo={attr1} bar={attr2} />
- println(Some(Text("foo value")) == y.attributes.get("foo"));
- println(Text("foo value") == y.attributes("foo"))
- println(None == y.attributes.get("bar"))
- println(null == y.attributes("bar"))
-
- val z = new UnprefixedAttribute("foo", None, x)
- println(None == z.get("foo"))
- }
- }
-
- object AttributeOutputTest {
- def apply() {
- println(<b x="&amp;"/>)
- println(<b x={"&"}/>)
- }
- }
-
- object AttributeOperatorTest {
- def apply() {
- val xml = <foo bar="apple" />
- assert(xml \@ "bar" == "apple")
- }
- }
-}
diff --git a/test/files/jvm/xmlmore.check b/test/files/jvm/xmlmore.check
deleted file mode 100644
index 29f144c89f..0000000000
--- a/test/files/jvm/xmlmore.check
+++ /dev/null
@@ -1,10 +0,0 @@
-<!-- thissa comment -->
-<?this is a pi foo bar = && {{ ?>
-
- &quot;Come, come again, whoever you are, come!
-Heathen, fire worshipper or idolatrous, come!
-Come even if you broke your penitence a hundred times,
-Ours is the portal of hope, come as you are.&quot;
- Mevlana Celaleddin Rumi
-<foo><br /></foo>
-End Test
diff --git a/test/files/jvm/xmlmore.scala b/test/files/jvm/xmlmore.scala
deleted file mode 100644
index 04d0a6c759..0000000000
--- a/test/files/jvm/xmlmore.scala
+++ /dev/null
@@ -1,29 +0,0 @@
-object myBreak extends scala.xml.Unparsed("<br />")
-
-object Test extends App {
- val com = <!-- thissa comment -->
- val pi = <?this is a pi foo bar = && {{ ?>
- val crz = <![CDATA[
- "Come, come again, whoever you are, come!
-Heathen, fire worshipper or idolatrous, come!
-Come even if you broke your penitence a hundred times,
-Ours is the portal of hope, come as you are."
- Mevlana Celaleddin Rumi]]>
-
- val nazim = <foo>{myBreak}</foo> // shows use of unparsed
-
- Console println com
- Console println pi
- Console println crz // this guy will escaped, and rightly so
- Console println nazim
- Console println "End Test"
-
- <x:foo xmlns:x="gaga"/> match {
- case scala.xml.QNode("gaga","foo",md,child@_*) =>
- }
-
- <x:foo xmlns:x="gaga"/> match {
- case scala.xml.Node("foo",md,child@_*) =>
- }
-
-}
diff --git a/test/files/jvm/xmlpull.scala b/test/files/jvm/xmlpull.scala
deleted file mode 100644
index 9ba7d4cf02..0000000000
--- a/test/files/jvm/xmlpull.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-import scala.xml._
-import scala.xml.pull._
-import scala.io.Source
-
-object Test {
-
- val src = Source.fromString("<hello><world/>!</hello>")
-
- def main(args: Array[String]) {
- var er = new XMLEventReader(src)
- er.next match {
- case EvElemStart(_, "hello", _, _) => //println("1")
- }
- er.next match {
- case EvElemStart(_, "world", _, _) => //println("2")
- }
- er.next match {
- case EvElemEnd(_, "world") => //println("3")
- }
- er.next match {
- case EvText("!") => //println("4")
- }
- er.next match {
- case EvElemEnd(_, "hello") => //println("5")
- }
- // you get the picture...
- er.stop // allow thread to be garbage-collected
- //println("6")
- }
-}
-
diff --git a/test/files/jvm/xmlstuff.check b/test/files/jvm/xmlstuff.check
deleted file mode 100644
index e1222479f7..0000000000
--- a/test/files/jvm/xmlstuff.check
+++ /dev/null
@@ -1,22 +0,0 @@
-NodeSeq
-<result>
- <title>Blabla</title>
- <remarks> Hallo Welt. </remarks>
-</result><result>
- <title>Blubabla</title>
- <remarks> Hello Blu </remarks>
-</result><result>
- <title>Blubabla</title>
- <remarks> rem 2 </remarks>
-</result>
-List(<book><title>Blabla</title></book>)
-<result>
- <name>John</name>
- <street> Elm Street</street>
- <city>Dolphin City</city>
- <phone where="work"> +41 21 693 68 67</phone>
- <phone where="mobile">+41 79 602 23 23</phone>
-</result>
-namespaces
-validation - elements
-validation - attributes
diff --git a/test/files/jvm/xmlstuff.scala b/test/files/jvm/xmlstuff.scala
deleted file mode 100644
index 45234c7139..0000000000
--- a/test/files/jvm/xmlstuff.scala
+++ /dev/null
@@ -1,181 +0,0 @@
-import java.io.StringReader
-import org.xml.sax.InputSource
-import scala.xml.{Node, NodeSeq, Elem, Text, XML}
-
-object Test {
-
- /** returns true if exception was thrown */
- def catcher(att: Function1[Unit, scala.xml.MetaData]): Boolean = {
- var ex = false
- try {
- att.apply({})
- } catch {
- case scala.xml.MalformedAttributeException(msg) =>
- println(msg)
- ex = true
- }
- ex
- }
-
- def main(args: Array[String]) {
-
- println("NodeSeq")
-
- val p = <foo>
- <bar gt='ga' value="3"/>
- <baz bazValue="8"/>
- <bar value="5" gi='go'/>
- </foo>;
-
- val pelems_1 = for (x <- p \ "bar"; y <- p \ "baz" ) yield {
- Text(x.attributes("value").toString + y.attributes("bazValue").toString+ "!")
- };
- val pelems_2 = new NodeSeq { val theSeq = List(Text("38!"),Text("58!")) };
- assert(pelems_1 sameElements pelems_2)
-
- assert(Text("8") sameElements (p \\ "@bazValue"))
-
- val books =
- <bks>
- <book><title>Blabla</title></book>
- <book><title>Blubabla</title></book>
- <book><title>Baaaaaaalabla</title></book>
- </bks>;
-
- val reviews =
- <reviews>
- <entry><title>Blabla</title>
- <remarks>
- Hallo Welt.
- </remarks>
- </entry>
- <entry><title>Blubabla</title>
- <remarks>
- Hello Blu
- </remarks>
- </entry>
- <entry><title>Blubabla</title>
- <remarks>
- rem 2
- </remarks>
- </entry>
- </reviews>;
-
- println( new scala.xml.PrettyPrinter(80, 5).formatNodes (
- for (t <- books \\ "title";
- r <- reviews \\ "entry"
- if (r \ "title") xml_== t) yield
- <result>
- { t }
- { r \ "remarks" }
- </result>
- ));
-
- // example
- println(
- for (t @ <book><title>Blabla</title></book> <- new NodeSeq { val theSeq = books.child }.toList)
- yield t
- );
- val phoneBook =
- <phonebook>
- <descr>
- This is the <b>phonebook</b> of the
- <a href="http://acme.org">ACME</a> corporation.
- </descr>
- <entry>
- <name>John</name>
- <phone where="work"> +41 21 693 68 67</phone>
- <phone where="mobile">+41 79 602 23 23</phone>
- </entry>
- </phonebook>;
-
-
- val addrBook =
- <addrbook>
- <descr>
- This is the <b>addressbook</b> of the
- <a href="http://acme.org">ACME</a> corporation.
- </descr>
- <entry>
- <name>John</name>
- <street> Elm Street</street>
- <city>Dolphin City</city>
- </entry>
- </addrbook>;
-
- println( new scala.xml.PrettyPrinter(80, 5).formatNodes (
- for (t <- addrBook \\ "entry";
- r <- phoneBook \\ "entry"
- if (t \ "name") xml_== (r \ "name")) yield
- <result>
- { t.child }
- { r \ "phone" }
- </result>
- ));
-
-
- /* namespaces */
- // begin tmp
- println("namespaces")
- val cuckoo = <cuckoo xmlns="http://cuckoo.com">
- <foo/>
- <bar/>
- </cuckoo>;
- assert(cuckoo.namespace == "http://cuckoo.com")
- for (n <- cuckoo \ "_" ) {
- //println("n = "+n);
- //println("n.prefix = "+n.prefix);
- //.println("n.scope = "+n.scope);
- assert( n.namespace == "http://cuckoo.com")
- }
-
- println("validation - elements")
- val vtor = new scala.xml.dtd.ElementValidator();
- {
- import scala.xml.dtd.ELEMENTS
- import scala.xml.dtd.ContentModel._
- vtor.setContentModel(
- ELEMENTS(
- Sequ(
- Letter(ElemName("bar")),
- Star(Letter(ElemName("baz"))) )));
-
- }
- assert(vtor( <foo><bar/><baz/><baz/></foo> ))
-
- {
- import scala.xml.dtd.MIXED
- import scala.xml.dtd.ContentModel._
-
- vtor.setContentModel(
- MIXED(
- Alt(Letter(ElemName("bar")),
- Letter(ElemName("baz")),
- Letter(ElemName("bal")))));
- }
-
- assert(vtor(<foo><bar/><baz/><baz/></foo> ))
- assert(vtor(<foo>ab<bar/>cd<baz/>ed<baz/>gh</foo> ))
- assert(!vtor(<foo> <ugha/> <bugha/> </foo> ))
-
- println("validation - attributes")
- vtor.setContentModel(null)
- vtor.setMetaData(List())
- assert(!vtor( <foo bar="hello"/> ))
-
- {
- import scala.xml.dtd._
- vtor setMetaData List(AttrDecl("bar", "CDATA", IMPLIED))
- }
- assert(!vtor(<foo href="http://foo.com" bar="hello"/>))
- assert(vtor(<foo bar="hello"/>))
-
- {
- import scala.xml.dtd._
- vtor.setMetaData(List(AttrDecl("bar","CDATA",REQUIRED)))
- }
- assert(!vtor( <foo href="http://foo.com" /> ))
- assert( vtor( <foo bar="http://foo.com" /> ))
-
- }
-}
diff --git a/test/files/neg/classmanifests_new_deprecations.check b/test/files/neg/classmanifests_new_deprecations.check
index 5f9d0a1ccc..fd1e2728c3 100644
--- a/test/files/neg/classmanifests_new_deprecations.check
+++ b/test/files/neg/classmanifests_new_deprecations.check
@@ -7,9 +7,6 @@ classmanifests_new_deprecations.scala:3: warning: type ClassManifest in object P
classmanifests_new_deprecations.scala:4: warning: type ClassManifest in object Predef is deprecated: Use `scala.reflect.ClassTag` instead
val cm3: ClassManifest[Int] = null
^
-classmanifests_new_deprecations.scala:4: warning: type ClassManifest in object Predef is deprecated: Use `scala.reflect.ClassTag` instead
- val cm3: ClassManifest[Int] = null
- ^
classmanifests_new_deprecations.scala:6: warning: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead
def rcm1[T: scala.reflect.ClassManifest] = ???
^
@@ -19,9 +16,6 @@ classmanifests_new_deprecations.scala:7: warning: type ClassManifest in package
classmanifests_new_deprecations.scala:8: warning: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead
val rcm3: scala.reflect.ClassManifest[Int] = null
^
-classmanifests_new_deprecations.scala:8: warning: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead
- val rcm3: scala.reflect.ClassManifest[Int] = null
- ^
classmanifests_new_deprecations.scala:10: warning: type ClassManifest in object Predef is deprecated: Use `scala.reflect.ClassTag` instead
type CM[T] = ClassManifest[T]
^
@@ -29,5 +23,5 @@ classmanifests_new_deprecations.scala:15: warning: type ClassManifest in package
type RCM[T] = scala.reflect.ClassManifest[T]
^
error: No warnings can be incurred under -Xfatal-warnings.
-10 warnings found
+8 warnings found
one error found
diff --git a/test/files/neg/macro-abort.check b/test/files/neg/macro-abort.check
new file mode 100644
index 0000000000..1e58add533
--- /dev/null
+++ b/test/files/neg/macro-abort.check
@@ -0,0 +1,4 @@
+Test_2.scala:2: error: aborted
+ Macros.abort
+ ^
+one error found
diff --git a/test/files/neg/macro-abort/Macros_1.scala b/test/files/neg/macro-abort/Macros_1.scala
new file mode 100644
index 0000000000..676c112098
--- /dev/null
+++ b/test/files/neg/macro-abort/Macros_1.scala
@@ -0,0 +1,9 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.Context
+
+object Macros {
+ def impl(c: Context) = {
+ c.abort(c.enclosingPosition, "aborted")
+ }
+ def abort = macro impl
+} \ No newline at end of file
diff --git a/test/files/neg/macro-abort/Test_2.scala b/test/files/neg/macro-abort/Test_2.scala
new file mode 100644
index 0000000000..1d0a7a25dc
--- /dev/null
+++ b/test/files/neg/macro-abort/Test_2.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+ Macros.abort
+} \ No newline at end of file
diff --git a/test/files/neg/macro-exception.check b/test/files/neg/macro-exception.check
new file mode 100644
index 0000000000..cee8b32ebd
--- /dev/null
+++ b/test/files/neg/macro-exception.check
@@ -0,0 +1,7 @@
+Test_2.scala:2: error: exception during macro expansion:
+java.lang.Exception
+ at Macros$.impl(Macros_1.scala:6)
+
+ Macros.exception
+ ^
+one error found
diff --git a/test/files/neg/macro-exception/Macros_1.scala b/test/files/neg/macro-exception/Macros_1.scala
new file mode 100644
index 0000000000..60e4020aec
--- /dev/null
+++ b/test/files/neg/macro-exception/Macros_1.scala
@@ -0,0 +1,9 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.Context
+
+object Macros {
+ def impl(c: Context) = {
+ throw new Exception()
+ }
+ def exception = macro impl
+} \ No newline at end of file
diff --git a/test/files/neg/macro-exception/Test_2.scala b/test/files/neg/macro-exception/Test_2.scala
new file mode 100644
index 0000000000..d82b21f2b2
--- /dev/null
+++ b/test/files/neg/macro-exception/Test_2.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+ Macros.exception
+} \ No newline at end of file
diff --git a/test/files/neg/macro-invalidusage-presuper.check b/test/files/neg/macro-invalidusage-presuper.check
index f63a0eef80..c0b1ec0248 100644
--- a/test/files/neg/macro-invalidusage-presuper.check
+++ b/test/files/neg/macro-invalidusage-presuper.check
@@ -1,4 +1,4 @@
-Macros_Test_2.scala:3: error: only type definitions and concrete field definitions allowed in early object initialization section
+Macros_Test_2.scala:3: error: only concrete field definitions allowed in early object initialization section
class D extends { def x = macro impl } with AnyRef
^
one error found
diff --git a/test/files/neg/t1011.check b/test/files/neg/t1011.check
deleted file mode 100644
index d9c8123549..0000000000
--- a/test/files/neg/t1011.check
+++ /dev/null
@@ -1,4 +0,0 @@
-t1011.scala:8: error: not found: value entity
- <dl><code>{Text(entity)}</code>
- ^
-one error found
diff --git a/test/files/neg/t1011.scala b/test/files/neg/t1011.scala
deleted file mode 100644
index 57a6ad7b45..0000000000
--- a/test/files/neg/t1011.scala
+++ /dev/null
@@ -1,127 +0,0 @@
-package test;
-import scala.xml._;
-
-abstract class Test {
- //val entity : String;
- def primitiveHeader : NodeSeq =
- Group({
- <dl><code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code>
- <code>{Text(entity)}</code></dl>
- } ++ // 3 seconds
- {}++ // 5 seconds
- {}++ // 10 seconds
- {}++ // 20 seconds
- {}++ // 40 seconds
- {}++ // 40 seconds
- {}++ // 40 seconds
- {}++ // 40 seconds
- {}++ // 40 seconds
- {}++ // 40 seconds
- {}++ // 40 seconds
- {}++ // 40 seconds
- {}++ // 40 seconds
- {}++ // 40 seconds
- {}++ // 40 seconds
- {}++ // 40 seconds
- {}++ // 40 seconds
- {}++ // 5 seconds
- {}++ // 10 seconds
- {}++ // 20 seconds
- {}++ // 40 seconds
- {}++ // 40 seconds
- {}++ // 40 seconds
- {}++ // 40 seconds
- {}++ // 40 seconds
- {}++ // 40 seconds
- {}++ // 40 seconds
- {}++ // 40 seconds
- {}++ // 40 seconds
- {}++ // 40 seconds
- {}++ // 40 seconds
- {}++ // 40 seconds
- {}++ // 40 seconds
- {}++ // 5 seconds
- {}++ // 10 seconds
- {}++ // 20 seconds
- {}++ // 40 seconds
- {}++ // 40 seconds
- {}++ // 40 seconds
- {}++ // 40 seconds
- {}++ // 40 seconds
- {}++ // 40 seconds
- {}++ // 40 seconds
- {}++ // 40 seconds
- {}++ // 40 seconds
- {}++ // 40 seconds
- {}++ // 40 seconds
- {}++ // 40 seconds
- {}++ // 40 seconds
- <hr/>);
-}
diff --git a/test/files/neg/t1017.check b/test/files/neg/t1017.check
deleted file mode 100644
index 52101c7f6e..0000000000
--- a/test/files/neg/t1017.check
+++ /dev/null
@@ -1,4 +0,0 @@
-t1017.scala:3: error: not found: value foo
-<x><x><x><x><x><x><x><x><x><x><x><x><x><x><x><x><x><x>{ foo }</x></x></x></x></x></x></x></x></x></x></x></x></x></x></x></x></x></x>
- ^
-one error found
diff --git a/test/files/neg/t1017.scala b/test/files/neg/t1017.scala
deleted file mode 100644
index e389f308c3..0000000000
--- a/test/files/neg/t1017.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-// 'foo' is not defined
-object Test {
-<x><x><x><x><x><x><x><x><x><x><x><x><x><x><x><x><x><x>{ foo }</x></x></x></x></x></x></x></x></x></x></x></x></x></x></x></x></x></x>
-}
diff --git a/test/files/neg/t1845.scala b/test/files/neg/t1845.scala
index dab448b7dd..4d3966484d 100644
--- a/test/files/neg/t1845.scala
+++ b/test/files/neg/t1845.scala
@@ -1,10 +1,10 @@
-import scala.util.parsing.combinator.syntactical.TokenParsers
-import scala.util.parsing.combinator.lexical.StdLexical
-import scala.util.parsing.combinator.token._
+class Tokens { abstract class Token }
+trait TokenParsers { val lexical: Tokens }
+
class MyTokenParsers extends TokenParsers {
import lexical._
- type Tokens = StdTokens
- type Elem = lexical.Token
- val lexical = new StdLexical
+
+
+ val lexical = new Tokens
}
diff --git a/test/files/neg/t1878-typer.check b/test/files/neg/t1878-typer.check
deleted file mode 100644
index e3a20d0be7..0000000000
--- a/test/files/neg/t1878-typer.check
+++ /dev/null
@@ -1,4 +0,0 @@
-t1878-typer.scala:4: error: _* may only come last
- case <p> { _* } </p> =>
- ^
-one error found
diff --git a/test/files/neg/t1878-typer.scala b/test/files/neg/t1878-typer.scala
deleted file mode 100644
index 1eb0cb7dff..0000000000
--- a/test/files/neg/t1878-typer.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-object Test extends App {
- // illegal - bug #1764
- null match {
- case <p> { _* } </p> =>
- }
-}
diff --git a/test/files/neg/t2796.check b/test/files/neg/t2796.check
index 4456a7fc19..22ee35a7e6 100644
--- a/test/files/neg/t2796.check
+++ b/test/files/neg/t2796.check
@@ -1,6 +1,9 @@
+t2796.scala:11: warning: early type members are deprecated. Move them to the regular body: the semantics are the same.
+ type X = Int // warn
+ ^
t2796.scala:7: warning: Implementation restriction: early definitions in traits are not initialized before the super class is initialized.
val abstractVal = "T1.abstractVal" // warn
^
error: No warnings can be incurred under -Xfatal-warnings.
-one warning found
+two warnings found
one error found
diff --git a/test/files/neg/t2796.flags b/test/files/neg/t2796.flags
index e8fb65d50c..d1b831ea87 100644
--- a/test/files/neg/t2796.flags
+++ b/test/files/neg/t2796.flags
@@ -1 +1 @@
--Xfatal-warnings \ No newline at end of file
+-deprecation -Xfatal-warnings \ No newline at end of file
diff --git a/test/files/neg/t2796.scala b/test/files/neg/t2796.scala
index 3bcc9df562..fa2f2358b9 100644
--- a/test/files/neg/t2796.scala
+++ b/test/files/neg/t2796.scala
@@ -8,10 +8,9 @@ trait T1 extends {
} with Base
trait T2 extends {
- type X = Int // okay
+ type X = Int // warn
} with Base
-
class C1 extends {
val abstractVal = "C1.abstractVal" // okay
} with Base
diff --git a/test/files/neg/t3160ambiguous.check b/test/files/neg/t3160ambiguous.check
index e80d9a5461..73a0c6d5db 100644
--- a/test/files/neg/t3160ambiguous.check
+++ b/test/files/neg/t3160ambiguous.check
@@ -1,7 +1,7 @@
-t3160ambiguous.scala:8: error: reference to Node is ambiguous;
+t3160ambiguous.scala:8: error: reference to List is ambiguous;
it is imported twice in the same scope by
-import scala.xml._
+import scala.collection.immutable._
and import Bippy._
- def f(x: Node): String = ??? // ambiguous, because Bippy.Node is accessible
+ def f(x: List[Any]): String = ??? // ambiguous, because Bippy.List is accessible
^
one error found
diff --git a/test/files/neg/t3160ambiguous.scala b/test/files/neg/t3160ambiguous.scala
index cb9759b79c..57745e60d8 100644
--- a/test/files/neg/t3160ambiguous.scala
+++ b/test/files/neg/t3160ambiguous.scala
@@ -1,15 +1,15 @@
object Bippy {
- private class Node
+ private class List[+T]
}
class Bippy {
import Bippy._
- import scala.xml._
+ import scala.collection.immutable._
- def f(x: Node): String = ??? // ambiguous, because Bippy.Node is accessible
+ def f(x: List[Any]): String = ??? // ambiguous, because Bippy.List is accessible
}
class Other {
import Bippy._
- import scala.xml._
+ import scala.collection.immutable._
- def f(x: Node): String = ??? // unambiguous, because Bippy.Node is inaccessible
+ def f(x: List[Any]): String = ??? // unambiguous, because Bippy.List is inaccessible
}
diff --git a/test/files/neg/t3776.scala b/test/files/neg/t3776.scala
index 454f914316..1075816511 100644
--- a/test/files/neg/t3776.scala
+++ b/test/files/neg/t3776.scala
@@ -1,8 +1,8 @@
-import util.parsing.combinator.{PackratParsers, RegexParsers}
-
-object MyParser extends RegexParsers with PackratParsers {
+object MyParser {
+ implicit def literal(s: String): Parser[String] = ???
+ trait Parser[+T]
+ def parse[T](p: Parser[T], in: java.lang.CharSequence): Option[T] = ???
}
-
object Test {
class ParsedAs(a: String) (implicit pattern: MyParser.Parser[_]) {
def parsedAs[T](v: T) = MyParser.parse(pattern, a).get someOperation v
diff --git a/test/files/neg/t7020.check b/test/files/neg/t7020.check
new file mode 100644
index 0000000000..f9600ca7fc
--- /dev/null
+++ b/test/files/neg/t7020.check
@@ -0,0 +1,19 @@
+t7020.scala:3: warning: match may not be exhaustive.
+It would fail on the following inputs: List((x: Int forSome x not in (1, 2, 4, 5, 6, 7))), List((x: Int forSome x not in (1, 2, 4, 5, 6, 7)), _), List(1, _), List(2, _), List(4, _), List(5, _), List(6, _), List(7, _), List(??, _), List(_, _)
+ List(5) match {
+ ^
+t7020.scala:10: warning: match may not be exhaustive.
+It would fail on the following inputs: List((x: Int forSome x not in (1, 2, 4, 5, 6, 7))), List((x: Int forSome x not in (1, 2, 4, 5, 6, 7)), _), List(1, _), List(2, _), List(4, _), List(5, _), List(6, _), List(7, _), List(??, _), List(_, _)
+ List(5) match {
+ ^
+t7020.scala:17: warning: match may not be exhaustive.
+It would fail on the following inputs: List((x: Int forSome x not in (1, 2, 4, 5, 6, 7))), List((x: Int forSome x not in (1, 2, 4, 5, 6, 7)), _), List(1, _), List(2, _), List(4, _), List(5, _), List(6, _), List(7, _), List(??, _), List(_, _)
+ List(5) match {
+ ^
+t7020.scala:24: warning: match may not be exhaustive.
+It would fail on the following inputs: List((x: Int forSome x not in (1, 2, 4, 5, 6, 7))), List((x: Int forSome x not in (1, 2, 4, 5, 6, 7)), _), List(1, _), List(2, _), List(4, _), List(5, _), List(6, _), List(7, _), List(??, _), List(_, _)
+ List(5) match {
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+four warnings found
+one error found
diff --git a/test/files/neg/t7020.flags b/test/files/neg/t7020.flags
new file mode 100644
index 0000000000..e8fb65d50c
--- /dev/null
+++ b/test/files/neg/t7020.flags
@@ -0,0 +1 @@
+-Xfatal-warnings \ No newline at end of file
diff --git a/test/files/neg/t7020.scala b/test/files/neg/t7020.scala
new file mode 100644
index 0000000000..cc5421bab1
--- /dev/null
+++ b/test/files/neg/t7020.scala
@@ -0,0 +1,30 @@
+object Test {
+ // warning was non-deterministic
+ List(5) match {
+ case 1 :: Nil | 2 :: Nil =>
+ case (x@(4 | 5 | 6)) :: Nil =>
+ case 7 :: Nil =>
+ case Nil =>
+ }
+
+ List(5) match {
+ case 1 :: Nil | 2 :: Nil =>
+ case (x@(4 | 5 | 6)) :: Nil =>
+ case 7 :: Nil =>
+ case Nil =>
+ }
+
+ List(5) match {
+ case 1 :: Nil | 2 :: Nil =>
+ case (x@(4 | 5 | 6)) :: Nil =>
+ case 7 :: Nil =>
+ case Nil =>
+ }
+
+ List(5) match {
+ case 1 :: Nil | 2 :: Nil =>
+ case (x@(4 | 5 | 6)) :: Nil =>
+ case 7 :: Nil =>
+ case Nil =>
+ }
+}
diff --git a/test/files/neg/t7185.check b/test/files/neg/t7185.check
deleted file mode 100644
index 46f2cc797e..0000000000
--- a/test/files/neg/t7185.check
+++ /dev/null
@@ -1,7 +0,0 @@
-t7185.scala:2: error: overloaded method value apply with alternatives:
- (f: scala.xml.Node => Boolean)scala.xml.NodeSeq <and>
- (i: Int)scala.xml.Node
- cannot be applied to ()
- <e></e>()
- ^
-one error found
diff --git a/test/files/neg/t7185.scala b/test/files/neg/t7185.scala
deleted file mode 100644
index 2f9284bc5f..0000000000
--- a/test/files/neg/t7185.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object Test {
- <e></e>()
-}
diff --git a/test/files/neg/t7694b.check b/test/files/neg/t7694b.check
new file mode 100644
index 0000000000..ea3d7736f8
--- /dev/null
+++ b/test/files/neg/t7694b.check
@@ -0,0 +1,7 @@
+t7694b.scala:8: error: type arguments [_3,_4] do not conform to trait L's type parameter bounds [A2,B2 <: A2]
+ def d = if (true) (null: L[A, A]) else (null: L[B, B])
+ ^
+t7694b.scala:9: error: type arguments [_1,_2] do not conform to trait L's type parameter bounds [A2,B2 <: A2]
+ val v = if (true) (null: L[A, A]) else (null: L[B, B])
+ ^
+two errors found
diff --git a/test/files/neg/t7752.check b/test/files/neg/t7752.check
new file mode 100644
index 0000000000..0a015d3f37
--- /dev/null
+++ b/test/files/neg/t7752.check
@@ -0,0 +1,27 @@
+t7752.scala:25: error: overloaded method value foo with alternatives:
+ [A](heading: String, rows: A*)(A,) <and>
+ [A, B](heading: (String, String), rows: (A, B)*)(A, B) <and>
+ [A, B, C](heading: (String, String, String), rows: (A, B, C)*)(A, B, C) <and>
+ [A, B, C, D](heading: (String, String, String, String), rows: (A, B, C, D)*)(A, B, C, D) <and>
+ [A, B, C, D, E](heading: (String, String, String, String, String), rows: (A, B, C, D, E)*)(A, B, C, D, E) <and>
+ [A, B, C, D, E, F](heading: (String, String, String, String, String, String), rows: (A, B, C, D, E, F)*)(A, B, C, D, E, F) <and>
+ [A, B, C, D, E, F, G](heading: (String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G)*)(A, B, C, D, E, F, G) <and>
+ [A, B, C, D, E, F, G, H](heading: (String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H)*)(A, B, C, D, E, F, G, H) <and>
+ [A, B, C, D, E, F, G, H, I](heading: (String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I)*)(A, B, C, D, E, F, G, H, I) <and>
+ [A, B, C, D, E, F, G, H, I, J](heading: (String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J)*)(A, B, C, D, E, F, G, H, I, J) <and>
+ [A, B, C, D, E, F, G, H, I, J, K](heading: (String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K)*)(A, B, C, D, E, F, G, H, I, J, K) <and>
+ [A, B, C, D, E, F, G, H, I, J, K, L](heading: (String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L)*)(A, B, C, D, E, F, G, H, I, J, K, L) <and>
+ [A, B, C, D, E, F, G, H, I, J, K, L, M](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M)*)(A, B, C, D, E, F, G, H, I, J, K, L, M) <and>
+ [A, B, C, D, E, F, G, H, I, J, K, L, M, N](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N)*)(A, B, C, D, E, F, G, H, I, J, K, L, M, N) <and>
+ [A, B, C, D, E, F, G, H, I, J, K, L, M, N, O](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O)*)(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) <and>
+ [A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P)*)(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P) <and>
+ [A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q)*)(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q) <and>
+ [A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R)*)(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R) <and>
+ [A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S)*)(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S) <and>
+ [A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T)*)(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T) <and>
+ [A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U)*)(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U) <and>
+ [A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V)*)(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V)
+ cannot be applied to (Int)
+ foo((1))
+ ^
+one error found
diff --git a/test/files/neg/t7752.scala b/test/files/neg/t7752.scala
new file mode 100644
index 0000000000..40ba2103b1
--- /dev/null
+++ b/test/files/neg/t7752.scala
@@ -0,0 +1,26 @@
+object Test {
+ def foo[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V)*): Tuple22[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V] = null
+ def foo[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U)*): Tuple21[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U] = null
+ def foo[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T)*): Tuple20[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T] = null
+ def foo[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S)*): Tuple19[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S] = null
+ def foo[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R)*): Tuple18[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R] = null
+ def foo[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q)*): Tuple17[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q] = null
+ def foo[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P)*): Tuple16[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P] = null
+ def foo[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O)*): Tuple15[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O] = null
+ def foo[A, B, C, D, E, F, G, H, I, J, K, L, M, N](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M, N)*): Tuple14[A,B,C,D,E,F,G,H,I,J,K,L,M,N] = null
+ def foo[A, B, C, D, E, F, G, H, I, J, K, L, M](heading: (String, String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L, M)*): Tuple13[A,B,C,D,E,F,G,H,I,J,K,L,M] = null
+ def foo[A, B, C, D, E, F, G, H, I, J, K, L](heading: (String, String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K, L)*): Tuple12[A,B,C,D,E,F,G,H,I,J,K,L] = null
+ def foo[A, B, C, D, E, F, G, H, I, J, K](heading: (String, String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J, K)*): Tuple11[A,B,C,D,E,F,G,H,I,J,K] = null
+ def foo[A, B, C, D, E, F, G, H, I, J](heading: (String, String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I, J)*): Tuple10[A,B,C,D,E,F,G,H,I,J] = null
+ def foo[A, B, C, D, E, F, G, H, I](heading: (String, String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H, I)*): Tuple9[A,B,C,D,E,F,G,H,I] = null
+ def foo[A, B, C, D, E, F, G, H](heading: (String, String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G, H)*): Tuple8[A,B,C,D,E,F,G,H] = null
+ def foo[A, B, C, D, E, F, G](heading: (String, String, String, String, String, String, String), rows: (A, B, C, D, E, F, G)*): Tuple7[A,B,C,D,E,F,G] = null
+ def foo[A, B, C, D, E, F](heading: (String, String, String, String, String, String), rows: (A, B, C, D, E, F)*): Tuple6[A,B,C,D,E,F] = null
+ def foo[A, B, C, D, E](heading: (String, String, String, String, String), rows: (A, B, C, D, E)*): Tuple5[A,B,C,D,E] = null
+ def foo[A, B, C, D](heading: (String, String, String, String), rows: (A, B, C, D)*): Tuple4[A,B,C,D] = null
+ def foo[A, B, C](heading: (String, String, String), rows: (A, B, C)*): Tuple3[A,B,C] = null
+ def foo[A, B](heading: (String, String), rows: (A, B)*): Tuple2[A,B] = null
+ def foo[A](heading: String, rows: A*): Tuple1[A] = null
+
+ foo((1))
+} \ No newline at end of file
diff --git a/test/files/neg/t935.check b/test/files/neg/t935.check
index 8b73700187..af634a2630 100644
--- a/test/files/neg/t935.check
+++ b/test/files/neg/t935.check
@@ -4,7 +4,4 @@ t935.scala:7: error: type arguments [Test3.B] do not conform to class E's type p
t935.scala:13: error: type arguments [Test4.B] do not conform to class E's type parameter bounds [T <: String]
val b: String @E[B](new B) = "hi"
^
-t935.scala:13: error: type arguments [Test4.B] do not conform to class E's type parameter bounds [T <: String]
- val b: String @E[B](new B) = "hi"
- ^
-three errors found
+two errors found
diff --git a/test/files/pos/matchStarlift.scala b/test/files/pos/matchStarlift.scala
deleted file mode 100644
index dab46eada2..0000000000
--- a/test/files/pos/matchStarlift.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-object Tet {
- import scala.xml._;
- def fooz(x: Node=>String) = {}
- def foo( m:Node ):Unit = fooz {
- case Elem(_,_,_,_,n,_*) if (n == m) => "gaga"
- }
-}
diff --git a/test/files/pos/t0422.scala b/test/files/pos/t0422.scala
deleted file mode 100644
index 2adfa392d2..0000000000
--- a/test/files/pos/t0422.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-package scala.xml.dtd.impl
-
-object BoolWordExp extends WordExp {
- type _labelT = MyLabels;
- type _regexpT = RegExp;
- abstract class MyLabels extends Label ;
- case class MyLabel(c:Char) extends MyLabels;
-}
-
-object MyTranslator extends WordBerrySethi {
- override val lang = BoolWordExp;
- import lang._;
- override protected def seenLabel( r:RegExp, i:Int, label: _labelT ): Unit = {
- super.seenLabel(r,i,label)
- }
-}
diff --git a/test/files/pos/t0646.scala b/test/files/pos/t0646.scala
deleted file mode 100644
index 6146e60020..0000000000
--- a/test/files/pos/t0646.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-object xfor {
-
- import scala.xml.NodeSeq
-
- val books =
- <bks>
- <title>Blabla</title>
- <title>Blubabla</title>
- <title>Baaaaaaalabla</title>
- </bks>;
-
- new NodeSeq { val theSeq = books.child } match {
- case t @ Seq(<title>Blabla</title>) => t
- }
-
- //val n: NodeSeq = new NodeSeq { val theSeq = books.child }
- //n match {
- // case t @ <title>Blabla</title> => t
- //}
-
-}
diff --git a/test/files/pos/t1014.scala b/test/files/pos/t1014.scala
deleted file mode 100644
index 3fc10d10dc..0000000000
--- a/test/files/pos/t1014.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-import scala.xml.{NodeSeq, Elem}
-
-class EO extends App with Moo {
- // return type is Flog, inherited from overridden method.
- // implicit conversions are applied because expected type `pt` is `Flog` when `computeType(rhs, pt)`.
- def cat = <cat>dog</cat>
-
- implicit def nodeSeqToFlog(in: Elem): Flog = new Flog(in)
-}
-
-trait Moo {
- def cat: Flog
-}
-
-class Flog(val in: NodeSeq)
diff --git a/test/files/pos/t1059.scala b/test/files/pos/t1059.scala
deleted file mode 100644
index bcd8f0374f..0000000000
--- a/test/files/pos/t1059.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-package com;
-
-import scala.xml._
-
-object Main {
-
- def main(args : Array[String]) : Unit = {
-
- var m : PartialFunction[Any, Any] = {
-
- case SafeNodeSeq(s @ _*) => println(s) }
-
- println(m(<a/> ++ <b/>))
- println(m.isDefinedAt(<a/> ++ <b/>))
-
- }
-
-}
-
-object SafeNodeSeq {
-
- def unapplySeq(any: Any) : Option[Seq[Node]] = any match { case s: Seq[_] => Some(s flatMap ( _ match {
-
- case n: Node => n case _ => NodeSeq.Empty
-
- })) case _ => None }
-
-}
diff --git a/test/files/pos/t1203a.scala b/test/files/pos/t1203a.scala
deleted file mode 100644
index 062ef93fc6..0000000000
--- a/test/files/pos/t1203a.scala
+++ /dev/null
@@ -1,7 +0,0 @@
-case class ant(t: String) extends scala.annotation.Annotation
-object Test {
- def main(args: Array[String]): Unit = {
- val a: scala.xml.NodeSeq @ant("12") = Nil
- println(a)
- }
-}
diff --git a/test/files/pos/t1626.scala b/test/files/pos/t1626.scala
deleted file mode 100644
index 200be47430..0000000000
--- a/test/files/pos/t1626.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-object o {
- val n = <a xmlns=""/>
- n.namespace == null
-}
diff --git a/test/files/pos/t1761.scala b/test/files/pos/t1761.scala
deleted file mode 100644
index 2af7280734..0000000000
--- a/test/files/pos/t1761.scala
+++ /dev/null
@@ -1,10 +0,0 @@
-import scala.xml._
-
-class Foo {
- val elements: Seq[Node] = Nil
- val innerTransform: PartialFunction[Elem, String] = {
- case Elem(_, l: String, _, _, _ @ _*) if elements.exists(_.label == l) =>
- l
- }
-}
-
diff --git a/test/files/pos/t2281.scala b/test/files/pos/t2281.scala
deleted file mode 100644
index 3515d2e2e6..0000000000
--- a/test/files/pos/t2281.scala
+++ /dev/null
@@ -1,41 +0,0 @@
-import scala.collection.mutable.ArrayBuffer
-
-class A {
- def f(x: Boolean) = if (x) <br/><br/> else <br/>
-}
-
-class B {
- def splitSentences(text : String) : ArrayBuffer[String] = {
- val outarr = new ArrayBuffer[String]
- var outstr = new StringBuffer
- var prevspace = false
- val ctext = text.replaceAll("\n+","\n")
- ctext foreach {c =>
- outstr append c
- if(c == '.' || c == '!' || c == '?' || c == '\n' || c == ':' || c == ';' || (prevspace && c == '-') ){
- outarr += outstr.toString
- outstr = new StringBuffer
- }
- if(c == '\n'){
- outarr += "\n\n"
- }
- prevspace = c == ' '
- }
- if(outstr.length > 0){
- outarr += outstr.toString
- }
- outarr
- }
-
- def spanForSentence(x : String,picktext : String) =
- if(x == "\n\n"){
- <br/><br/>
- }else{
- <span class='clicksentence' style={if(x == picktext) "background-color: yellow" else ""}>{x}</span>
- }
-
- def selectableSentences(text : String, picktext : String) = {
- val sentences = splitSentences(text)
- sentences.map(x => spanForSentence(x,picktext))
- }
-} \ No newline at end of file
diff --git a/test/files/pos/t2698.scala b/test/files/pos/t2698.scala
deleted file mode 100644
index 7de50a13d6..0000000000
--- a/test/files/pos/t2698.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-package scala.xml.dtd.impl
-
-import scala.collection._
-
-abstract class S2 {
- val lang: WordExp
- type __labelT = lang._labelT
-
- var deltaq: Array[__labelT] = _
- def delta1 = immutable.Map(deltaq.zipWithIndex: _*)
-}
diff --git a/test/files/pos/t3160.scala b/test/files/pos/t3160.scala
deleted file mode 100644
index 3309ece160..0000000000
--- a/test/files/pos/t3160.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-import scala.collection.mutable._
-import scala.xml._
-
-class A {
- def f(x: Node): Node = ???
-}
diff --git a/test/files/pos/t4760.scala b/test/files/pos/t4760.scala
new file mode 100644
index 0000000000..767e3847f4
--- /dev/null
+++ b/test/files/pos/t4760.scala
@@ -0,0 +1,34 @@
+
+class Test {
+ // parses
+ def f1 = {
+ import scala._;
+ }
+ // b.scala:7: error: ';' expected but '}' found.
+ // }
+ // ^
+ // one error found
+ def f2 = {
+ import scala._
+ }
+ def f2b = {
+ import scala.collection.mutable.{ Map => MMap }
+ }
+ def f(): Unit = {
+ locally {
+ import scala.util.Properties.lineSeparator
+ }
+ }
+
+ // parses
+ def f3 = {
+ import scala._
+ 5
+ }
+ locally { (x: Int) =>
+ import scala.util._
+ }
+ 1 match {
+ case 1 => import scala.concurrent._
+ }
+}
diff --git a/test/files/pos/t5858.scala b/test/files/pos/t5858.scala
deleted file mode 100644
index f2b0f58d76..0000000000
--- a/test/files/pos/t5858.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object Test {
- new xml.Elem(null, null, xml.Null, xml.TopScope, Nil: _*) // was ambiguous
-}
diff --git a/test/files/pos/t6201.scala b/test/files/pos/t6201.scala
deleted file mode 100644
index 366c1f26eb..0000000000
--- a/test/files/pos/t6201.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-class Test {
- class Foo1 {
- def must(x: scala.xml.Elem) = ()
- }
-
- class Foo2 {
- def must(x: Int) = ()
- }
- implicit def toFoo1(s: scala.xml.Elem) = new Foo1()
- implicit def toFoo2(s: scala.xml.Elem) = new Foo2()
-
- def is: Unit = { (<a>{"a"}</a>).must(<a>{"b"}</a>) }
-} \ No newline at end of file
diff --git a/test/files/pos/t6897.scala b/test/files/pos/t6897.scala
deleted file mode 100644
index a7a03a1d3a..0000000000
--- a/test/files/pos/t6897.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-class A {
- val html = (null: Any) match {
- case 1 => <xml:group></xml:group>
- case 2 => <p></p>
- }
-}
diff --git a/test/files/pos/t7014/ThreadSafety.java b/test/files/pos/t7014/ThreadSafety.java
new file mode 100644
index 0000000000..ed508804e3
--- /dev/null
+++ b/test/files/pos/t7014/ThreadSafety.java
@@ -0,0 +1,9 @@
+package t7014;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+@Retention(RetentionPolicy.RUNTIME) // must be exactly RUNTIME retention (those we parse)
+public @interface ThreadSafety {
+ ThreadSafetyLevel level();
+} \ No newline at end of file
diff --git a/test/files/pos/t7014/ThreadSafetyLevel.java b/test/files/pos/t7014/ThreadSafetyLevel.java
new file mode 100644
index 0000000000..4df1dc787a
--- /dev/null
+++ b/test/files/pos/t7014/ThreadSafetyLevel.java
@@ -0,0 +1,8 @@
+package t7014; // package needed due to other bug in scalac's java parser
+
+// since we parse eagerly, we have not yet parsed the classfile when parsing the annotation,
+// and on doing so, fail to find a symbol for the COMPLETELY_THREADSAFE reference
+// from the annotation's argument to the enum's member
+// for now, let's just not crash -- should implement lazy completing at some point
+@ThreadSafety(level=ThreadSafetyLevel.COMPLETELY_THREADSAFE)
+public enum ThreadSafetyLevel { COMPLETELY_THREADSAFE }
diff --git a/test/files/pos/t7014/t7014.scala b/test/files/pos/t7014/t7014.scala
new file mode 100644
index 0000000000..faec4c7740
--- /dev/null
+++ b/test/files/pos/t7014/t7014.scala
@@ -0,0 +1,4 @@
+package t7014
+
+import ThreadSafetyLevel.COMPLETELY_THREADSAFE // refer to annotation so it gets parsed
+ \ No newline at end of file
diff --git a/test/files/pos/t715/meredith_1.scala b/test/files/pos/t715/meredith_1.scala
deleted file mode 100644
index c28afb4a9b..0000000000
--- a/test/files/pos/t715/meredith_1.scala
+++ /dev/null
@@ -1,98 +0,0 @@
-package com.sap.dspace.model.othello;
-
-import scala.xml._
-
-trait XMLRenderer {
- type T <: Any {def getClass(): java.lang.Class[_]}
- val valueTypes =
- List(
- classOf[java.lang.Boolean],
- classOf[java.lang.Integer],
- classOf[java.lang.Float],
- classOf[java.lang.String]
- // more to come
- )
-
- def value2XML(
- value: Object,
- field: java.lang.reflect.Field,
- pojo: T
- ): Node = {
- value match {
- case null => Text("null")
- case vUnmatched =>
- if (value.isInstanceOf[java.lang.Boolean])
- Text(value.asInstanceOf[java.lang.Boolean].toString)
- else if (value.isInstanceOf[java.lang.Integer])
- Text(value.asInstanceOf[java.lang.Integer].toString)
- else if (value.isInstanceOf[java.lang.Float])
- Text(value.asInstanceOf[java.lang.Float].toString)
- // else if (value.isInstanceOf[T])
- // pojo2XML(value.asInstanceOf[T])
- else
- <unmatchedType>
- <theType>
- {vUnmatched.getClass.toString}
- </theType>
- <theValue>
- {vUnmatched.toString}
- </theValue>
- </unmatchedType>
- }
- }
-
- def field2XML(
- field: java.lang.reflect.Field,
- pojo: T
- ): Elem = {
-
- val accessible = field.isAccessible
- field.setAccessible(true)
- // BUGBUG lgm need to disambiguate on type and possibly make
- // recursive call to pojo2XML
- val fldValXML = value2XML(field.get( pojo ), field, pojo)
- field.setAccessible( accessible )
-
- Elem(
- null,
- field.getName,
- null,
- TopScope,
- fldValXML
- )
- }
-
- def pojo2XML(pojo: T): Elem = {
- val progeny =
- for (field <- pojo.getClass.getDeclaredFields)
- yield field2XML(field, pojo)
-
- Elem(
- null,
- pojo.getClass.getName,
- null,
- TopScope,
- progeny.asInstanceOf[Array[scala.xml.Node]]: _*
- )
- }
-}
-
-case class POJO2XMLRenderer(recurse: Boolean)
- extends XMLRenderer {
- type T = java.io.Serializable
- override def value2XML(
- value: Object,
- field: java.lang.reflect.Field,
- pojo: java.io.Serializable
- ): Node = {
- if (recurse) super.value2XML(value, field, pojo)
- else Text(value + "")
- }
-}
-
-object thePOJO2XMLRenderer extends POJO2XMLRenderer(true) {
-}
-
-object Test extends App {
- println(com.sap.dspace.model.othello.thePOJO2XMLRenderer)
-}
diff --git a/test/files/pos/t715/runner_2.scala b/test/files/pos/t715/runner_2.scala
deleted file mode 100644
index d54805629a..0000000000
--- a/test/files/pos/t715/runner_2.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object Test extends App {
- println(com.sap.dspace.model.othello.thePOJO2XMLRenderer)
-}
diff --git a/test/files/pos/t7486-named.scala b/test/files/pos/t7486-named.scala
new file mode 100644
index 0000000000..253293e5f1
--- /dev/null
+++ b/test/files/pos/t7486-named.scala
@@ -0,0 +1,8 @@
+
+object Test {
+ def fold(empty: Any) = ()
+ implicit val notAnnotatedImplicit = new {
+ fold(empty = 0)
+ def empty[A]: Any = ???
+ }
+}
diff --git a/test/pending/pos/t7486.scala b/test/files/pos/t7486.scala
index 6dd7f4c4ac..6dd7f4c4ac 100644
--- a/test/pending/pos/t7486.scala
+++ b/test/files/pos/t7486.scala
diff --git a/test/files/pos/t7694.scala b/test/files/pos/t7694.scala
new file mode 100644
index 0000000000..9852d5ec79
--- /dev/null
+++ b/test/files/pos/t7694.scala
@@ -0,0 +1,40 @@
+trait A
+trait B
+
+trait L[A2, B2 <: A2] {
+ def bar(a: Any, b: Any) = 0
+}
+
+object Lub {
+ // use named args transforms to include TypeTree(<lub.tpe>) in the AST before refchecks.
+ def foo(a: L[_, _], b: Any) = 0
+
+ foo(b = 0, a = if (true) (null: L[A, A]) else (null: L[B, B]))
+
+ (if (true) (null: L[A, A]) else (null: L[B, B])).bar(b = 0, a = 0)
+}
+
+/*
+The LUB ends up as:
+
+TypeRef(
+ TypeSymbol(
+ abstract trait L#7038[A2#7039, B2#7040 <: A2#7039] extends AnyRef#2197
+
+ )
+ args = List(
+ AbstractTypeRef(
+ AbstractType(
+ type _1#13680 >: A#7036 with B#7037 <: Object#1752
+ )
+ )
+ AbstractTypeRef(
+ AbstractType(
+ type _2#13681 >: A#7036 with B#7037 <: Object#1752
+ )
+ )
+ )
+)
+
+Note that type _2#13681 is *not* bound by _1#13680
+*/
diff --git a/test/files/pos/t7716.scala b/test/files/pos/t7716.scala
new file mode 100644
index 0000000000..40117051ed
--- /dev/null
+++ b/test/files/pos/t7716.scala
@@ -0,0 +1,16 @@
+object Test {
+ def test: Unit = {
+ val e: java.lang.Enum[_] = java.util.concurrent.TimeUnit.SECONDS
+ e match { case x => println(x) }
+
+
+ trait TA[X <: CharSequence]
+ val ta: TA[_] = new TA[String] {}
+
+ ta match {
+ case _ => println("hi")
+ }
+
+ def f(ta: TA[_]) = ta match { case _ => "hi" }
+ }
+}
diff --git a/test/files/pos/t7782.scala b/test/files/pos/t7782.scala
new file mode 100644
index 0000000000..037bdad673
--- /dev/null
+++ b/test/files/pos/t7782.scala
@@ -0,0 +1,25 @@
+package pack
+
+object Test {
+ import O.empty
+ empty // this will trigger completion of `test`
+ // with skolemizationLevel = 1
+}
+
+object O {
+ // order matters (!!!)
+
+ // this order breaks under 2.10.x
+ def empty[E]: C[E] = ???
+ def empty(implicit a: Any): Any = ???
+}
+
+abstract class C[E] {
+ def foo[BB](f: BB)
+ def test[B](f: B): Any = foo(f)
+ // error: no type parameters for method foo: (<param> f: BB)scala.this.Unit exist so that it can be applied to arguments (B&1)
+ // --- because ---
+ // argument expression's type is not compatible with formal parameter type;
+ // found : B&1
+ // required: ?BB
+}
diff --git a/test/files/pos/t7782b.scala b/test/files/pos/t7782b.scala
new file mode 100644
index 0000000000..09da4a5c5b
--- /dev/null
+++ b/test/files/pos/t7782b.scala
@@ -0,0 +1,25 @@
+package pack
+
+object Test {
+ import O.empty
+ empty // this will trigger completion of `test`
+ // with skolemizationLevel = 1
+}
+
+object O {
+ // order matters (!!!)
+
+ // this order breaks under 2.11.x
+ def empty(implicit a: Any): Any = ???
+ def empty[E]: C[E] = ???
+}
+
+abstract class C[E] {
+ def foo[BB](f: BB)
+ def test[B](f: B): Any = foo(f)
+ // error: no type parameters for method foo: (<param> f: BB)scala.this.Unit exist so that it can be applied to arguments (B&1)
+ // --- because ---
+ // argument expression's type is not compatible with formal parameter type;
+ // found : B&1
+ // required: ?BB
+}
diff --git a/test/files/pos/t880.scala b/test/files/pos/t880.scala
deleted file mode 100644
index cceb53c398..0000000000
--- a/test/files/pos/t880.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-import scala.xml.Null
-
-class Test[A >: Null]
-{
- val x : A = null
-}
diff --git a/test/files/pos/t942/Amount_1.java b/test/files/pos/t942/Amount_1.java
new file mode 100644
index 0000000000..d9d37d127b
--- /dev/null
+++ b/test/files/pos/t942/Amount_1.java
@@ -0,0 +1,5 @@
+import java.util.concurrent.Callable;
+
+public abstract class Amount_1<Q> extends Object
+ implements Callable<Amount_1<?>> {
+}
diff --git a/test/files/pos/t942/Test_2.scala b/test/files/pos/t942/Test_2.scala
new file mode 100644
index 0000000000..3cc84dae3c
--- /dev/null
+++ b/test/files/pos/t942/Test_2.scala
@@ -0,0 +1,3 @@
+abstract class Foo {
+ val x: Amount_1[Foo]
+}
diff --git a/test/files/presentation/partial-fun.check b/test/files/presentation/partial-fun.check
new file mode 100644
index 0000000000..0352d5e5c8
--- /dev/null
+++ b/test/files/presentation/partial-fun.check
@@ -0,0 +1,2 @@
+reload: PartialFun.scala
+ArrayBuffer()
diff --git a/test/files/presentation/partial-fun/Runner.scala b/test/files/presentation/partial-fun/Runner.scala
new file mode 100644
index 0000000000..3edd5bb5b0
--- /dev/null
+++ b/test/files/presentation/partial-fun/Runner.scala
@@ -0,0 +1,10 @@
+import scala.tools.nsc.interactive.tests.InteractiveTest
+
+object Test extends InteractiveTest {
+ override def runDefaultTests() {
+ sourceFiles foreach (src => askLoadedTyped(src).get)
+ super.runDefaultTests()
+
+ println(compiler.unitOfFile.values.map(_.problems).mkString("", "\n", ""))
+ }
+}
diff --git a/test/files/presentation/partial-fun/partial-fun.check b/test/files/presentation/partial-fun/partial-fun.check
new file mode 100644
index 0000000000..adceab8280
--- /dev/null
+++ b/test/files/presentation/partial-fun/partial-fun.check
@@ -0,0 +1 @@
+reload: PartialFun.scala
diff --git a/test/files/presentation/partial-fun/src/PartialFun.scala b/test/files/presentation/partial-fun/src/PartialFun.scala
new file mode 100644
index 0000000000..4657898ed1
--- /dev/null
+++ b/test/files/presentation/partial-fun/src/PartialFun.scala
@@ -0,0 +1,5 @@
+class A {
+ def foo {
+ val x: PartialFunction[Int, Int] = ({ case 0 => 0 })
+ }
+}
diff --git a/test/files/run/WeakHashSetTest.scala b/test/files/run/WeakHashSetTest.scala
new file mode 100644
index 0000000000..3c8f380150
--- /dev/null
+++ b/test/files/run/WeakHashSetTest.scala
@@ -0,0 +1,174 @@
+object Test {
+ def main(args: Array[String]) {
+ val test = scala.reflect.internal.util.WeakHashSetTest
+ test.checkEmpty
+ test.checkPlusEquals
+ test.checkPlusEqualsCollisions
+ test.checkRehashing
+ test.checkRehashCollisions
+ test.checkFindOrUpdate
+ test.checkMinusEquals
+ test.checkMinusEqualsCollisions
+ test.checkClear
+ test.checkIterator
+ test.checkIteratorCollisions
+
+ // This test is commented out because it relies on gc behavior which isn't reliable enough in an automated environment
+ // test.checkRemoveUnreferencedObjects
+ }
+}
+
+// put the main test object in the same package as WeakHashSet because
+// it uses the package private "diagnostics" method
+package scala.reflect.internal.util {
+
+ object WeakHashSetTest {
+ // a class guaranteed to provide hash collisions
+ case class Collider(x : String) extends Comparable[Collider] with Serializable {
+ override def hashCode = 0
+ def compareTo(y : Collider) = this.x compareTo y.x
+ }
+
+ // basic emptiness check
+ def checkEmpty {
+ val hs = new WeakHashSet[String]()
+ assert(hs.size == 0)
+ hs.diagnostics.fullyValidate
+ }
+
+ // make sure += works
+ def checkPlusEquals {
+ val hs = new WeakHashSet[String]()
+ val elements = List("hello", "goodbye")
+ elements foreach (hs += _)
+ assert(hs.size == 2)
+ assert(hs contains "hello")
+ assert(hs contains "goodbye")
+ hs.diagnostics.fullyValidate
+ }
+
+ // make sure += works when there are collisions
+ def checkPlusEqualsCollisions {
+ val hs = new WeakHashSet[Collider]()
+ val elements = List("hello", "goodbye") map Collider
+ elements foreach (hs += _)
+ assert(hs.size == 2)
+ assert(hs contains Collider("hello"))
+ assert(hs contains Collider("goodbye"))
+ hs.diagnostics.fullyValidate
+ }
+
+ // add a large number of elements to force rehashing and then validate
+ def checkRehashing {
+ val size = 200
+ val hs = new WeakHashSet[String]()
+ val elements = (0 until size).toList map ("a" + _)
+ elements foreach (hs += _)
+ elements foreach {i => assert(hs contains i)}
+ hs.diagnostics.fullyValidate
+ }
+
+ // make sure rehashing works properly when the set is rehashed
+ def checkRehashCollisions {
+ val size = 200
+ val hs = new WeakHashSet[Collider]()
+ val elements = (0 until size).toList map {x => Collider("a" + x)}
+ elements foreach (hs += _)
+ elements foreach {i => assert(hs contains i)}
+ hs.diagnostics.fullyValidate
+ }
+
+ // test that unreferenced objects are removed
+ // not run in an automated environment because gc behavior can't be relied on
+ def checkRemoveUnreferencedObjects {
+ val size = 200
+ val hs = new WeakHashSet[Collider]()
+ val elements = (0 until size).toList map {x => Collider("a" + x)}
+ elements foreach (hs += _)
+ // don't throw the following into a retained collection so gc
+ // can remove them
+ for (i <- 0 until size) {
+ hs += Collider("b" + i)
+ }
+ System.gc()
+ Thread.sleep(1000)
+ assert(hs.size == 200)
+ elements foreach {i => assert(hs contains i)}
+ for (i <- 0 until size) {
+ assert(!(hs contains Collider("b" + i)))
+ }
+ hs.diagnostics.fullyValidate
+ }
+
+ // make sure findOrUpdate returns the originally entered element
+ def checkFindOrUpdate {
+ val size = 200
+ val hs = new WeakHashSet[Collider]()
+ val elements = (0 until size).toList map {x => Collider("a" + x)}
+ elements foreach {x => assert(hs findEntryOrUpdate x eq x)}
+ for (i <- 0 until size) {
+ // when we do a lookup the result should be the same reference we
+ // original put in
+ assert(hs findEntryOrUpdate(Collider("a" + i)) eq elements(i))
+ }
+ hs.diagnostics.fullyValidate
+ }
+
+ // check -= functionality
+ def checkMinusEquals {
+ val hs = new WeakHashSet[String]()
+ val elements = List("hello", "goodbye")
+ elements foreach (hs += _)
+ hs -= "goodbye"
+ assert(hs.size == 1)
+ assert(hs contains "hello")
+ assert(!(hs contains "goodbye"))
+ hs.diagnostics.fullyValidate
+ }
+
+ // check -= when there are collisions
+ def checkMinusEqualsCollisions {
+ val hs = new WeakHashSet[Collider]
+ val elements = List(Collider("hello"), Collider("goodbye"))
+ elements foreach (hs += _)
+ hs -= Collider("goodbye")
+ assert(hs.size == 1)
+ assert(hs contains Collider("hello"))
+ assert(!(hs contains Collider("goodbye")))
+ hs -= Collider("hello")
+ assert(hs.size == 0)
+ assert(!(hs contains Collider("hello")))
+ hs.diagnostics.fullyValidate
+ }
+
+ // check that the clear method actually cleans everything
+ def checkClear {
+ val size = 200
+ val hs = new WeakHashSet[String]()
+ val elements = (0 until size).toList map ("a" + _)
+ elements foreach (hs += _)
+ hs.clear()
+ assert(hs.size == 0)
+ elements foreach {i => assert(!(hs contains i))}
+ hs.diagnostics.fullyValidate
+ }
+
+ // check that the iterator covers all the contents
+ def checkIterator {
+ val hs = new WeakHashSet[String]()
+ val elements = (0 until 20).toList map ("a" + _)
+ elements foreach (hs += _)
+ assert(elements.iterator.toList.sorted == elements.sorted)
+ hs.diagnostics.fullyValidate
+ }
+
+ // check that the iterator covers all the contents even when there is a collision
+ def checkIteratorCollisions {
+ val hs = new WeakHashSet[Collider]
+ val elements = (0 until 20).toList map {x => Collider("a" + x)}
+ elements foreach (hs += _)
+ assert(elements.iterator.toList.sorted == elements.sorted)
+ hs.diagnostics.fullyValidate
+ }
+ }
+}
diff --git a/test/files/run/analyzerPlugins.scala b/test/files/run/analyzerPlugins.scala
index b20a734fe6..4b297ff220 100644
--- a/test/files/run/analyzerPlugins.scala
+++ b/test/files/run/analyzerPlugins.scala
@@ -8,7 +8,9 @@ object Test extends DirectTest {
def code = """
class testAnn extends annotation.TypeConstraint
- class A(param: Double) extends { val x: Int = 1; val y = "two"; type T = A } with AnyRef {
+ class A(param: Double) extends { val x: Int = 1; val y = "two" } with AnyRef {
+ type T = A
+
val inferField = ("str": @testAnn)
val annotField: Boolean @testAnn = false
@@ -81,7 +83,7 @@ object Test extends DirectTest {
output += s"pluginsPt($pt, ${treeClass(tree)})"
pt
}
-
+
override def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Mode, pt: Type): Type = {
output += s"pluginsTyped($tpe, ${treeClass(tree)})"
tpe
diff --git a/test/files/run/deprecate-early-type-defs.check b/test/files/run/deprecate-early-type-defs.check
new file mode 100644
index 0000000000..1ee01df13e
--- /dev/null
+++ b/test/files/run/deprecate-early-type-defs.check
@@ -0,0 +1,3 @@
+deprecate-early-type-defs.scala:1: warning: early type members are deprecated. Move them to the regular body: the semantics are the same.
+object Test extends { type T = Int } with App
+ ^
diff --git a/test/files/run/deprecate-early-type-defs.flags b/test/files/run/deprecate-early-type-defs.flags
new file mode 100644
index 0000000000..c36e713ab8
--- /dev/null
+++ b/test/files/run/deprecate-early-type-defs.flags
@@ -0,0 +1 @@
+-deprecation \ No newline at end of file
diff --git a/test/files/run/deprecate-early-type-defs.scala b/test/files/run/deprecate-early-type-defs.scala
new file mode 100644
index 0000000000..99e42166f2
--- /dev/null
+++ b/test/files/run/deprecate-early-type-defs.scala
@@ -0,0 +1 @@
+object Test extends { type T = Int } with App \ No newline at end of file
diff --git a/test/files/run/existential-rangepos.check b/test/files/run/existential-rangepos.check
new file mode 100644
index 0000000000..1212b60bae
--- /dev/null
+++ b/test/files/run/existential-rangepos.check
@@ -0,0 +1,13 @@
+[[syntax trees at end of patmat]] // newSource1.scala
+[0:76]package [0:0]<empty> {
+ [0:76]abstract class A[[17:18]T[17:18]] extends [20:76][76]scala.AnyRef {
+ [76]def <init>(): [20]A[T] = [76]{
+ [76][76][76]A.super.<init>();
+ [20]()
+ };
+ [24:51]private[this] val foo: [28]Set[_ <: T] = [47:51]null;
+ [28]<stable> <accessor> def foo: [28]Set[_ <: T] = [28][28]A.this.foo;
+ [54:74]<stable> <accessor> def bar: [58]Set[_ <: T]
+ }
+}
+
diff --git a/test/files/run/existential-rangepos.scala b/test/files/run/existential-rangepos.scala
new file mode 100644
index 0000000000..7d2b0810d3
--- /dev/null
+++ b/test/files/run/existential-rangepos.scala
@@ -0,0 +1,13 @@
+import scala.tools.partest._
+
+object Test extends DirectTest {
+ override def extraSettings: String = "-usejavacp -Yrangepos -Xprint:patmat -Xprint-pos -d " + testOutput.path
+
+ override def code = """
+abstract class A[T] {
+ val foo: Set[_ <: T] = null
+ val bar: Set[_ <: T]
+}""".trim
+
+ override def show(): Unit = Console.withErr(System.out)(compile())
+}
diff --git a/test/files/run/fors.check b/test/files/run/fors.check
deleted file mode 100644
index 08ecc8ed5f..0000000000
--- a/test/files/run/fors.check
+++ /dev/null
@@ -1,46 +0,0 @@
-
-testOld
-1 2 3
-2
-2
-3
-1 2 3
-1 2 3
-0 1 2 3 4 5 6 7 8 9
-0 2 4 6 8
-0 2 4 6 8
-a b c
-b c
-b c
-
-
-<head><title>Scala</title></head>
-
-
-<body>1 2 3</body>
-
-
-<head><title>Scala</title></head>
-
-testNew
-3
-1 2 3
-1 2 3
-0 1 2 3 4 5 6 7 8 9
-0 2 4 6 8
-0 2 4 6 8
-0 2 4 6 8
-0 2 4 6 8
-0 2 4 6 8
-0 2 4 6 8
-0 2 4 6 8
-a b c
-
-
-<head><title>Scala</title></head>
-
-
-<body>1 2 3</body>
-
-
-<head><title>Scala</title></head>
diff --git a/test/files/run/fors.scala b/test/files/run/fors.scala
deleted file mode 100644
index 54afdc710b..0000000000
--- a/test/files/run/fors.scala
+++ /dev/null
@@ -1,97 +0,0 @@
-//############################################################################
-// for-comprehensions (old and new syntax)
-//############################################################################
-
-//############################################################################
-
-object Test extends App {
- val xs = List(1, 2, 3)
- val ys = List('a, 'b, 'c)
-
- def it = 0 until 10
-
- val ar = "abc".toCharArray
-
- val xml =
- <html>
- <head><title>Scala</title></head>
- <body>{xs}</body>
- </html>;
-
- /////////////////// old syntax ///////////////////
-
- def testOld {
- println("\ntestOld")
-
- // lists
- for (x <- xs) print(x + " "); println
- for (x <- xs;
- if x % 2 == 0) print(x + " "); println
- for {x <- xs
- if x % 2 == 0} print(x + " "); println
- var n = 0
- for (_ <- xs) n += 1; println(n)
- for ((x, y) <- xs zip ys) print(x + " "); println
- for (p @ (x, y) <- xs zip ys) print(p._1 + " "); println
-
- // iterators
- for (x <- it) print(x + " "); println
- for (x <- it;
- if x % 2 == 0) print(x + " "); println
- for {x <- it
- if x % 2 == 0} print(x + " "); println
-
- // arrays
- for (x <- ar) print(x + " "); println
- for (x <- ar;
- if x.toInt > 97) print(x + " "); println
- for {x <- ar
- if x.toInt > 97} print(x + " "); println
-
- // sequences
- for (x <- xml.child) println(x)
- for (x <- xml.child;
- if x.label == "head") println(x)
- }
-
- /////////////////// new syntax ///////////////////
-
- def testNew {
- println("\ntestNew")
-
- // lists
- var n = 0
- for (_ <- xs) n += 1; println(n)
- for ((x, y) <- xs zip ys) print(x + " "); println
- for (p @ (x, y) <- xs zip ys) print(p._1 + " "); println
-
- // iterators
- for (x <- it) print(x + " "); println
- for (x <- it if x % 2 == 0) print(x + " "); println
- for (x <- it; if x % 2 == 0) print(x + " "); println
- for (x <- it;
- if x % 2 == 0) print(x + " "); println
- for (x <- it
- if x % 2 == 0) print(x + " "); println
- for {x <- it
- if x % 2 == 0} print(x + " "); println
- for (x <- it;
- y = 2
- if x % y == 0) print(x + " "); println
- for {x <- it
- y = 2
- if x % y == 0} print(x + " "); println
-
- // arrays
- for (x <- ar) print(x + " "); println
-
- // sequences
- for (x <- xml.child) println(x)
- for (x <- xml.child if x.label == "head") println(x)
- }
-
- ////////////////////////////////////////////////////
-
- testOld
- testNew
-}
diff --git a/test/files/run/io-position.check b/test/files/run/io-position.check
deleted file mode 100644
index 09f743d750..0000000000
--- a/test/files/run/io-position.check
+++ /dev/null
Binary files differ
diff --git a/test/files/run/io-position.scala b/test/files/run/io-position.scala
deleted file mode 100644
index b227846fb4..0000000000
--- a/test/files/run/io-position.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-object Test {
- def main(args: Array[String]): Unit = Console.withErr(Console.out) {
- try {
- xml.parsing.ConstructingParser.fromSource(io.Source.fromString("<foo>"), false).document()
- } catch {
- case e:Exception => println(e.getMessage)
- }
- }
-
-}
-
diff --git a/test/files/run/json.check b/test/files/run/json.check
deleted file mode 100644
index d4d2b41658..0000000000
--- a/test/files/run/json.check
+++ /dev/null
@@ -1,21 +0,0 @@
-Passed compare: {"name" : "value"}
-Passed compare: {"name" : "va1ue"}
-Passed compare: {"name" : {"name1" : "va1ue1", "name2" : "va1ue2"}}
-Passed parse : {"name" : "\""}
-Passed compare: Map(function -> add_symbol)
-Passed compare: [{"a" : "team"}, {"b" : 52.0}]
-Passed compare: Map()
-Passed compare: List()
-Passed compare: [4.0, 1.0, 3.0, 2.0, 6.0, 5.0, 8.0, 7.0]
-Passed parse : {"age" : 0.0}
-Passed compare: {"name" : "va1ue"}
-Passed compare: {"name" : {"name1" : "va1ue1", "name2" : "va1ue2"}}
-Passed compare: [4.0, 1.0, 3.0, 2.0, 6.0, 5.0, 8.0, 7.0]
-Passed compare: {"\u006e\u0061\u006d\u0065" : "\u0076\u0061\u006c"}
-
-Passed compare: Map(firstName -> John, lastName -> Smith, address -> Map(streetAddress -> 21 2nd Street, city -> New York, state -> NY, postalCode -> 10021.0), phoneNumbers -> List(212 732-1234, 646 123-4567))
-
-Passed parse : {"addresses" : [{"format" : "us", "type" : "work", "value" : "1234 Main StnSpringfield, TX 78080-1216"}, {"format" : "us", "type" : "home", "value" : "5678 Main StnSpringfield, TX 78080-1316"}], "emailaddrs" : [{"type" : "work", "value" : "kelly@seankelly.biz"}, {"pref" : 1.0, "type" : "home", "value" : "kelly@seankelly.tv"}], "fullname" : "Sean Kelly", "org" : "SK Consulting", "telephones" : [{"pref" : 1.0, "type" : "work", "value" : "+1 214 555 1212"}, {"type" : "fax", "value" : "+1 214 555 1213"}, {"type" : "mobile", "value" : "+1 214 555 1214"}], "urls" : [{"type" : "work", "value" : "http:\/\/seankelly.biz\/"}, {"type" : "home", "value" : "http:\/\/seankelly.tv\/"}]}
-
-Passed parse : {"web-app" : {"servlet" : [{"init-param" : {"cachePackageTagsRefresh" : 60.0, "cachePackageTagsStore" : 200.0, "cachePackageTagsTrack" : 200.0, "cachePagesDirtyRead" : 10.0, "cachePagesRefresh" : 10.0, "cachePagesStore" : 100.0, "cachePagesTrack" : 200.0, "cacheTemplatesRefresh" : 15.0, "cacheTemplatesStore" : 50.0, "cacheTemplatesTrack" : 100.0, "configGlossary:adminEmail" : "ksm@pobox.com", "configGlossary:installationAt" : "Philadelphia, PA", "configGlossary:poweredBy" : "Cofax", "configGlossary:poweredByIcon" : "\/images\/cofax.gif", "configGlossary:staticPath" : "\/content\/static", "dataStoreClass" : "org.cofax.SqlDataStore", "dataStoreConnUsageLimit" : 100.0, "dataStoreDriver" : "com.microsoft.jdbc.sqlserver.SQLServerDriver", "dataStoreInitConns" : 10.0, "dataStoreLogFile" : "\/usr\/local\/tomcat\/logs\/datastore.log", "dataStoreLogLevel" : "debug", "dataStoreMaxConns" : 100.0, "dataStoreName" : "cofax", "dataStorePassword" : "dataStoreTestQuery", "dataStoreTestQuery" : "SET NOCOUNT ON;select test='test';", "dataStoreUrl" : "jdbc:microsoft:sqlserver:\/\/LOCALHOST:1433;DatabaseName=goon", "dataStoreUser" : "sa", "defaultFileTemplate" : "articleTemplate.htm", "defaultListTemplate" : "listTemplate.htm", "jspFileTemplate" : "articleTemplate.jsp", "jspListTemplate" : "listTemplate.jsp", "maxUrlLength" : 500.0, "redirectionClass" : "org.cofax.SqlRedirection", "searchEngineFileTemplate" : "forSearchEngines.htm", "searchEngineListTemplate" : "forSearchEnginesList.htm", "searchEngineRobotsDb" : "WEB-INF\/robots.db", "templateLoaderClass" : "org.cofax.FilesTemplateLoader", "templateOverridePath" : "", "templatePath" : "templates", "templateProcessorClass" : "org.cofax.WysiwygTemplate", "useDataStore" : true, "useJSP" : false}, "servlet-class" : "org.cofax.cds.CDSServlet", "servlet-name" : "cofaxCDS"}, {"init-param" : {"mailHost" : "mail1", "mailHostOverride" : "mail2"}, "servlet-class" : "org.cofax.cds.EmailServlet", "servlet-name" : "cofaxEmail"}, 
{"servlet-class" : "org.cofax.cds.AdminServlet", "servlet-name" : "cofaxAdmin"}, {"servlet-class" : "org.cofax.cds.FileServlet", "servlet-name" : "fileServlet"}, {"init-param" : {"adminGroupID" : 4.0, "betaServer" : true, "dataLog" : 1.0, "dataLogLocation" : "\/usr\/local\/tomcat\/logs\/dataLog.log", "dataLogMaxSize" : "", "fileTransferFolder" : "\/usr\/local\/tomcat\/webapps\/content\/fileTransferFolder", "log" : 1.0, "logLocation" : "\/usr\/local\/tomcat\/logs\/CofaxTools.log", "logMaxSize" : "", "lookInContext" : 1.0, "removePageCache" : "\/content\/admin\/remove?cache=pages&id=", "removeTemplateCache" : "\/content\/admin\/remove?cache=templates&id=", "templatePath" : "toolstemplates\/"}, "servlet-class" : "org.cofax.cms.CofaxToolsServlet", "servlet-name" : "cofaxTools"}], "servlet-mapping" : {"cofaxAdmin" : "\/admin\/*", "cofaxCDS" : "\/", "cofaxEmail" : "\/cofaxutil\/aemail\/*", "cofaxTools" : "\/tools\/*", "fileServlet" : "\/static\/*"}, "taglib" : {"taglib-location" : "\/WEB-INF\/tlds\/cofax.tld", "taglib-uri" : "cofax.tld"}}}
-
diff --git a/test/files/run/json.scala b/test/files/run/json.scala
deleted file mode 100644
index 36e86ac5bb..0000000000
--- a/test/files/run/json.scala
+++ /dev/null
@@ -1,287 +0,0 @@
-/*
- * filter: inliner warning\(s\); re-run with -Yinline-warnings for details
- */
-import scala.util.parsing.json._
-import scala.collection.immutable.TreeMap
-
-@deprecated("Suppress warnings", since="2.11")
-object Test extends App {
- /* This method converts parsed JSON back into real JSON notation with objects in
- * sorted-key order. Not required by the spec, but it allows us to do a stable
- * toString comparison. */
- def jsonToString(in : Any) : String = in match {
- case l : List[_] => "[" + l.map(jsonToString).mkString(", ") + "]"
- case m : Map[String @unchecked,_] => "{" + m.iterator.toList
- .sortWith({ (x,y) => x._1 < y._1 })
- .map({ case (k,v) => "\"" + k + "\": " + jsonToString(v) })
- .mkString(", ") + "}"
- case s : String => "\"" + s + "\""
- case x => x.toString
- }
-
- /*
- * This method takes input JSON values and sorts keys on objects.
- */
- def sortJSON(in : Any) : Any = in match {
- case l : List[_] => l.map(sortJSON)
- case m : Map[String @unchecked,_] => TreeMap(m.mapValues(sortJSON).iterator.toSeq : _*)
- // For the object versions, sort their contents, ugly casts and all...
- case JSONObject(data) => JSONObject(sortJSON(data).asInstanceOf[Map[String,Any]])
- case JSONArray(data) => JSONArray(sortJSON(data).asInstanceOf[List[Any]])
- case x => x
- }
-
- // For this one, just parsing should be considered a pass
- def printJSON(given : String) {
- JSON parseRaw given match {
- case None => println("Parse failed for \"%s\"".format(given))
- case Some(parsed) => println("Passed parse : " + sortJSON(parsed))
- }
- }
-
- // For this usage, do a raw parse (to JSONObject/JSONArray)
- def printJSON(given : String, expected : JSONType) {
- printJSON(given, JSON.parseRaw, expected)
- }
-
- // For this usage, do a raw parse (to JSONType and subclasses)
- def printJSONFull(given : String, expected : Any) {
- printJSON(given, JSON.parseFull, expected)
- }
-
- // For this usage, do configurable parsing so that you can do raw if desired
- def printJSON[T](given : String, parser : String => T, expected : Any) {
- parser(given) match {
- case None => println("Parse failed for \"%s\"".format(given))
- case Some(parsed) => if (parsed == expected) {
- println("Passed compare: " + parsed)
- } else {
- val eStr = sortJSON(expected).toString
- val pStr = sortJSON(parsed).toString
- stringDiff(eStr,pStr)
- }
- }
- }
-
- def stringDiff (expected : String, actual : String) {
- if (expected != actual) {
- // Figure out where the Strings differ and generate a marker
- val mismatchPosition = expected.toList.zip(actual.toList).indexWhere({case (x,y) => x != y}) match {
- case -1 => Math.min(expected.length, actual.length)
- case x => x
- }
- val reason = (" " * mismatchPosition) + "^"
- println("Expected: %s\nGot : %s \n %s".format(expected, actual, reason))
-
- } else {
- println("Passed compare: " + actual)
- }
- }
-
-
- // The library should differentiate between lower case "l" and number "1" (ticket #136)
- printJSON("{\"name\" : \"value\"}", JSONObject(Map("name" -> "value")))
- printJSON("{\"name\" : \"va1ue\"}", JSONObject(Map("name" -> "va1ue")))
- printJSON("{\"name\" : { \"name1\" : \"va1ue1\", \"name2\" : \"va1ue2\" } }",
- JSONObject(Map("name" -> JSONObject(Map("name1" -> "va1ue1", "name2" -> "va1ue2")))))
-
- // Unicode escapes should be handled properly
- printJSON("{\"name\" : \"\\u0022\"}")
-
- // The library should return a map for JSON objects (ticket #873)
- printJSONFull("{\"function\" : \"add_symbol\"}", Map("function" -> "add_symbol"))
-
- // The library should recurse into arrays to find objects (ticket #2207)
- printJSON("[{\"a\" : \"team\"},{\"b\" : 52}]", JSONArray(List(JSONObject(Map("a" -> "team")), JSONObject(Map("b" -> 52.0)))))
-
- // The library should differentiate between empty maps and lists (ticket #3284)
- printJSONFull("{}", Map())
- printJSONFull("[]", List())
-
- // Lists should be returned in the same order as specified
- printJSON("[4,1,3,2,6,5,8,7]", JSONArray(List[Double](4,1,3,2,6,5,8,7)))
-
- // Additional tests
- printJSON("{\"age\": 0}")
-
- // The library should do a proper toString representation using default and custom renderers (ticket #3605)
- stringDiff("{\"name\" : \"va1ue\"}", JSONObject(Map("name" -> "va1ue")).toString)
- stringDiff("{\"name\" : {\"name1\" : \"va1ue1\", \"name2\" : \"va1ue2\"}}",
- JSONObject(Map("name" -> JSONObject(TreeMap("name1" -> "va1ue1", "name2" -> "va1ue2")))).toString)
-
- stringDiff("[4.0, 1.0, 3.0, 2.0, 6.0, 5.0, 8.0, 7.0]", JSONArray(List[Double](4,1,3,2,6,5,8,7)).toString)
-
- // A test method that escapes all characters in strings
- def escapeEverything (in : Any) : String = in match {
- case s : String => "\"" + s.map(c => "\\u%04x".format(c : Int)).mkString + "\""
- case jo : JSONObject => jo.toString(escapeEverything)
- case ja : JSONArray => ja.toString(escapeEverything)
- case other => other.toString
- }
-
- stringDiff("{\"\\u006e\\u0061\\u006d\\u0065\" : \"\\u0076\\u0061\\u006c\"}", JSONObject(Map("name" -> "val")).toString(escapeEverything))
-
- println
-
- // from http://en.wikipedia.org/wiki/JSON
- val sample1 = """
-{
- "firstName": "John",
- "lastName": "Smith",
- "address": {
- "streetAddress": "21 2nd Street",
- "city": "New York",
- "state": "NY",
- "postalCode": 10021
- },
- "phoneNumbers": [
- "212 732-1234",
- "646 123-4567"
- ]
-}"""
-
- // Should be equivalent to:
- val sample1Obj = Map(
- "firstName" -> "John",
- "lastName" -> "Smith",
- "address" -> Map(
- "streetAddress" -> "21 2nd Street",
- "city" -> "New York",
- "state" -> "NY",
- "postalCode" -> 10021
- ),
- "phoneNumbers"-> List(
- "212 732-1234",
- "646 123-4567"
- )
- )
-
-
- printJSONFull(sample1, sample1Obj)
- println
-
- // from http://www.developer.com/lang/jscript/article.php/3596836
- val sample2 = """
-{
- "fullname": "Sean Kelly",
- "org": "SK Consulting",
- "emailaddrs": [
- {"type": "work", "value": "kelly@seankelly.biz"},
- {"type": "home", "pref": 1, "value": "kelly@seankelly.tv"}
- ],
- "telephones": [
- {"type": "work", "pref": 1, "value": "+1 214 555 1212"},
- {"type": "fax", "value": "+1 214 555 1213"},
- {"type": "mobile", "value": "+1 214 555 1214"}
- ],
- "addresses": [
- {"type": "work", "format": "us",
- "value": "1234 Main StnSpringfield, TX 78080-1216"},
- {"type": "home", "format": "us",
- "value": "5678 Main StnSpringfield, TX 78080-1316"}
- ],
- "urls": [
- {"type": "work", "value": "http://seankelly.biz/"},
- {"type": "home", "value": "http://seankelly.tv/"}
- ]
-}"""
-
- printJSON(sample2)
- println
-
- // from http://json.org/example.html
- val sample3 = """
-{"web-app": {
- "servlet": [
- {
- "servlet-name": "cofaxCDS",
- "servlet-class": "org.cofax.cds.CDSServlet",
- "init-param": {
- "configGlossary:installationAt": "Philadelphia, PA",
- "configGlossary:adminEmail": "ksm@pobox.com",
- "configGlossary:poweredBy": "Cofax",
- "configGlossary:poweredByIcon": "/images/cofax.gif",
- "configGlossary:staticPath": "/content/static",
- "templateProcessorClass": "org.cofax.WysiwygTemplate",
- "templateLoaderClass": "org.cofax.FilesTemplateLoader",
- "templatePath": "templates",
- "templateOverridePath": "",
- "defaultListTemplate": "listTemplate.htm",
- "defaultFileTemplate": "articleTemplate.htm",
- "useJSP": false,
- "jspListTemplate": "listTemplate.jsp",
- "jspFileTemplate": "articleTemplate.jsp",
- "cachePackageTagsTrack": 200,
- "cachePackageTagsStore": 200,
- "cachePackageTagsRefresh": 60,
- "cacheTemplatesTrack": 100,
- "cacheTemplatesStore": 50,
- "cacheTemplatesRefresh": 15,
- "cachePagesTrack": 200,
- "cachePagesStore": 100,
- "cachePagesRefresh": 10,
- "cachePagesDirtyRead": 10,
- "searchEngineListTemplate": "forSearchEnginesList.htm",
- "searchEngineFileTemplate": "forSearchEngines.htm",
- "searchEngineRobotsDb": "WEB-INF/robots.db",
- "useDataStore": true,
- "dataStoreClass": "org.cofax.SqlDataStore",
- "redirectionClass": "org.cofax.SqlRedirection",
- "dataStoreName": "cofax",
- "dataStoreDriver": "com.microsoft.jdbc.sqlserver.SQLServerDriver",
- "dataStoreUrl": "jdbc:microsoft:sqlserver://LOCALHOST:1433;DatabaseName=goon",
- "dataStoreUser": "sa",
- "dataStorePassword": "dataStoreTestQuery",
- "dataStoreTestQuery": "SET NOCOUNT ON;select test='test';",
- "dataStoreLogFile": "/usr/local/tomcat/logs/datastore.log",
- "dataStoreInitConns": 10,
- "dataStoreMaxConns": 100,
- "dataStoreConnUsageLimit": 100,
- "dataStoreLogLevel": "debug",
- "maxUrlLength": 500}},
- {
- "servlet-name": "cofaxEmail",
- "servlet-class": "org.cofax.cds.EmailServlet",
- "init-param": {
- "mailHost": "mail1",
- "mailHostOverride": "mail2"}},
- {
- "servlet-name": "cofaxAdmin",
- "servlet-class": "org.cofax.cds.AdminServlet"},
-
- {
- "servlet-name": "fileServlet",
- "servlet-class": "org.cofax.cds.FileServlet"},
- {
- "servlet-name": "cofaxTools",
- "servlet-class": "org.cofax.cms.CofaxToolsServlet",
- "init-param": {
- "templatePath": "toolstemplates/",
- "log": 1,
- "logLocation": "/usr/local/tomcat/logs/CofaxTools.log",
- "logMaxSize": "",
- "dataLog": 1,
- "dataLogLocation": "/usr/local/tomcat/logs/dataLog.log",
- "dataLogMaxSize": "",
- "removePageCache": "/content/admin/remove?cache=pages&id=",
- "removeTemplateCache": "/content/admin/remove?cache=templates&id=",
- "fileTransferFolder": "/usr/local/tomcat/webapps/content/fileTransferFolder",
- "lookInContext": 1,
- "adminGroupID": 4,
- "betaServer": true}}],
- "servlet-mapping": {
- "cofaxCDS": "/",
- "cofaxEmail": "/cofaxutil/aemail/*",
- "cofaxAdmin": "/admin/*",
- "fileServlet": "/static/*",
- "cofaxTools": "/tools/*"},
-
- "taglib": {
- "taglib-uri": "cofax.tld",
- "taglib-location": "/WEB-INF/tlds/cofax.tld"}
- }
-}"""
-
- printJSON(sample3)
- println
-}
diff --git a/test/files/run/jtptest.check b/test/files/run/jtptest.check
deleted file mode 100644
index 95dbd28437..0000000000
--- a/test/files/run/jtptest.check
+++ /dev/null
@@ -1,7 +0,0 @@
-[1.4] parsed: 1.1
-[1.3] parsed: 1.
-[1.3] parsed: .1
-[1.1] failure: string matching regex `(\d+(\.\d*)?|\d*\.\d+)' expected but `!' found
-
-!1
-^
diff --git a/test/files/run/jtptest.scala b/test/files/run/jtptest.scala
deleted file mode 100644
index 4d0eef9153..0000000000
--- a/test/files/run/jtptest.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-
-import scala.util.parsing.combinator.JavaTokenParsers
-import scala.util.parsing.input.CharArrayReader
-
-object TestJavaTokenParsers extends JavaTokenParsers {
-}
-
-object Test {
- import TestJavaTokenParsers._
-
- def main(args : Array[String]) {
- println(decimalNumber(new CharArrayReader("1.1".toCharArray)))
- println(decimalNumber(new CharArrayReader("1.".toCharArray)))
- println(decimalNumber(new CharArrayReader(".1".toCharArray)))
- println(decimalNumber(new CharArrayReader("!1".toCharArray)))
- }
-}
diff --git a/test/files/run/macro-auto-duplicate.check b/test/files/run/macro-auto-duplicate.check
new file mode 100644
index 0000000000..d81cc0710e
--- /dev/null
+++ b/test/files/run/macro-auto-duplicate.check
@@ -0,0 +1 @@
+42
diff --git a/test/files/run/macro-auto-duplicate/Macros_1.scala b/test/files/run/macro-auto-duplicate/Macros_1.scala
new file mode 100644
index 0000000000..e3df05ba50
--- /dev/null
+++ b/test/files/run/macro-auto-duplicate/Macros_1.scala
@@ -0,0 +1,17 @@
+import scala.reflect.macros.Context
+import language.experimental.macros
+
+object Macros {
+ def impl(c: Context) = {
+ import c.universe._
+ val x = Ident(newTermName("x"))
+ def defAndUseX(rhs: Tree) = {
+ Block(List(ValDef(NoMods, newTermName("x"), TypeTree(), rhs)), x)
+ }
+ val xi4 = defAndUseX(Literal(Constant(4)))
+ val xs2 = defAndUseX(Literal(Constant("2")))
+ c.Expr[String](Apply(Select(xi4, newTermName("$plus")), List(xs2)))
+ }
+
+ def foo = macro impl
+} \ No newline at end of file
diff --git a/test/files/run/macro-auto-duplicate/Test_2.scala b/test/files/run/macro-auto-duplicate/Test_2.scala
new file mode 100644
index 0000000000..f697da6020
--- /dev/null
+++ b/test/files/run/macro-auto-duplicate/Test_2.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+ println(Macros.foo)
+} \ No newline at end of file
diff --git a/test/files/run/macro-duplicate/Impls_Macros_1.scala b/test/files/run/macro-duplicate/Impls_Macros_1.scala
index af80147a90..85a581585f 100644
--- a/test/files/run/macro-duplicate/Impls_Macros_1.scala
+++ b/test/files/run/macro-duplicate/Impls_Macros_1.scala
@@ -26,4 +26,4 @@ object Macros {
}
def foo = macro impl
-} \ No newline at end of file
+}
diff --git a/test/files/run/nodebuffer-array.check b/test/files/run/nodebuffer-array.check
deleted file mode 100644
index 49f8bfaf8d..0000000000
--- a/test/files/run/nodebuffer-array.check
+++ /dev/null
@@ -1,3 +0,0 @@
-<entry>
- <elem>a</elem><elem>b</elem><elem>c</elem>
- </entry>
diff --git a/test/files/run/nodebuffer-array.scala b/test/files/run/nodebuffer-array.scala
deleted file mode 100644
index 4e1ffe1e5e..0000000000
--- a/test/files/run/nodebuffer-array.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-object Test {
-
- def f(s: String) = {
- <entry>
- {
- for (item <- s split ',') yield
- <elem>{ item }</elem>
- }
- </entry>
- }
-
- def main(args: Array[String]): Unit = {
- println(f("a,b,c"))
- }
-}
diff --git a/test/files/run/packrat1.check b/test/files/run/packrat1.check
deleted file mode 100644
index e9f797e1b6..0000000000
--- a/test/files/run/packrat1.check
+++ /dev/null
@@ -1,7 +0,0 @@
-1
-3
-5
-81
-4
-37
-9
diff --git a/test/files/run/packrat1.scala b/test/files/run/packrat1.scala
deleted file mode 100644
index b5a4687378..0000000000
--- a/test/files/run/packrat1.scala
+++ /dev/null
@@ -1,47 +0,0 @@
-import scala.util.parsing.combinator._
-
-import scala.util.parsing.combinator.syntactical.StandardTokenParsers
-import scala.util.parsing.input._
-import scala.util.parsing.combinator.token._
-
-import scala.collection.mutable.HashMap
-
-object Test extends App{
- import grammars._
-
- val head = phrase(term)
-
- println(extractResult(head(new lexical.Scanner("1"))))
- println(extractResult(head(new lexical.Scanner("1+2"))))
- println(extractResult(head(new lexical.Scanner("9-4"))))
- println(extractResult(head(new lexical.Scanner("9*9"))))
- println(extractResult(head(new lexical.Scanner("8/2"))))
- println(extractResult(head(new lexical.Scanner("4*9-0/7+9-8*1"))))
- println(extractResult(head(new lexical.Scanner("(1+2)*3"))))
-}
-
-object grammars extends StandardTokenParsers with PackratParsers{
-
- def extractResult(r : ParseResult[_]) = r match {
- case Success(a,_) => a
- case NoSuccess(a,_) => a
- }
-
- lexical.delimiters ++= List("+","-","*","/","(",")")
- lexical.reserved ++= List("Hello","World")
-
- /****
- * term = term + fact | term - fact | fact
- * fact = fact * num | fact / num | num
- */
-
-
- val term: PackratParser[Int] = (term~("+"~>fact) ^^ {case x~y => x+y}
- |term~("-"~>fact) ^^ {case x~y => x-y}
- |fact)
-
- val fact: PackratParser[Int] = (fact~("*"~>numericLit) ^^ {case x~y => x*y.toInt}
- |fact~("/"~>numericLit) ^^ {case x~y => x/y.toInt}
- |"("~>term<~")"
- |numericLit ^^ {_.toInt})
- }
diff --git a/test/files/run/packrat2.check b/test/files/run/packrat2.check
deleted file mode 100644
index 55a32ac58b..0000000000
--- a/test/files/run/packrat2.check
+++ /dev/null
@@ -1,7 +0,0 @@
-1
-3
-81
-43
-59
-188
-960
diff --git a/test/files/run/packrat2.scala b/test/files/run/packrat2.scala
deleted file mode 100644
index f55021a6a8..0000000000
--- a/test/files/run/packrat2.scala
+++ /dev/null
@@ -1,57 +0,0 @@
-import scala.util.parsing.combinator._
-
-import scala.util.parsing.combinator.syntactical.StandardTokenParsers
-import scala.util.parsing.input._
-import scala.util.parsing.combinator.token._
-
-import scala.collection.mutable.HashMap
-
-object Test extends App{
- import grammars2._
-
- val head = phrase(exp)
-
- println(extractResult(head(new lexical.Scanner("1"))))
- println(extractResult(head(new lexical.Scanner("1+2"))))
- println(extractResult(head(new lexical.Scanner("9*9"))))
- println(extractResult(head(new lexical.Scanner("4*9+7"))))
- println(extractResult(head(new lexical.Scanner("4*9+7*2+3*3"))))
- println(extractResult(head(new lexical.Scanner("4*9+7*2+3*3+9*5+7*6*2"))))
- println(extractResult(head(new lexical.Scanner("4*(9+7)*(2+3)*3"))))
-
-}
-
-object grammars2 extends StandardTokenParsers with PackratParsers{
-
- def extractResult(r : ParseResult[_]) = r match{
- case Success(a,_) => a
- case NoSuccess(a,_) => a
- }
-
- lexical.delimiters ++= List("+","-","*","/","(",")")
- lexical.reserved ++= List("Hello","World")
-
- /*
- * exp = sum | prod | num
- * sum = exp ~ "+" ~ num
- * prod = exp ~ "*" ~ num
- */
-
- val exp : PackratParser[Int] = sum | prod | numericLit ^^{_.toInt} | "("~>exp<~")"
- val sum : PackratParser[Int] = exp~("+"~>exp) ^^ {case x~y => x+y}
- val prod: PackratParser[Int] = exp~("*"~>(numericLit ^^{_.toInt} | exp)) ^^ {case x~y => x*y}
-
-
- /* lexical.reserved ++= List("a","b", "c")
- val a : PackratParser[Any] = numericLit^^{x => primeFactors(x.toInt)}
- val b : PackratParser[Any] = memo("b")
- val c : PackratParser[Any] = memo("c")
- val AnBnCn : PackratParser[Any] =
- parseButDontEat(repMany1(a,b))~not(b)~>rep1(a)~repMany1(b,c)// ^^{case x~y => x:::y}
- //val c : PackratParser[Any] = parseButDontEat(a)~a~a
- //println(c((new PackratReader(new lexical.Scanner("45 24")))))
- val r = new PackratReader(new lexical.Scanner("45 b c"))
- println(AnBnCn(r))
- println(r.getCache.size)
-*/
-}
diff --git a/test/files/run/packrat3.check b/test/files/run/packrat3.check
deleted file mode 100644
index 8c10626751..0000000000
--- a/test/files/run/packrat3.check
+++ /dev/null
@@ -1,7 +0,0 @@
-(((List(a, b)~())~List(a))~List(b, c))
-(((List(a, a, b, b)~())~List(a, a))~List(b, b, c, c))
-(((List(a, a, a, b, b, b)~())~List(a, a, a))~List(b, b, b, c, c, c))
-(((List(a, a, a, a, b, b, b, b)~())~List(a, a, a, a))~List(b, b, b, b, c, c, c, c))
-Expected failure
-``b'' expected but `c' found
-end of input
diff --git a/test/files/run/packrat3.scala b/test/files/run/packrat3.scala
deleted file mode 100644
index 216ef8f0af..0000000000
--- a/test/files/run/packrat3.scala
+++ /dev/null
@@ -1,51 +0,0 @@
-import scala.util.parsing.combinator._
-
-import scala.util.parsing.combinator.syntactical.StandardTokenParsers
-import scala.util.parsing.input._
-import scala.util.parsing.combinator.token._
-
-import scala.collection.mutable.HashMap
-
-object Test {
- def main(args: Array[String]): Unit = {
- import grammars3._
-
- val head = phrase(AnBnCn)
-
- println(extractResult(head(new lexical.Scanner("a b c"))))
- println(extractResult(head(new lexical.Scanner("a a b b c c"))))
- println(extractResult(head(new lexical.Scanner("a a a b b b c c c"))))
- println(extractResult(head(new lexical.Scanner("a a a a b b b b c c c c"))))
-
- println(extractResult(AnBnCn(new PackratReader(new lexical.Scanner("a a a b b b b c c c c")))))
- println(extractResult(AnBnCn(new PackratReader(new lexical.Scanner("a a a a b b b c c c c")))))
- println(extractResult(AnBnCn(new PackratReader(new lexical.Scanner("a a a a b b b b c c c")))))
- }
-}
-
-object grammars3 extends StandardTokenParsers with PackratParsers {
-
- def extractResult(r: ParseResult[_]) = r match {
- case Success(a,_) => a
- case NoSuccess(a,_) => a
- }
-
-
- lexical.reserved ++= List("a","b", "c")
- val a: PackratParser[Any] = memo("a")
- val b: PackratParser[Any] = memo("b")
- val c: PackratParser[Any] = memo("c")
-
- val AnBnCn: PackratParser[Any] =
- guard(repMany1(a,b) ~ not(b)) ~ rep1(a) ~ repMany1(b,c)// ^^{case x~y => x:::y}
-
-
- private def repMany[T](p: => Parser[T], q: => Parser[T]): Parser[List[T]] =
- ( p~repMany(p,q)~q ^^ {case x~xs~y => x::xs:::(y::Nil)}
- | success(Nil)
- )
-
- def repMany1[T](p: => Parser[T], q: => Parser[T]): Parser[List[T]] =
- p~opt(repMany(p,q))~q ^^ {case x~Some(xs)~y => x::xs:::(y::Nil)}
-
-}
diff --git a/test/files/run/parserFilter.check b/test/files/run/parserFilter.check
deleted file mode 100644
index be04454426..0000000000
--- a/test/files/run/parserFilter.check
+++ /dev/null
@@ -1,9 +0,0 @@
-[1.3] failure: Input doesn't match filter: false
-
-if false
- ^
-[1.1] failure: Input doesn't match filter: not
-
-not true
-^
-[1.8] parsed: (if~true)
diff --git a/test/files/run/parserFilter.scala b/test/files/run/parserFilter.scala
deleted file mode 100644
index d007d441f4..0000000000
--- a/test/files/run/parserFilter.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-object Test extends scala.util.parsing.combinator.RegexParsers {
- val keywords = Set("if", "false")
- def word: Parser[String] = "\\w+".r
-
- def keyword: Parser[String] = word filter (keywords.contains)
- def ident: Parser[String] = word filter(!keywords.contains(_))
-
- def test = keyword ~ ident
-
- def main(args: Array[String]) {
- println(parseAll(test, "if false"))
- println(parseAll(test, "not true"))
- println(parseAll(test, "if true"))
- }
-}
diff --git a/test/files/run/parserForFilter.check b/test/files/run/parserForFilter.check
deleted file mode 100644
index a53c147719..0000000000
--- a/test/files/run/parserForFilter.check
+++ /dev/null
@@ -1 +0,0 @@
-[1.13] parsed: (second,first)
diff --git a/test/files/run/parserForFilter.scala b/test/files/run/parserForFilter.scala
deleted file mode 100644
index 1bc44f8033..0000000000
--- a/test/files/run/parserForFilter.scala
+++ /dev/null
@@ -1,12 +0,0 @@
-object Test extends scala.util.parsing.combinator.RegexParsers {
- def word: Parser[String] = "\\w+".r
-
- def twoWords = for {
- (a ~ b) <- word ~ word
- } yield (b, a)
-
- def main(args: Array[String]) {
- println(parseAll(twoWords, "first second"))
- }
-}
-
diff --git a/test/files/run/parserJavaIdent.check b/test/files/run/parserJavaIdent.check
deleted file mode 100644
index 597ddbee47..0000000000
--- a/test/files/run/parserJavaIdent.check
+++ /dev/null
@@ -1,26 +0,0 @@
-[1.7] parsed: simple
-[1.8] parsed: with123
-[1.6] parsed: with$
-[1.10] parsed: withøßöèæ
-[1.6] parsed: with_
-[1.6] parsed: _with
-[1.1] failure: java identifier expected
-
-3start
-^
-[1.1] failure: java identifier expected
-
--start
-^
-[1.5] failure: java identifier expected
-
-with-s
- ^
-[1.3] failure: java identifier expected
-
-we♥scala
- ^
-[1.6] failure: java identifier expected
-
-with space
- ^
diff --git a/test/files/run/parserJavaIdent.scala b/test/files/run/parserJavaIdent.scala
deleted file mode 100644
index c068075e4e..0000000000
--- a/test/files/run/parserJavaIdent.scala
+++ /dev/null
@@ -1,26 +0,0 @@
-object Test extends scala.util.parsing.combinator.JavaTokenParsers {
-
- def test[A](s: String) {
- val res = parseAll(ident, s) match {
- case Failure(_, in) => Failure("java identifier expected", in)
- case o => o
- }
- println(res)
- }
-
- def main(args: Array[String]) {
- // Happy tests
- test("simple")
- test("with123")
- test("with$")
- test("withøßöèæ")
- test("with_")
- test("_with")
- // Sad tests
- test("3start")
- test("-start")
- test("with-s")
- test("we♥scala")
- test("with space")
- }
-}
diff --git a/test/files/run/parserNoSuccessMessage.check b/test/files/run/parserNoSuccessMessage.check
deleted file mode 100644
index fe00d2fd3a..0000000000
--- a/test/files/run/parserNoSuccessMessage.check
+++ /dev/null
@@ -1,20 +0,0 @@
-[1.2] failure: string matching regex `\d+' expected but `x' found
-
--x
- ^
-[1.1] failure: string matching regex `\d+' expected but `x' found
-
-x
-^
-[1.3] parsed: (Some(-)~5)
-[1.2] parsed: (None~5)
-[1.2] error: Number expected!
-
--x
- ^
-[1.1] error: Number expected!
-
-x
-^
-[1.3] parsed: (Some(-)~5)
-[1.2] parsed: (None~5)
diff --git a/test/files/run/parserNoSuccessMessage.scala b/test/files/run/parserNoSuccessMessage.scala
deleted file mode 100644
index 93aa252db0..0000000000
--- a/test/files/run/parserNoSuccessMessage.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-object Test extends scala.util.parsing.combinator.RegexParsers {
- def sign = "-"
- def number = "\\d+".r
- def p = sign.? ~ number withErrorMessage "Number expected!"
- def q = sign.? ~! number withErrorMessage "Number expected!"
-
- def main(args: Array[String]) {
- println(parseAll(p, "-x"))
- println(parseAll(p, "x"))
- println(parseAll(p, "-5"))
- println(parseAll(p, "5"))
- println(parseAll(q, "-x"))
- println(parseAll(q, "x"))
- println(parseAll(q, "-5"))
- println(parseAll(q, "5"))
- }
-}
-
-
diff --git a/test/files/run/reflect-priv-ctor.check b/test/files/run/reflect-priv-ctor.check
new file mode 100644
index 0000000000..a0fb1943b7
--- /dev/null
+++ b/test/files/run/reflect-priv-ctor.check
@@ -0,0 +1 @@
+privately constructed
diff --git a/test/files/run/reflect-priv-ctor.scala b/test/files/run/reflect-priv-ctor.scala
new file mode 100644
index 0000000000..9cb3e658cd
--- /dev/null
+++ b/test/files/run/reflect-priv-ctor.scala
@@ -0,0 +1,22 @@
+
+import language.postfixOps
+import reflect.runtime._
+import universe._
+
+object Test {
+
+ class Foo private () {
+ override def toString = "privately constructed"
+ }
+
+ def main(args: Array[String]): Unit = {
+
+ //val foo = new Foo // no access
+ val klass = currentMirror reflectClass typeOf[Foo].typeSymbol.asClass
+ val init = typeOf[Foo].members find { case m: MethodSymbol => m.isConstructor case _ => false } get
+ val ctor = klass reflectConstructor init.asMethod
+ val foo = ctor() // no access?
+ Console println foo
+ }
+}
+
diff --git a/test/files/run/reflection-magicsymbols-invoke.check b/test/files/run/reflection-magicsymbols-invoke.check
index f5258efeb7..352aefaf25 100644
--- a/test/files/run/reflection-magicsymbols-invoke.check
+++ b/test/files/run/reflection-magicsymbols-invoke.check
@@ -82,7 +82,7 @@ Array
it's important to print the list of Array's members
if some of them change (possibly, adding and/or removing magic symbols), we must update this test
constructor Array: (_length: Int)Array[T]
-constructor Object: ()java.lang.Object
+constructor Cloneable: ()java.lang.Cloneable
method !=: (x$1: Any)Boolean
method !=: (x$1: AnyRef)Boolean
method ##: ()Int
diff --git a/test/files/run/repl-backticks.check b/test/files/run/repl-backticks.check
deleted file mode 100644
index c0561abd7c..0000000000
--- a/test/files/run/repl-backticks.check
+++ /dev/null
@@ -1,2 +0,0 @@
-import java.lang.Thread.`yield`
-import scala.`package`.Throwable
diff --git a/test/files/run/repl-backticks.scala b/test/files/run/repl-backticks.scala
deleted file mode 100644
index ec2691d9c5..0000000000
--- a/test/files/run/repl-backticks.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-import scala.tools.nsc._
-
-object Test {
- val testCode = <code>
- import java.lang.Thread.`yield`
- import scala.`package`.Throwable
-
- `yield`
- </code>.text
-
- def main(args: Array[String]) {
- val settings = new Settings()
- settings.classpath.value = System.getProperty("java.class.path")
- val repl = new interpreter.IMain(settings)
- repl.interpret(testCode)
- }
-}
-
diff --git a/test/files/run/repl-trim-stack-trace.scala b/test/files/run/repl-trim-stack-trace.scala
index bbf46f2f19..70ee8e1840 100644
--- a/test/files/run/repl-trim-stack-trace.scala
+++ b/test/files/run/repl-trim-stack-trace.scala
@@ -1,10 +1,11 @@
-import scala.tools.partest.SessionTest
+import scala.tools.partest.{ SessionTest, Welcoming }
// SI-7740
-object Test extends SessionTest {
+object Test extends SessionTest with Welcoming {
def session =
-"""Type in expressions to have them evaluated.
+"""Welcome to Scala
+Type in expressions to have them evaluated.
Type :help for more information.
scala> def f = throw new Exception("Uh-oh")
@@ -13,6 +14,7 @@ f: Nothing
scala> f
java.lang.Exception: Uh-oh
at .f(<console>:7)
+ ... 69 elided
scala> def f = throw new Exception("")
f: Nothing
@@ -20,6 +22,7 @@ f: Nothing
scala> f
java.lang.Exception:
at .f(<console>:7)
+ ... 69 elided
scala> def f = throw new Exception
f: Nothing
@@ -27,7 +30,16 @@ f: Nothing
scala> f
java.lang.Exception
at .f(<console>:7)
+ ... 69 elided
scala> """
+ // normalize the "elided" lines because the frame count depends on test context
+ lazy val elided = """(\s+\.{3} )\d+( elided)""".r
+ override def normalize(line: String) = line match {
+ case welcome(w) => w
+ case elided(ellipsis, suffix) => s"$ellipsis???$suffix"
+ case s => s
+ }
+ override def expected = super.expected map normalize
}
diff --git a/test/files/run/t0486.check b/test/files/run/t0486.check
deleted file mode 100644
index dd1ec28223..0000000000
--- a/test/files/run/t0486.check
+++ /dev/null
@@ -1,8 +0,0 @@
-<wsdl:definitions name="service1" xmlns:tns="target1">
- </wsdl:definitions>
-<wsdl:definitions name="service2" xmlns:tns="target2">
- </wsdl:definitions>
-<wsdl:definitions name="service3" xmlns:tns="target3">
- </wsdl:definitions>
-<wsdl:definitions name="service4" xmlns:tns="target4">
- </wsdl:definitions>
diff --git a/test/files/run/t0486.scala b/test/files/run/t0486.scala
deleted file mode 100644
index d3ed8f4225..0000000000
--- a/test/files/run/t0486.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-object Test extends App {
- import scala.xml._
-
- def wsdlTemplate1(serviceName: String): Node =
- <wsdl:definitions name={serviceName} xmlns:tns = { "target1" } >
- </wsdl:definitions>;
-
- def wsdlTemplate2(serviceName: String, targetNamespace: String): Node =
- <wsdl:definitions name={serviceName} xmlns:tns = { targetNamespace } >
- </wsdl:definitions>;
-
- def wsdlTemplate3(serviceName: String): Node =
- <wsdl:definitions name={serviceName} xmlns:tns = { Text("target3") } >
- </wsdl:definitions>;
-
- def wsdlTemplate4(serviceName: String, targetNamespace: () => String): Node =
- <wsdl:definitions name={serviceName} xmlns:tns = { targetNamespace() } >
- </wsdl:definitions>;
-
- println(wsdlTemplate1("service1"))
- println(wsdlTemplate2("service2", "target2"))
- println(wsdlTemplate3("service3"))
- println(wsdlTemplate4("service4", () => "target4"))
-}
diff --git a/test/files/run/t0663.check b/test/files/run/t0663.check
deleted file mode 100755
index dd9be2af70..0000000000
--- a/test/files/run/t0663.check
+++ /dev/null
@@ -1 +0,0 @@
-<feed/>
diff --git a/test/files/run/t0663.scala b/test/files/run/t0663.scala
deleted file mode 100644
index dd0326d4e3..0000000000
--- a/test/files/run/t0663.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-object Test extends App {
- val src = scala.io.Source.fromString("<?xml version='1.0' encoding='UTF-8'?><feed/>")
- val parser = xml.parsing.ConstructingParser.fromSource(src, true)
- println(parser.document)
-}
-
diff --git a/test/files/run/t0700.check b/test/files/run/t0700.check
deleted file mode 100644
index b4eabbab1a..0000000000
--- a/test/files/run/t0700.check
+++ /dev/null
@@ -1,2 +0,0 @@
-[3.2] parsed: List(2, 2, 2)
-[3.2] parsed: List(2, 2, 2)
diff --git a/test/files/run/t0700.scala b/test/files/run/t0700.scala
deleted file mode 100644
index 5a7180528c..0000000000
--- a/test/files/run/t0700.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-import java.io.{File,StringReader}
-
-import scala.util.parsing.combinator.Parsers
-import scala.util.parsing.input.{CharArrayReader, StreamReader}
-
-class TestParsers extends Parsers {
- type Elem = Char
-
- def p: Parser[List[Int]] = rep(p1 | p2)
- def p1: Parser[Int] = 'a' ~ nl ~ 'b' ~ nl ^^^ 1
- def p2: Parser[Int] = 'a' ~ nl ^^^ 2
- def nl: Parser[Int] = rep(accept('\n') | accept('\r')) ^^^ 0
-}
-
-object Test {
- def main(args: Array[String]): Unit = {
- val tstParsers = new TestParsers
- val s = "a\na\na"
- val r1 = new CharArrayReader(s.toCharArray())
- val r2 = StreamReader(new StringReader(s))
- println(tstParsers.p(r1))
- println(tstParsers.p(r2))
- }
-}
diff --git a/test/files/run/t1079.scala b/test/files/run/t1079.scala
deleted file mode 100644
index ce435d254b..0000000000
--- a/test/files/run/t1079.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object Test extends App {
- println(<t user:tag=""/> == <t user:tag="X"/>)
-}
diff --git a/test/files/run/t1100.check b/test/files/run/t1100.check
deleted file mode 100644
index d3a49a47df..0000000000
--- a/test/files/run/t1100.check
+++ /dev/null
@@ -1,4 +0,0 @@
-[1.4] error: errors are propagated
-
-aaab
- ^
diff --git a/test/files/run/t1100.scala b/test/files/run/t1100.scala
deleted file mode 100644
index 6b95fd6ecb..0000000000
--- a/test/files/run/t1100.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-import scala.util.parsing.combinator.Parsers
-import scala.util.parsing.input.CharSequenceReader
-
-class TestParsers extends Parsers {
- type Elem = Char
-
- def p: Parser[List[Char]] = rep1(p1)
- def p1: Parser[Char] = accept('a') | err("errors are propagated")
-}
-
-object Test {
- def main(args: Array[String]): Unit = {
- val tstParsers = new TestParsers
- val s = new CharSequenceReader("aaab")
- println(tstParsers.p(s))
- }
-}
diff --git a/test/files/run/t1500.check b/test/files/run/t1500.check
deleted file mode 100644
index 94a169333b..0000000000
--- a/test/files/run/t1500.check
+++ /dev/null
@@ -1,3 +0,0 @@
-defined class posingAs
-resolve: [A, B](x: A @posingAs[B])B
-x: Any = 7
diff --git a/test/files/run/t1500.scala b/test/files/run/t1500.scala
deleted file mode 100644
index 75a6e31cdf..0000000000
--- a/test/files/run/t1500.scala
+++ /dev/null
@@ -1,46 +0,0 @@
-import scala.tools.nsc._
-
-object Test {
-
- /**
- * Type inference overlooks constraints posed by type parameters in annotations on types.
- */
-
- val testCode = <code>
-
- class posingAs[A] extends annotation.TypeConstraint
-
- def resolve[A,B](x: A @posingAs[B]): B = x.asInstanceOf[B]
-
- val x = resolve(7: @posingAs[Any])
-
- </code>.text
-
- def main(args: Array[String]) {
-
- val settings = new Settings()
- settings.classpath.value = System.getProperty("java.class.path")
- val tool = new interpreter.IMain(settings)
- val global = tool.global
-
- import global._
- import definitions._
-
- object checker extends AnnotationChecker {
-
- /** Check annotations to decide whether tpe1 <:< tpe2 */
- def annotationsConform(tpe1: Type, tpe2: Type): Boolean = {
-
- tpe1.annotations.forall(a1 => tpe2.annotations.forall(a2 => a1.atp <:< a2.atp))
-
- }
- }
-
- global.addAnnotationChecker(checker)
-
- tool.interpret(testCode)
-
- }
-
-}
-
diff --git a/test/files/run/t1501.check b/test/files/run/t1501.check
deleted file mode 100644
index f0fa9112a5..0000000000
--- a/test/files/run/t1501.check
+++ /dev/null
@@ -1,3 +0,0 @@
-defined class xyz
-loopWhile: [T](cond: => Boolean)(body: => Unit @xyz[T])Unit @xyz[T]
-test: ()Unit @xyz[Int]
diff --git a/test/files/run/t1501.scala b/test/files/run/t1501.scala
deleted file mode 100644
index 71ad0aeb5c..0000000000
--- a/test/files/run/t1501.scala
+++ /dev/null
@@ -1,56 +0,0 @@
-import scala.tools.nsc._
-
-object Test {
-
- /**
- * ...
- */
-
- val testCode = <code>
-
- class xyz[A] extends annotation.TypeConstraint
-
- def loopWhile[T](cond: =>Boolean)(body: =>(Unit @xyz[T])): Unit @ xyz[T] = {{
- if (cond) {{
- body
- loopWhile[T](cond)(body)
- }}
- }}
-
- def test() = {{
- var x = 7
- loopWhile(x != 0) {{
- x = x - 1
- (): @xyz[Int]
- }}
- }}
-
- </code>.text
-
- def main(args: Array[String]) {
- val settings = new Settings()
- settings.classpath.value = System.getProperty("java.class.path")
- val tool = new interpreter.IMain(settings)
- val global = tool.global
-
- import global._
- import definitions._
-
- object checker extends AnnotationChecker {
-
- /** Check annotations to decide whether tpe1 <:< tpe2 */
- def annotationsConform(tpe1: Type, tpe2: Type): Boolean = {
-
- tpe1.annotations.forall(a1 => tpe2.annotations.forall(a2 => a1.atp <:< a2.atp))
-
- }
- }
-
- global.addAnnotationChecker(checker)
-
- tool.interpret(testCode)
-
- }
-
-}
-
diff --git a/test/files/run/t1620.check b/test/files/run/t1620.check
deleted file mode 100755
index afa1e6acd5..0000000000
--- a/test/files/run/t1620.check
+++ /dev/null
@@ -1,6 +0,0 @@
-<?xml version='1.0' encoding='utf-8'?>
-<!DOCTYPE foo PUBLIC "-//Foo Corp//DTD 1.0//EN" "foo.dtd">
-<foo/>
-<?xml version='1.0' encoding='utf-8'?>
-<!DOCTYPE foo PUBLIC "-//Foo Corp//DTD 1.0//EN">
-<foo/>
diff --git a/test/files/run/t1620.scala b/test/files/run/t1620.scala
deleted file mode 100644
index e8ea06eb5f..0000000000
--- a/test/files/run/t1620.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-import java.io.PrintWriter
-import scala.xml.XML
-import scala.xml.dtd.{DocType, PublicID}
-
-object Test extends App {
- val dt = DocType("foo", PublicID("-//Foo Corp//DTD 1.0//EN", "foo.dtd"), Seq())
- val pw = new PrintWriter(System.out)
- XML.write(pw, <foo/>, "utf-8", true, dt)
- pw.println()
- pw.flush()
-
- val dt2 = DocType("foo", PublicID("-//Foo Corp//DTD 1.0//EN", null), Seq())
- XML.write(pw, <foo/>, "utf-8", true, dt2)
- pw.println()
- pw.flush()
-}
diff --git a/test/files/run/t1773.scala b/test/files/run/t1773.scala
deleted file mode 100644
index c50b62512f..0000000000
--- a/test/files/run/t1773.scala
+++ /dev/null
@@ -1,12 +0,0 @@
-object Test extends App
-{
- val xs = List(
- <a></a>,
- <a/>,
- <a>{ xml.NodeSeq.Empty }</a>,
- <a>{""}</a>,
- <a>{ if (true) "" else "I like turtles" }</a>
- )
-
- for (x1 <- xs; x2 <- xs) assert (x1 xml_== x2)
-}
diff --git a/test/files/run/t2124.check b/test/files/run/t2124.check
deleted file mode 100755
index 51b40469aa..0000000000
--- a/test/files/run/t2124.check
+++ /dev/null
@@ -1 +0,0 @@
-<p><lost/><q/></p>
diff --git a/test/files/run/t2124.scala b/test/files/run/t2124.scala
deleted file mode 100644
index a4fd654d76..0000000000
--- a/test/files/run/t2124.scala
+++ /dev/null
@@ -1,25 +0,0 @@
-import scala.xml._
-
-import scala.xml.transform._
-
-object Test {
- val sampleXml = <p><lost/><t><s><r></r></s></t></p>
-
- def main(args: scala.Array[String]) {
-
- println(new RuleTransformer(new RewriteRule {
-
- override def transform(n: Node): NodeSeq = {
- val result = n match {
- case <t>{_*}</t> => <q/>
-
- case n => n
-
- }
-// println ("Rewriting '" +n+ "' to: '" + result+ "'")
-
- result
- }
- }).transform(sampleXml))
- }
-}
diff --git a/test/files/run/t2125.check b/test/files/run/t2125.check
deleted file mode 100755
index 51b40469aa..0000000000
--- a/test/files/run/t2125.check
+++ /dev/null
@@ -1 +0,0 @@
-<p><lost/><q/></p>
diff --git a/test/files/run/t2125.scala b/test/files/run/t2125.scala
deleted file mode 100644
index a10ed9827b..0000000000
--- a/test/files/run/t2125.scala
+++ /dev/null
@@ -1,25 +0,0 @@
-import scala.xml._
-
-import scala.xml.transform._
-
-object Test {
-
- val sampleXml = <xml:group><p><lost/><t><s><r></r></s></t></p></xml:group>
-
- def main(args: scala.Array[String]) {
- println(new RuleTransformer(new RewriteRule {
-
- override def transform(n: Node): NodeSeq = {
-
- val result = n match {
-
- case <t>{_*}</t> => <q/>
-
- case n => n
- }
-// println ("Rewriting '" +n+ "' to: '" + result+ "'")
- result
- }
- }).transform(sampleXml))
- }
-}
diff --git a/test/files/run/t2276.check b/test/files/run/t2276.check
deleted file mode 100644
index 95f51c8e29..0000000000
--- a/test/files/run/t2276.check
+++ /dev/null
@@ -1,8 +0,0 @@
-<root>
- <subnode>
- <version>2</version>
- </subnode>
- <contents>
- <version>2</version>
- </contents>
- </root>
diff --git a/test/files/run/t2276.scala b/test/files/run/t2276.scala
deleted file mode 100644
index f0404e5fab..0000000000
--- a/test/files/run/t2276.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-import scala.xml._
-import scala.xml.transform._
-
-object Test extends App {
- val inputXml : Node =
- <root>
- <subnode>
- <version>1</version>
- </subnode>
- <contents>
- <version>1</version>
- </contents>
- </root>
-
- object t1 extends RewriteRule {
- override def transform(n: Node): Seq[Node] = n match {
- case <version>{x}</version> if x.toString.toInt < 4 => <version>{x.toString.toInt+1}</version>
- case other => other
- }
- }
-
- val ruleTransformer = new RuleTransformer(t1)
- println(ruleTransformer(inputXml))
-}
diff --git a/test/files/run/t2354.scala b/test/files/run/t2354.scala
deleted file mode 100644
index 5419911ac3..0000000000
--- a/test/files/run/t2354.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-import scala.xml.parsing._
-import scala.io.Source
-
-object Test
-{
- val xml_good = "<title><![CDATA[Hello [tag]]]></title>"
- val xml_bad = "<title><![CDATA[Hello [tag] ]]></title>"
-
- val parser1 = ConstructingParser.fromSource(Source.fromString(xml_good),false)
- val parser2 = ConstructingParser.fromSource(Source.fromString(xml_bad),false)
-
- def main(args: Array[String]): Unit = {
- parser1.document
- parser2.document
- }
-}
-
diff --git a/test/files/run/t2721.check b/test/files/run/t2721.check
deleted file mode 100644
index 2bd7656b36..0000000000
--- a/test/files/run/t2721.check
+++ /dev/null
@@ -1,2 +0,0 @@
-root:-rootVal-sub:-subVal-
-root:-rootVal-sub:-subVal-
diff --git a/test/files/run/t2721.scala b/test/files/run/t2721.scala
deleted file mode 100644
index 93af884a60..0000000000
--- a/test/files/run/t2721.scala
+++ /dev/null
@@ -1,12 +0,0 @@
-object Test
-{
- val xml1 = <root xmlns:ns="nsUri" ns:at="rootVal"><sub ns:at="subVal"/></root>
- val xml2= scala.xml.XML.loadString("""<root xmlns:ns="nsUri" ns:at="rootVal"><sub ns:at="subVal"/></root>""")
-
- def backslashSearch(x: xml.Elem) = "root:-"+(x \ "@{nsUri}at") +"-sub:-"+(x \ "sub" \ "@{nsUri}at") +"-"
-
- def main(args: Array[String]): Unit = {
- println(backslashSearch(xml1))
- println(backslashSearch(xml2))
- }
-}
diff --git a/test/files/run/t3705.scala b/test/files/run/t3705.scala
deleted file mode 100644
index 3ebf6fc95d..0000000000
--- a/test/files/run/t3705.scala
+++ /dev/null
@@ -1,17 +0,0 @@
-// package foo
-
-import scala.xml._
-object Test {
- // guard caused verifyerror in oldpatmat
- def updateNodes(ns: Seq[Node]): Seq[Node] =
- for(subnode <- ns) yield subnode match {
- case <d>{_}</d> if true => <d>abc</d>
- case Elem(prefix, label, attribs, scope, children @ _*) =>
- Elem(prefix, label, attribs, scope, minimizeEmpty = true, updateNodes(children) : _*)
- case other => other
- }
- def main(args: Array[String]): Unit = {
- updateNodes(<b />)
- }
-}
-
diff --git a/test/files/run/t3886.scala b/test/files/run/t3886.scala
deleted file mode 100644
index 1e8e7ad252..0000000000
--- a/test/files/run/t3886.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-object Test {
- def main(args: Array[String]) {
- assert( <k a="1" b="2"/> == <k a="1" b="2"/> )
- assert( <k a="1" b="2"/> != <k a="1" b="3"/> )
- assert( <k a="1" b="2"/> != <k a="2" b="2"/> )
-
- assert( <k a="1" b="2"/> != <k/> )
- assert( <k a="1" b="2"/> != <k a="1"/> )
- assert( <k a="1" b="2"/> != <k b="2"/> )
- }
-}
diff --git a/test/files/run/t4124.check b/test/files/run/t4124.check
deleted file mode 100644
index 66a0092d93..0000000000
--- a/test/files/run/t4124.check
+++ /dev/null
@@ -1,4 +0,0 @@
-hi
-hi
-bye
-bye
diff --git a/test/files/run/t4124.scala b/test/files/run/t4124.scala
deleted file mode 100644
index 9f35b57ce3..0000000000
--- a/test/files/run/t4124.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-import xml.Node
-
-object Test extends App {
- val body: Node = <elem>hi</elem>
- println ((body: AnyRef, "foo") match {
- case (node: Node, "bar") => "bye"
- case (ser: Serializable, "foo") => "hi"
- })
-
- println ((body, "foo") match {
- case (node: Node, "bar") => "bye"
- case (ser: Serializable, "foo") => "hi"
- })
-
- println ((body: AnyRef, "foo") match {
- case (node: Node, "foo") => "bye"
- case (ser: Serializable, "foo") => "hi"
- })
-
- println ((body: AnyRef, "foo") match {
- case (node: Node, "foo") => "bye"
- case (ser: Serializable, "foo") => "hi"
- })
-}
diff --git a/test/files/run/t4138.check b/test/files/run/t4138.check
deleted file mode 100644
index f561b5e6b0..0000000000
--- a/test/files/run/t4138.check
+++ /dev/null
@@ -1,2 +0,0 @@
-[1.45] parsed: "lir 'de\' ' \\ \n / upa \"new\" \t parsing"
-[1.5] parsed: "s "
diff --git a/test/files/run/t4138.scala b/test/files/run/t4138.scala
deleted file mode 100644
index 131489e581..0000000000
--- a/test/files/run/t4138.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-object Test extends App {
- object p extends scala.util.parsing.combinator.JavaTokenParsers
-
- println(p.parse(p.stringLiteral, """"lir 'de\' ' \\ \n / upa \"new\" \t parsing""""))
- println(p.parse(p.stringLiteral, """"s " lkjse""""))
-}
diff --git a/test/files/run/t4387.scala b/test/files/run/t4387.scala
deleted file mode 100644
index 68cbe97d08..0000000000
--- a/test/files/run/t4387.scala
+++ /dev/null
@@ -1,12 +0,0 @@
-object Test {
- import xml.XML.loadString
- def mkElem(arg: String) = <foo a="1" b="2" c="3" d="4" e={arg} />
-
- val x1 = mkElem("5")
- val x2 = mkElem("50")
-
- def main(args: Array[String]): Unit = {
- assert(x1 == loadString("" + x1))
- assert(x2 != loadString("" + x1))
- }
-}
diff --git a/test/files/run/t4929.check b/test/files/run/t4929.check
deleted file mode 100644
index 0f0c913d55..0000000000
--- a/test/files/run/t4929.check
+++ /dev/null
@@ -1 +0,0 @@
-success \ No newline at end of file
diff --git a/test/files/run/t4929.scala b/test/files/run/t4929.scala
deleted file mode 100644
index 1b0e8672d5..0000000000
--- a/test/files/run/t4929.scala
+++ /dev/null
@@ -1,43 +0,0 @@
-import scala.util.parsing.json._
-import java.util.concurrent._
-import collection.JavaConversions._
-
-@deprecated("Suppress warnings", since="2.11")
-object Test extends App {
-
- val LIMIT = 2000
- val THREAD_COUNT = 20
- val count = new java.util.concurrent.atomic.AtomicInteger(0)
-
- val begin = new CountDownLatch(THREAD_COUNT)
- val finish = new CountDownLatch(THREAD_COUNT)
-
- val errors = new ConcurrentLinkedQueue[Throwable]
-
- (1 to THREAD_COUNT) foreach { i =>
- val thread = new Thread {
- override def run() {
- begin.await(1, TimeUnit.SECONDS)
- try {
- while (count.getAndIncrement() < LIMIT && errors.isEmpty) {
- JSON.parseFull("""{"foo": [1,2,3,4]}""")
- }
- } catch {
- case t: Throwable => errors.add(t)
- }
-
- finish.await(10, TimeUnit.SECONDS)
- }
- }
-
- thread.setDaemon(true)
- thread.start()
-
- }
-
-
- errors foreach { throw(_) }
-
- println("success")
-
-}
diff --git a/test/files/run/t5052.scala b/test/files/run/t5052.scala
deleted file mode 100644
index 9e418e8ac5..0000000000
--- a/test/files/run/t5052.scala
+++ /dev/null
@@ -1,6 +0,0 @@
-object Test extends App {
- assert(<elem attr={null:String} /> xml_== <elem />)
- assert(<elem attr={None} /> xml_== <elem />)
- assert(<elem /> xml_== <elem attr={null:String} />)
- assert(<elem /> xml_== <elem attr={None} />)
-}
diff --git a/test/files/run/t5115.scala b/test/files/run/t5115.scala
deleted file mode 100644
index cf25214715..0000000000
--- a/test/files/run/t5115.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-import scala.collection.Iterable
-
-object Test extends App {
- def assertHonorsIterableContract(i: Iterable[_]) = assert(i.size == i.iterator.size)
-
- assertHonorsIterableContract(<a/>.attributes)
- assertHonorsIterableContract(<a x=""/>.attributes)
- assertHonorsIterableContract(<a y={None}/>.attributes)
- assertHonorsIterableContract(<a y={None} x=""/>.attributes)
- assertHonorsIterableContract(<a a="" y={None} />.attributes)
- assertHonorsIterableContract(<a y={null:String}/>.attributes)
- assertHonorsIterableContract(<a y={null:String} x=""/>.attributes)
- assertHonorsIterableContract(<a a="" y={null:String} />.attributes)
-}
diff --git a/test/files/run/t5514.check b/test/files/run/t5514.check
deleted file mode 100644
index c68f7c9029..0000000000
--- a/test/files/run/t5514.check
+++ /dev/null
@@ -1,19 +0,0 @@
-constructed reader: 10
-constructed reader: 9
-constructed reader: 8
-constructed reader: 7
-constructed reader: 6
-constructed reader: 5
-constructed reader: 4
-constructed reader: 3
-constructed reader: 2
-constructed reader: 1
-constructed reader: 0
-[0.0] parsed: List(s10, s9, s8, s7, s6, s5, s4, s3, s2, s1)
-constructed reader: 10
-constructed reader: 9
-constructed reader: 8
-constructed reader: 7
-constructed reader: 6
-constructed reader: 5
-[0.0] parsed: List(s10, s9, s8, s7, s6) \ No newline at end of file
diff --git a/test/files/run/t5514.scala b/test/files/run/t5514.scala
deleted file mode 100644
index efd5ba6cb9..0000000000
--- a/test/files/run/t5514.scala
+++ /dev/null
@@ -1,35 +0,0 @@
-
-
-
-import scala.io.Source
-import scala.util.parsing.combinator.Parsers
-import scala.util.parsing.input.Reader
-import scala.util.parsing.input.Position
-
-
-
-class DemoReader(n: Int) extends Reader[String] {
- def atEnd = n == 0
- def first = if (n >= 0) "s" + n else throw new IllegalArgumentException("No more input.")
- def rest = new DemoReader(n - 1)
- def pos = new Position {
- def line = 0
- def column = 0
- def lineContents = first
- }
- println("constructed reader: " + n)
-}
-
-
-object Test extends App with Parsers {
- type Elem = String
- def startsWith(prefix: String) = acceptIf(_ startsWith prefix)("Error: " + _)
-
- val resrep = startsWith("s").*(new DemoReader(10))
- Console println resrep
-
- val resrep5 = repN(5, startsWith("s"))(new DemoReader(10))
- Console println resrep5
-}
-
-
diff --git a/test/files/run/t5843.check b/test/files/run/t5843.check
deleted file mode 100644
index 2bf97f4cdb..0000000000
--- a/test/files/run/t5843.check
+++ /dev/null
@@ -1,9 +0,0 @@
- foo="1"
- bar="2" foo="1"
-null
- bar="2"
- foo="1"
- bar="2"
- foo="1"
- bar="2" foo="1"
- bar="2" foo="1"
diff --git a/test/files/run/t5843.scala b/test/files/run/t5843.scala
deleted file mode 100644
index 43d588c7b7..0000000000
--- a/test/files/run/t5843.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-object Test extends App {
- val foo = scala.xml.Attribute(null, "foo", "1", scala.xml.Null)
- val bar = scala.xml.Attribute(null, "bar", "2", foo)
- println(foo)
- println(bar)
- println(scala.xml.TopScope.getURI(foo.pre))
- println(bar remove "foo")
- println(bar remove "bar")
- println(bar remove (null, scala.xml.TopScope, "foo"))
- println(bar remove (null, scala.xml.TopScope, "bar"))
-
- val ns = scala.xml.NamespaceBinding(null, "uri", scala.xml.TopScope)
- println(bar remove (null, ns, "foo"))
- println(bar remove (null, ns, "bar"))
-}
diff --git a/test/files/run/t6392b.check b/test/files/run/t6392b.check
index 2afc48495f..1ccfced1c6 100644
--- a/test/files/run/t6392b.check
+++ b/test/files/run/t6392b.check
@@ -1 +1 @@
-ModuleDef(Modifiers(), TermName("C"), Template(List(Select(Ident(scala#PK), TypeName("AnyRef")#TPE)), emptyValDef, List(DefDef(Modifiers(), nme.CONSTRUCTOR, List(), List(List()), TypeTree(), Block(List(Apply(Select(Super(This(TypeName("C")), tpnme.EMPTY), nme.CONSTRUCTOR#PCTOR), List())), Literal(Constant(())))))))
+ModuleDef(Modifiers(), TermName("C")#MOD, Template(List(Select(Ident(scala#PK), TypeName("AnyRef")#TPE)), emptyValDef, List(DefDef(Modifiers(), nme.CONSTRUCTOR#PCTOR, List(), List(List()), TypeTree(), Block(List(Apply(Select(Super(This(TypeName("C")), tpnme.EMPTY), nme.CONSTRUCTOR#PCTOR), List())), Literal(Constant(())))))))
diff --git a/test/files/run/t6939.scala b/test/files/run/t6939.scala
deleted file mode 100644
index 9fe721555f..0000000000
--- a/test/files/run/t6939.scala
+++ /dev/null
@@ -1,13 +0,0 @@
-object Test extends App {
- val foo = <x:foo xmlns:x="http://foo.com/"><x:bar xmlns:x="http://bar.com/"><x:baz/></x:bar></x:foo>
- assert(foo.child.head.scope.toString == """ xmlns:x="http://bar.com/"""")
-
- val fooDefault = <foo xmlns="http://foo.com/"><bar xmlns="http://bar.com/"><baz/></bar></foo>
- assert(fooDefault.child.head.scope.toString == """ xmlns="http://bar.com/"""")
-
- val foo2 = scala.xml.XML.loadString("""<x:foo xmlns:x="http://foo.com/"><x:bar xmlns:x="http://bar.com/"><x:baz/></x:bar></x:foo>""")
- assert(foo2.child.head.scope.toString == """ xmlns:x="http://bar.com/"""")
-
- val foo2Default = scala.xml.XML.loadString("""<foo xmlns="http://foo.com/"><bar xmlns="http://bar.com/"><baz/></bar></foo>""")
- assert(foo2Default.child.head.scope.toString == """ xmlns="http://bar.com/"""")
-}
diff --git a/test/files/run/t6989.check b/test/files/run/t6989.check
index 8943792115..43d4bbaf02 100644
--- a/test/files/run/t6989.check
+++ b/test/files/run/t6989.check
@@ -101,6 +101,12 @@ isProtected = false
isPublic = false
privateWithin = <none>
============
+sym = constructor $PrivateJavaClass, signature = ()JavaClass_1.this.$PrivateJavaClass, owner = class $PrivateJavaClass
+isPrivate = false
+isProtected = false
+isPublic = true
+privateWithin = <none>
+============
sym = value this$0, signature = foo.JavaClass_1, owner = class $PrivateJavaClass
isPrivate = false
isProtected = false
@@ -119,6 +125,12 @@ isProtected = true
isPublic = false
privateWithin = package foo
============
+sym = constructor $ProtectedJavaClass, signature = ()JavaClass_1.this.$ProtectedJavaClass, owner = class $ProtectedJavaClass
+isPrivate = false
+isProtected = false
+isPublic = true
+privateWithin = <none>
+============
sym = value this$0, signature = foo.JavaClass_1, owner = class $ProtectedJavaClass
isPrivate = false
isProtected = false
@@ -173,6 +185,12 @@ isProtected = false
isPublic = false
privateWithin = <none>
============
+sym = constructor PrivateStaticJavaClass, signature = ()foo.JavaClass_1.PrivateStaticJavaClass, owner = class PrivateStaticJavaClass
+isPrivate = false
+isProtected = false
+isPublic = true
+privateWithin = <none>
+============
sym = object PrivateStaticJavaClass, signature = foo.JavaClass_1.PrivateStaticJavaClass.type, owner = object JavaClass_1
isPrivate = true
isProtected = false
@@ -185,6 +203,12 @@ isProtected = false
isPublic = false
privateWithin = <none>
============
+sym = constructor ProtectedStaticJavaClass, signature = ()foo.JavaClass_1.ProtectedStaticJavaClass, owner = class ProtectedStaticJavaClass
+isPrivate = false
+isProtected = false
+isPublic = true
+privateWithin = <none>
+============
sym = object ProtectedStaticJavaClass, signature = foo.JavaClass_1.ProtectedStaticJavaClass.type, owner = object JavaClass_1
isPrivate = true
isProtected = false
diff --git a/test/files/run/t7074.check b/test/files/run/t7074.check
deleted file mode 100644
index ab9cf11f16..0000000000
--- a/test/files/run/t7074.check
+++ /dev/null
@@ -1,9 +0,0 @@
-<a/>
-<a b="2" c="3" d="1"/>
-<a b="2" c="4" d="1" e="3" f="5"/>
-<a b="5" c="4" d="3" e="2" f="1"/>
-<a b="1" c="2" d="3" e="4" f="5"/>
-<a a:b="2" a:c="3" a:d="1"/>
-<a a:b="2" a:c="4" a:d="1" a:e="3" a:f="5"/>
-<a a:b="5" a:c="4" a:d="3" a:e="2" a:f="1"/>
-<a a:b="1" a:c="2" a:d="3" a:e="4" a:f="5"/>
diff --git a/test/files/run/t7074.scala b/test/files/run/t7074.scala
deleted file mode 100644
index 693a076a7a..0000000000
--- a/test/files/run/t7074.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-import scala.xml.Utility.sort
-
-object Test extends App {
- println(sort(<a/>))
- println(sort(<a d="1" b="2" c="3"/>))
- println(sort(<a d="1" b="2" e="3" c="4" f="5"/>))
- println(sort(<a f="1" e="2" d="3" c="4" b="5"/>))
- println(sort(<a b="1" c="2" d="3" e="4" f="5"/>))
-
- println(sort(<a a:d="1" a:b="2" a:c="3"/>))
- println(sort(<a a:d="1" a:b="2" a:e="3" a:c="4" a:f="5"/>))
- println(sort(<a a:f="1" a:e="2" a:d="3" a:c="4" a:b="5"/>))
- println(sort(<a a:b="1" a:c="2" a:d="3" a:e="4" a:f="5"/>))
-}
-
diff --git a/test/files/jvm/xml02.check b/test/files/run/t7510.check
index e69de29bb2..e69de29bb2 100644
--- a/test/files/jvm/xml02.check
+++ b/test/files/run/t7510.check
diff --git a/test/files/run/t7510/Ann_1.java b/test/files/run/t7510/Ann_1.java
new file mode 100644
index 0000000000..c8c5b2035f
--- /dev/null
+++ b/test/files/run/t7510/Ann_1.java
@@ -0,0 +1,4 @@
+package foo;
+
+public @interface Ann_1 {
+} \ No newline at end of file
diff --git a/test/files/run/t7510/Test_2.scala b/test/files/run/t7510/Test_2.scala
new file mode 100644
index 0000000000..7d7a95e0f2
--- /dev/null
+++ b/test/files/run/t7510/Test_2.scala
@@ -0,0 +1,9 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val tb = cm.mkToolBox()
+ tb.compile(tb.parse("@foo.Ann_1 class C"))
+}
+
diff --git a/test/files/run/t7775.scala b/test/files/run/t7775.scala
new file mode 100644
index 0000000000..5fb0327611
--- /dev/null
+++ b/test/files/run/t7775.scala
@@ -0,0 +1,17 @@
+import scala.concurrent.{duration, future, Await, ExecutionContext}
+import scala.tools.nsc.Settings
+import ExecutionContext.Implicits.global
+
+// Was failing pretty regularly with a ConcurrentModificationException as
+// WrappedProperties#systemProperties iterated directly over the mutable
+// global system properties map.
+object Test {
+ def main(args: Array[String]) {
+ val tries = 1000 // YMMV
+ val compiler = future {
+ for(_ <- 1 to tries) new Settings(_ => {})
+ }
+ for(i <- 1 to tries * 10) System.setProperty(s"foo$i", i.toString)
+ Await.result(compiler, duration.Duration.Inf)
+ }
+}
diff --git a/test/files/run/t7779.scala b/test/files/run/t7779.scala
new file mode 100644
index 0000000000..db32cb751f
--- /dev/null
+++ b/test/files/run/t7779.scala
@@ -0,0 +1,67 @@
+// -Xmax-classfile-length doesn't compress top-level classes.
+// class :::::::::::::::::::::::::::::::::::::::::::::::::
+
+trait Marker
+
+class Short extends Marker
+
+// We just test with member classes
+object O {
+ object ::::::::::::::::::::::::::::::::::::::::::::::::: extends Marker
+}
+class C {
+ class D {
+ class ::::::::::::::::::::::::::::::::::::::::::::::::: extends Marker
+ }
+}
+
+package pack {
+ // abbreviates to: $colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon to $read$$iw$$iw$$colon$colon$colon$colon$colon$colon$colon$colon$$$$c39b3f245029fbed9732fc888d44231b$$$$on$colon$colon$colon$colon$colon$colon$colon$colon$colon$colon
+ // class :::::::::::::::::::::::::::::::::::::::::::::::::
+
+ class Short extends Marker
+
+ // We just test with member classes
+ object O {
+ object ::::::::::::::::::::::::::::::::::::::::::::::::: extends Marker
+ }
+ class C {
+ class D {
+ class ::::::::::::::::::::::::::::::::::::::::::::::::: extends Marker
+ }
+ }
+ package p2 {
+ class Short extends Marker
+
+ object O {
+ object ::::::::::::::::::::::::::::::::::::::::::::::::: extends Marker
+ }
+ class C {
+ class D {
+ class ::::::::::::::::::::::::::::::::::::::::::::::::: extends Marker
+ }
+ }
+ }
+}
+
+
+object Test extends App {
+ import reflect.runtime.universe._
+ def test[T: TypeTag] = {
+ val tt = typeTag[T]
+ val clz = tt.mirror.runtimeClass(tt.tpe)
+ assert(classOf[Marker].isAssignableFrom(clz), clz.toString)
+ }
+
+ test[Short]
+ test[O.:::::::::::::::::::::::::::::::::::::::::::::::::.type]
+ test[C#D#`:::::::::::::::::::::::::::::::::::::::::::::::::`]
+
+ test[pack.Short]
+ test[pack.O.:::::::::::::::::::::::::::::::::::::::::::::::::.type]
+ test[pack.C#D#`:::::::::::::::::::::::::::::::::::::::::::::::::`]
+
+ test[pack.p2.Short]
+ test[pack.p2.O.:::::::::::::::::::::::::::::::::::::::::::::::::.type]
+ test[pack.p2.C#D#`:::::::::::::::::::::::::::::::::::::::::::::::::`]
+}
diff --git a/test/files/run/t7791-script-linenums.check b/test/files/run/t7791-script-linenums.check
new file mode 100644
index 0000000000..b7d969564a
--- /dev/null
+++ b/test/files/run/t7791-script-linenums.check
@@ -0,0 +1 @@
+hello, scripted test
diff --git a/test/files/run/t7791-script-linenums.scala b/test/files/run/t7791-script-linenums.scala
new file mode 100644
index 0000000000..d89b8d4c63
--- /dev/null
+++ b/test/files/run/t7791-script-linenums.scala
@@ -0,0 +1,16 @@
+
+import scala.tools.partest.ScriptTest
+
+object Test extends ScriptTest {
+ object ExceptionLine {
+ def unapply(e: Exception) = Some(e.getStackTrace()(0).getLineNumber)
+ }
+ override def show() = {
+ import util._
+ Try(super.show()) match {
+ case Failure(ExceptionLine(7)) => ()
+ case Failure(e) => e.printStackTrace()
+ case Success(_) => Console println "Expected error"
+ }
+ }
+}
diff --git a/test/files/run/t7791-script-linenums.script b/test/files/run/t7791-script-linenums.script
new file mode 100644
index 0000000000..403dcc2d28
--- /dev/null
+++ b/test/files/run/t7791-script-linenums.script
@@ -0,0 +1,8 @@
+#!/bin/bash
+exec ${SCALA_HOME}/bin/scala "$0" "$@" 2>&1
+!#
+
+Console println s"hello, scripted test"
+
+throw new RuntimeException("failing") // line 7
+
diff --git a/test/files/run/t7805-repl-i.check b/test/files/run/t7805-repl-i.check
new file mode 100644
index 0000000000..eecfff079a
--- /dev/null
+++ b/test/files/run/t7805-repl-i.check
@@ -0,0 +1,11 @@
+Loading t7805-repl-i.script...
+import util._
+
+Welcome to Scala
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala> Console println Try(8)
+Success(8)
+
+scala>
diff --git a/test/files/run/t7805-repl-i.scala b/test/files/run/t7805-repl-i.scala
new file mode 100644
index 0000000000..a4061689f0
--- /dev/null
+++ b/test/files/run/t7805-repl-i.scala
@@ -0,0 +1,42 @@
+
+import scala.tools.partest.{ ReplTest, Welcoming }
+import scala.tools.nsc.{ GenericRunnerSettings, Settings }
+import scala.tools.nsc.settings.MutableSettings
+
+object Test extends ReplTest with HangingRepl with Welcoming {
+ def script = testPath changeExtension "script"
+ override def transformSettings(s: Settings) = s match {
+ case m: MutableSettings =>
+ val t = new GenericRunnerSettings(s.errorFn)
+ m copyInto t
+ t processArgumentString s"-i $script"
+ t
+ case _ => s
+ }
+ def code = "Console println Try(8)"
+}
+
+object Resulting {
+ import scala.concurrent._
+ import scala.concurrent.duration._
+ implicit class AwaitResult[A](val f: Future[A]) extends AnyVal {
+ def resultWithin(d: Duration): A = Await.result(f, d)
+ }
+}
+
+/** Test that hangs the REPL.
+ * Usually that is the "before" case.
+ */
+trait HangingRepl extends ReplTest {
+ import scala.language.postfixOps
+ import scala.util._
+ import scala.concurrent._
+ import scala.concurrent.duration._
+ import ExecutionContext.Implicits._
+ import Resulting._
+ def timeout = 15 seconds
+ def hanging[A](a: =>A): A = future(a) resultWithin timeout
+ override def show() = Try(hanging(super.show())) recover {
+ case e => e.printStackTrace()
+ }
+}
diff --git a/test/files/run/t7805-repl-i.script b/test/files/run/t7805-repl-i.script
new file mode 100644
index 0000000000..eb2b8705f3
--- /dev/null
+++ b/test/files/run/t7805-repl-i.script
@@ -0,0 +1 @@
+import util._
diff --git a/test/files/run/t7817-tree-gen.check b/test/files/run/t7817-tree-gen.check
new file mode 100644
index 0000000000..4ed4b0d94a
--- /dev/null
+++ b/test/files/run/t7817-tree-gen.check
@@ -0,0 +1,104 @@
+
+
+Joint Compilation:
+
+ typer [ O] - O.this
+ pickler [ O] - O.this
+ refchecks [ O] - O.this
+ uncurry [ O] - O.this
+ specialize [ O] - O.this
+ explicitouter [ O] - O.this
+ erasure [ O] - O.this
+ posterasure [ O] - C.this.O()
+ flatten [ O] - C.this.O()
+ mixin [ O] - test.O()
+ cleanup [ O] - test.O()
+
+ typer [ P] - P.this
+ pickler [ P] - P.this
+ refchecks [ P] - P.this
+ uncurry [ P] - P.this
+ specialize [ P] - P.this
+ explicitouter [ P] - P.this
+ erasure [ P] - P.this
+ posterasure [ P] - D.this.P()
+ flatten [ P] - D.this.P()
+ mixin [ P] - P()
+ cleanup [ P] - P()
+
+ typer [ test2.PO] - PO.this
+ pickler [ test2.PO] - PO.this
+ refchecks [ test2.PO] - PO.this
+ uncurry [ test2.PO] - PO.this
+ specialize [ test2.PO] - PO.this
+ explicitouter [ test2.PO] - PO.this
+ erasure [ test2.PO] - PO.this
+ posterasure [ test2.PO] - test2.`package`.PO
+ flatten [ test2.PO] - test2.`package`.PO
+ mixin [ test2.PO] - test2.package$PO
+ cleanup [ test2.PO] - test2.package$PO
+
+ typer [ test2.bar] - `package`.this.bar
+ pickler [ test2.bar] - `package`.this.bar
+ refchecks [ test2.bar] - `package`.this.bar
+ uncurry [ test2.bar] - `package`.this.bar
+ specialize [ test2.bar] - `package`.this.bar
+ explicitouter [ test2.bar] - `package`.this.bar
+ erasure [ test2.bar] - `package`.this.bar
+ posterasure [ test2.bar] - test2.`package`.bar
+ flatten [ test2.bar] - test2.`package`.bar
+ mixin [ test2.bar] - test2.`package`.bar
+ cleanup [ test2.bar] - test2.`package`.bar
+
+
+
+Separate Compilation:
+
+ typer [ O] - O.this
+ pickler [ O] - O.this
+ refchecks [ O] - O.this
+ uncurry [ O] - O.this
+ specialize [ O] - O.this
+ explicitouter [ O] - O.this
+ erasure [ O] - O.this
+ posterasure [ O] - C.this.O()
+ flatten [ O] - C.this.O()
+ mixin [ O] - testSep.O()
+ cleanup [ O] - testSep.O()
+
+ typer [ P] - P.this
+ pickler [ P] - P.this
+ refchecks [ P] - P.this
+ uncurry [ P] - P.this
+ specialize [ P] - P.this
+ explicitouter [ P] - P.this
+ erasure [ P] - P.this
+ posterasure [ P] - DSep.this.P()
+ flatten [ P] - DSep.this.P()
+ mixin [ P] - P()
+ cleanup [ P] - P()
+
+ typer [ PO] - PO.this
+ pickler [ PO] - PO.this
+ refchecks [ PO] - PO.this
+ uncurry [ PO] - PO.this
+ specialize [ PO] - PO.this
+ explicitouter [ PO] - PO.this
+ erasure [ PO] - PO.this
+ posterasure [ PO] - test2.`package`.PO
+ flatten [ PO] - test2.`package`.PO
+ mixin [ PO] - test2.package$PO
+ cleanup [ PO] - test2.package$PO
+
+ typer [testSep2.bar] - `package`.this.bar
+ pickler [testSep2.bar] - `package`.this.bar
+ refchecks [testSep2.bar] - `package`.this.bar
+ uncurry [testSep2.bar] - `package`.this.bar
+ specialize [testSep2.bar] - `package`.this.bar
+ explicitouter [testSep2.bar] - `package`.this.bar
+ erasure [testSep2.bar] - `package`.this.bar
+ posterasure [testSep2.bar] - test2.`package`.bar
+ flatten [testSep2.bar] - test2.`package`.bar
+ mixin [testSep2.bar] - test2.`package`.bar
+ cleanup [testSep2.bar] - test2.`package`.bar
+
diff --git a/test/files/run/t7817-tree-gen.flags b/test/files/run/t7817-tree-gen.flags
new file mode 100644
index 0000000000..ce6e93b3da
--- /dev/null
+++ b/test/files/run/t7817-tree-gen.flags
@@ -0,0 +1 @@
+-Ynooptimise \ No newline at end of file
diff --git a/test/files/run/t7817-tree-gen.scala b/test/files/run/t7817-tree-gen.scala
new file mode 100644
index 0000000000..a8317fda6e
--- /dev/null
+++ b/test/files/run/t7817-tree-gen.scala
@@ -0,0 +1,65 @@
+import scala.tools.partest._
+
+// Testing that `mkAttributedRef` doesn't include the package object test.`package`,
+// under joint and separate compilation.
+
+package testSep { class C { object O } }
+package testSep2 { object `package` { object PO; def bar = 0 } }
+class DSep { object P }
+
+object Test extends CompilerTest {
+ import global._
+ override def extraSettings = super.extraSettings + " -d " + testOutput.path
+ override def sources = List(
+ """
+ package test { class C { object O } }
+ class D { object P }
+ package test2 { object `package` { object PO; def bar = 0 } }
+ """
+ )
+ def check(source: String, unit: CompilationUnit) = enteringTyper {
+ def checkTree(msg: String, t: => Tree) = {
+ val run = currentRun
+ import run._
+ val phases = List(typerPhase, picklerPhase, refchecksPhase, uncurryPhase, specializePhase,
+ explicitouterPhase, erasurePhase, posterasurePhase, flattenPhase, mixinPhase, cleanupPhase)
+ for (phase <- phases) {
+ enteringPhase(phase) {
+ val error = t.exists(t => t.symbol == NoSymbol)
+ val errorStr = if (error) "!!!" else " - "
+ println(f"$phase%18s [$msg%12s] $errorStr $t")
+ }
+ }
+ println("")
+ }
+ import rootMirror._
+
+ println("\n\nJoint Compilation:\n")
+
+ {
+ val c = staticClass("test.C")
+ val o = c.info.decl(TermName("O"))
+ checkTree("O", gen.mkAttributedQualifier(o.moduleClass.thisType))
+ val d = staticClass("D")
+ val p = d.info.decl(TermName("P"))
+ checkTree("P", gen.mkAttributedQualifier(p.moduleClass.thisType))
+ val po = staticModule("test2.package").moduleClass.info.decl(TermName("PO"))
+ checkTree("test2.PO", gen.mkAttributedQualifier(po.moduleClass.thisType))
+ checkTree("test2.bar", gen.mkAttributedRef(po.owner.info.decl(TermName("bar"))))
+ }
+
+ println("\n\nSeparate Compilation:\n")
+
+ {
+ val c = typeOf[testSep.C].typeSymbol
+ val o = c.info.decl(TermName("O"))
+ checkTree("O", gen.mkAttributedQualifier(o.moduleClass.thisType))
+ val d = staticClass("DSep")
+ val p = d.info.decl(TermName("P"))
+ checkTree("P", gen.mkAttributedQualifier(p.moduleClass.thisType))
+ val po = staticModule("test2.package").moduleClass.info.decl(TermName("PO"))
+ checkTree("PO", gen.mkAttributedQualifier(po.moduleClass.thisType))
+ checkTree("testSep2.bar", gen.mkAttributedRef(po.owner.info.decl(TermName("bar"))))
+ }
+ }
+}
diff --git a/test/files/run/t7817.scala b/test/files/run/t7817.scala
new file mode 100644
index 0000000000..905b8aeb09
--- /dev/null
+++ b/test/files/run/t7817.scala
@@ -0,0 +1,31 @@
+import language.reflectiveCalls
+
+package test {
+ class C1 {
+ object O {
+ def struct(s: {def foo: Any}) = s.foo
+ }
+ }
+ trait T {
+ object O {
+ def struct(s: {def foo: Any}) = s.foo
+ }
+ }
+ object O1 extends T
+
+ object O2 {
+ object O {
+ def struct(s: {def foo: Any}) = s.foo
+ }
+ }
+}
+
+object Test extends App {
+ object fooable { def foo = "foo" }
+ def check(result: Any) = assert(result == "foo", result.toString)
+
+ val s = new test.C1
+ check(s.O.struct(fooable))
+ check(test.O1.O.struct(fooable))
+ check(test.O2.O.struct(fooable))
+}
diff --git a/test/files/run/tailcalls.scala b/test/files/run/tailcalls.scala
index 1d4124e138..e5d8891cc7 100644
--- a/test/files/run/tailcalls.scala
+++ b/test/files/run/tailcalls.scala
@@ -391,7 +391,20 @@ object Test {
def isOdd(xs: List[Int]): TailRec[Boolean] =
if (xs.isEmpty) done(false) else tailcall(isEven(xs.tail))
+ def fib(n: Int): TailRec[Int] =
+ if (n < 2) done(n) else for {
+ x <- tailcall(fib(n - 1))
+ y <- tailcall(fib(n - 2))
+ } yield (x + y)
+
+ def rec(n: Int): TailRec[Int] =
+ if (n == 1) done(n) else for {
+ x <- tailcall(rec(n - 1))
+ } yield x
+
assert(isEven((1 to 100000).toList).result)
+ //assert(fib(40).result == 102334155) // Commented out, as it takes a long time
+ assert(rec(100000).result == 1)
}
diff --git a/test/files/run/t1079.check b/test/files/run/toolbox_current_run_compiles.check
index c508d5366f..da29283aaa 100644
--- a/test/files/run/t1079.check
+++ b/test/files/run/toolbox_current_run_compiles.check
@@ -1 +1,2 @@
+true
false
diff --git a/test/files/run/toolbox_current_run_compiles.scala b/test/files/run/toolbox_current_run_compiles.scala
new file mode 100644
index 0000000000..b48c998e64
--- /dev/null
+++ b/test/files/run/toolbox_current_run_compiles.scala
@@ -0,0 +1,28 @@
+package pkg {
+ import scala.reflect.macros.Context
+ import scala.language.experimental.macros
+
+ object Macros {
+ def impl[T: c.WeakTypeTag](c: Context) = {
+ import c.universe._
+ val sym = c.weakTypeOf[T].typeSymbol
+ val g = c.universe.asInstanceOf[scala.tools.nsc.Global]
+ c.Expr[Boolean](Literal(Constant(g.currentRun.compiles(sym.asInstanceOf[g.Symbol]))))
+ }
+ def compiles[T] = macro impl[T]
+ }
+}
+
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val cm = ru.runtimeMirror(getClass.getClassLoader)
+ val toolbox = cm.mkToolBox()
+ toolbox.eval(toolbox.parse("""{
+ class C
+ println(pkg.Macros.compiles[C])
+ println(pkg.Macros.compiles[Object])
+ }"""))
+} \ No newline at end of file
diff --git a/test/files/run/typed-annotated.check b/test/files/run/typed-annotated.check
new file mode 100644
index 0000000000..d81cc0710e
--- /dev/null
+++ b/test/files/run/typed-annotated.check
@@ -0,0 +1 @@
+42
diff --git a/test/files/run/typed-annotated/Macros_1.scala b/test/files/run/typed-annotated/Macros_1.scala
new file mode 100644
index 0000000000..dd18c63d90
--- /dev/null
+++ b/test/files/run/typed-annotated/Macros_1.scala
@@ -0,0 +1,17 @@
+import scala.reflect.macros.Context
+import language.experimental.macros
+
+class ann extends scala.annotation.StaticAnnotation
+
+object Macros {
+ def impl(c: Context) = {
+ import c.universe._
+ // val tpt = Annotated(Apply(Select(New(Ident(newTypeName("ann"))), nme.CONSTRUCTOR), List()), Ident(newTypeName("Int")))
+ val tpt = Annotated(Apply(Select(New(Ident(newTypeName("ann"))), nme.CONSTRUCTOR), List()), TypeTree(weakTypeOf[Int]))
+ c.Expr[Unit](Block(
+ List(ValDef(Modifiers(), newTermName("x"), tpt, Literal(Constant(42)))),
+ Apply(Ident(newTermName("println")), List(Ident(newTermName("x"))))))
+ }
+
+ def foo = macro impl
+} \ No newline at end of file
diff --git a/test/files/run/typed-annotated/Test_2.scala b/test/files/run/typed-annotated/Test_2.scala
new file mode 100644
index 0000000000..acfddae942
--- /dev/null
+++ b/test/files/run/typed-annotated/Test_2.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+ Macros.foo
+} \ No newline at end of file
diff --git a/test/files/run/unittest_io.scala b/test/files/run/unittest_io.scala
deleted file mode 100644
index 2c3dacdf91..0000000000
--- a/test/files/run/unittest_io.scala
+++ /dev/null
@@ -1,42 +0,0 @@
-
-@deprecated("Suppress warnings", since="2.11")
-object Test {
-
- def main(args: Array[String]) {
- UTF8Tests.run()
- SourceTest.run()
- }
-
- object UTF8Tests {
- def decode(ch: Int) = new String(Array(ch), 0, 1).getBytes("UTF-8")
-
- def run() {
- assert(new String( decode(0x004D), "utf8") == new String(Array(0x004D.asInstanceOf[Char])))
- assert(new String( decode(0x0430), "utf8") == new String(Array(0x0430.asInstanceOf[Char])))
- assert(new String( decode(0x4E8C), "utf8") == new String(Array(0x4E8C.asInstanceOf[Char])))
- assert(new String(decode(0x10302), "utf8") == new String(Array(0xD800.asInstanceOf[Char],
- 0xDF02.asInstanceOf[Char])))
- // a client
- val test = "{\"a\":\"\\u0022\"}"
- val expected = "a" -> "\""
-
- val parsed = scala.util.parsing.json.JSON.parseFull(test)
- val result = parsed == Some(Map(expected))
- if(result)
- assert(result)
- else {
- Console.println(parsed); assert(result)
- }
- }
- }
-
- object SourceTest {
- def run() {
- val s = "Here is a test string"
- val f = io.Source.fromBytes(s.getBytes("utf-8"))
- val b = new collection.mutable.ArrayBuffer[Char]()
- f.copyToBuffer(b)
- assert(s == new String(b.toArray))
- }
- }
-}
diff --git a/test/files/run/xml-attribute.check b/test/files/run/xml-attribute.check
deleted file mode 100644
index 3cfe3779fc..0000000000
--- a/test/files/run/xml-attribute.check
+++ /dev/null
@@ -1,12 +0,0 @@
-<t/>
-<t/>
-<t/>
-<t/>
-<t/>
-<t b="1" d="2"/>
-<t b="1" d="2"/>
-<t b="1" d="2"/>
-<t a="1" d="2"/>
-<t b="1" d="2"/>
-<t a="1" b="2" c="3"/>
-<t g="1" e="2" p:a="3" f:e="4" mgruhu:ji="5"/>
diff --git a/test/files/run/xml-attribute.scala b/test/files/run/xml-attribute.scala
deleted file mode 100644
index eb3956c41b..0000000000
--- a/test/files/run/xml-attribute.scala
+++ /dev/null
@@ -1,37 +0,0 @@
-import xml.Node
-
-object Test {
- def main(args: Array[String]): Unit = {
- val noAttr = <t/>
- val attrNull = <t a={ null: String }/>
- val attrNone = <t a={ None: Option[Seq[Node]] }/>
- val preAttrNull = <t p:a={ null: String }/>
- val preAttrNone = <t p:a={ None: Option[Seq[Node]] }/>
- assert(noAttr == attrNull)
- assert(noAttr == attrNone)
- assert(noAttr == preAttrNull)
- assert(noAttr == preAttrNone)
-
- println(noAttr)
- println(attrNull)
- println(attrNone)
- println(preAttrNull)
- println(preAttrNone)
-
- val xml1 = <t b="1" d="2"/>
- val xml2 = <t a={ null: String } p:a={ null: String } b="1" c={ null: String } d="2"/>
- val xml3 = <t b="1" c={ null: String } d="2" a={ null: String } p:a={ null: String }/>
- assert(xml1 == xml2)
- assert(xml1 == xml3)
-
- println(xml1)
- println(xml2)
- println(xml3)
-
- // Check if attribute order is retained
- println(<t a="1" d="2"/>)
- println(<t b="1" d="2"/>)
- println(<t a="1" b="2" c="3"/>)
- println(<t g="1" e="2" p:a="3" f:e="4" mgruhu:ji="5"/>)
- }
-}
diff --git a/test/files/run/xml-loop-bug.scala b/test/files/run/xml-loop-bug.scala
deleted file mode 100644
index dc155dbb02..0000000000
--- a/test/files/run/xml-loop-bug.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-import java.io.{ Console => _, _ }
-import scala.io._
-import scala.xml.parsing._
-object Test {
- def main(args: Array[String]): Unit = {
- val xml = "<!DOCTYPE xmeml SYSTEM> <xmeml> <sequence> </sequence> </xmeml> "
- val sink = new PrintStream(new ByteArrayOutputStream())
- (Console withOut sink) {
- (Console withErr sink) {
- ConstructingParser.fromSource((Source fromString xml), true).document.docElem
- }
- }
- }
-}
diff --git a/test/files/scalacheck/quasiquotes/DefinitionConstructionProps.scala b/test/files/scalacheck/quasiquotes/DefinitionConstructionProps.scala
new file mode 100644
index 0000000000..153e23d947
--- /dev/null
+++ b/test/files/scalacheck/quasiquotes/DefinitionConstructionProps.scala
@@ -0,0 +1,293 @@
+import org.scalacheck._
+import Prop._
+import Gen._
+import Arbitrary._
+
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.universe.build.ScalaDot
+import Flag._
+
+object DefinitionConstructionProps
+ extends QuasiquoteProperties("definition construction")
+ with ClassConstruction
+ with TraitConstruction
+ with TypeDefConstruction
+ with ValDefConstruction
+
+trait ClassConstruction { self: QuasiquoteProperties =>
+ val anyRef = ScalaDot(TypeName("AnyRef"))
+ val emtpyConstructor =
+ DefDef(Modifiers(), nme.CONSTRUCTOR, List(),
+ List(List()), TypeTree(), Block(List(pendingSuperCall), Literal(Constant(()))))
+ def classWith(name: TypeName, parents: List[Tree] = List(anyRef), body: List[DefDef] = Nil) =
+ ClassDef(
+ Modifiers(), name, List(),
+ Template(parents, emptyValDef, emtpyConstructor :: body))
+
+ property("construct case class") = test {
+ val params = q"val x: Int" :: q"val y: Int" :: Nil
+ val name = TypeName("Point")
+ assertEqAst(q"$CASE class $name(..$params)", "case class Point(x: Int, y: Int)")
+ }
+
+ property("case class bare param") = test {
+ assertEqAst(q"$CASE class Point(x: Int, y: Int)", "case class Point(private[this] val x: Int, private[this] val y: Int)")
+ }
+
+ property("generate default constructors automatically") = test {
+ val parents = List.empty[Tree]
+ assertEqAst(q"class Foo extends ..$parents", "class Foo")
+ }
+
+ property("splice term name into class") = forAll { (name: TypeName) =>
+ eqAst(q"class $name", "class " + name.toString)
+ }
+
+ property("splice method into class") = forAll { (name: TypeName, method: DefDef) =>
+ q"class $name { $method }" ≈ classWith(name, body = List(method))
+ }
+
+ property("splice members into class") = forAll { (name: TypeName, defs: List[DefDef], extra: DefDef) =>
+ q"""class $name {
+ ..$defs
+ $extra
+ }""" ≈ classWith(name, body = defs :+ extra)
+ }
+
+ property("splice type name into class parents") = forAll { (name: TypeName, parent: TypeName) =>
+ q"class $name extends $parent" ≈ classWith(name, parents = List(Ident(parent)))
+ }
+
+ property("param flags are consistent with raw code") = test {
+ val pubx = q"val x: Int"
+ val privx = q"private[this] val x: Int"
+ assertEqAst(q" class C(x: Int)", " class C(x: Int) ")
+ assertEqAst(q"case class C(x: Int)", "case class C(x: Int) ")
+ assertEqAst(q" class C($pubx) ", " class C(val x: Int) ")
+ assertEqAst(q"case class C($pubx) ", "case class C(x: Int) ")
+ assertEqAst(q" class C($privx)", " class C(x: Int) ")
+ assertEqAst(q"case class C($privx)", "case class C(private[this] val x: Int)")
+ }
+}
+
+trait TraitConstruction { self: QuasiquoteProperties =>
+ property("splice name into trait def") = test {
+ val Foo = TypeName("Foo")
+ assert(q"trait $Foo" ≈ q"trait Foo")
+ }
+
+ property("splice type params into trait def") = test {
+ val tparams = q"type A" :: q"type B" :: Nil
+ assert(q"trait Foo[..$tparams]" ≈ q"trait Foo[A, B]")
+ }
+
+ property("splice defs into trait body") = test {
+ val body = q"def foo" :: q"val bar: Baz" :: Nil
+ assert(q"trait Foo { ..$body }" ≈ q"trait Foo { def foo; val bar: Baz }")
+ }
+
+ property("splice parents into trait") = test {
+ val parents = tq"A" :: tq"B" :: Nil
+ assert(q"trait Foo extends ..$parents" ≈ q"trait Foo extends A with B")
+ }
+
+ property("splice early valdef into trait") = test {
+ val x = q"val x: Int = 1"
+ assertEqAst(q"trait T extends { $x } with Any", "trait T extends { val x: Int = 1} with Any")
+ }
+
+ property("construct trait with early valdef") = test {
+ assertEqAst(q"trait T extends { val x: Int = 1 } with Any", "trait T extends { val x: Int = 1 } with Any")
+ }
+
+ property("splice defs into early block") = test {
+ val defs = q"val x: Int = 0" :: q"type Foo = Bar" :: Nil
+ assert(q"trait T extends { ..$defs } with Bippy" ≈
+ q"trait T extends { val x: Int = 0; type Foo = Bar} with Bippy")
+ }
+
+ property("fail on splicing of non-valid early tree") = test {
+ val defn = q"def x: Int = 0"
+ assertThrows[IllegalArgumentException] { q"trait T extends { $defn } with Bar" }
+ }
+}
+
+trait TypeDefConstruction { self: QuasiquoteProperties =>
+ property("splice type name into typedef") = forAll { (name1: TypeName, name2: TypeName) =>
+ q"type $name1 = $name2" ≈ TypeDef(Modifiers(), name1, List(), Ident(name2))
+ }
+
+ property("splice type names into type bounds") = forAll { (T1: TypeName, T2: TypeName, T3: TypeName) =>
+ q"type $T1 >: $T2 <: $T3" ≈
+ TypeDef(
+ Modifiers(DEFERRED), T1, List(),
+ TypeBoundsTree(Ident(T2), Ident(T3)))
+ }
+
+ property("splice trees names into type bounds") = forAll { (T: TypeName, t1: Tree, t2: Tree) =>
+ q"type $T >: $t1 <: $t2" ≈
+ TypeDef(
+ Modifiers(DEFERRED), T, List(),
+ TypeBoundsTree(t1, t2))
+ }
+
+ property("splice tparams into typedef (1)") = forAll { (T: TypeName, targs: List[TypeDef], t: Tree) =>
+ q"type $T[..$targs] = $t" ≈ TypeDef(Modifiers(), T, targs, t)
+ }
+
+ property("splice tparams into typedef (2)") = forAll { (T: TypeName, targs1: List[TypeDef], targs2: List[TypeDef], t: Tree) =>
+ q"type $T[..$targs1, ..$targs2] = $t" ≈ TypeDef(Modifiers(), T, targs1 ++ targs2, t)
+ }
+
+ property("splice tparams into typedef (3)") = forAll { (T: TypeName, targ: TypeDef, targs: List[TypeDef], t: Tree) =>
+ q"type $T[$targ, ..$targs] = $t" ≈ TypeDef(Modifiers(), T, targ :: targs, t)
+ }
+
+ property("splice typename into typedef with default bounds") = forAll { (T1: TypeName, T2: TypeName, t: Tree) =>
+ q"type $T1[$T2 >: Any <: Nothing] = $t" ≈
+ TypeDef(
+ Modifiers(), T1,
+ List(TypeDef(
+ Modifiers(PARAM), T2,
+ List(),
+ TypeBoundsTree(
+ Ident(TypeName("Any")),
+ Ident(TypeName("Nothing"))))),
+ t)
+ }
+
+ property("splice type names into compound type tree") = forAll { (T: TypeName, A: TypeName, B: TypeName) =>
+ q"type $T = $A with $B" ≈
+ TypeDef(
+ Modifiers(), T, List(),
+ CompoundTypeTree(
+ Template(List(Ident(A), Ident(B)), ValDef(Modifiers(PRIVATE), nme.WILDCARD, TypeTree(), EmptyTree), List())))
+ }
+
+ property("splice trees into existential type tree") = forAll {
+ (T1: TypeName, T2: TypeName, X: TypeName, Lo: TypeName, Hi: TypeName) =>
+
+ q"type $T1 = $T2[$X] forSome { type $X >: $Lo <: $Hi }" ≈
+ TypeDef(
+ Modifiers(), T1, List(),
+ ExistentialTypeTree(
+ AppliedTypeTree(Ident(T2), List(Ident(X))),
+ List(
+ TypeDef(Modifiers(DEFERRED), X, List(), TypeBoundsTree(Ident(Lo), Ident(Hi))))))
+ }
+
+ property("splice tree into singleton type tree") = forAll { (name: TypeName, t: Tree) =>
+ q"type $name = $t.type" ≈ q"type $name = ${SingletonTypeTree(t)}"
+ }
+
+ property("splice into applied type tree") = forAll { (T1: TypeName, T2: TypeName, args: List[Tree]) =>
+ q"type $T1 = $T2[..$args]" ≈
+ TypeDef(Modifiers(), T1, List(),
+ if(args.nonEmpty) AppliedTypeTree(Ident(T2), args) else Ident(T2))
+ }
+}
+
+trait ValDefConstruction { self: QuasiquoteProperties =>
+ property("splice term name into val") = forAll { (name: TermName, tpt: Tree, rhs: Tree) =>
+ q"val $name: $tpt = $rhs" ≈ ValDef(Modifiers(), name, tpt, rhs)
+ }
+
+ property("splice term name into var") = forAll { (name: TermName, tpt: Tree, rhs: Tree) =>
+ q"var $name: $tpt = $rhs" ≈ ValDef(Modifiers(MUTABLE), name, tpt, rhs)
+ }
+}
+
+trait MethodConstruction { self: QuasiquoteProperties =>
+ property("splice paramss into defdef") = test {
+ val paramss = List(q"val x: Int") :: List(q"val y: Int = 1") :: Nil
+ assert(q"def foo(...$paramss)" ≈ parse("def foo(x: Int)(y: Int = 1)"))
+ }
+
+ property("splice tparams into defdef") = test {
+ val tparams = q"type A" :: q"type B <: Bippy" :: Nil
+ assert(q"def foo[..$tparams]" ≈ parse("def foo[A, B <: Bippy]"))
+ }
+
+ def assertSameAnnots(tree: {def mods: Modifiers}, annots: List[Tree]) =
+ assert(tree.mods.annotations ≈ annots,
+ s"${tree.mods.annotations} =/= ${annots}")
+
+ def assertSameAnnots(tree1: {def mods: Modifiers}, tree2: {def mods: Modifiers}) =
+ assert(tree1.mods.annotations ≈ tree2.mods.annotations,
+ s"${tree1.mods.annotations} =/= ${tree2.mods.annotations}")
+
+ property("splice type name into annotation") = test {
+ val name = TypeName("annot")
+ assertSameAnnots(q"@$name def foo", List(annot(name)))
+ }
+
+ property("splice ident into annotation") = test {
+ val name = TypeName("annot")
+ val ident = Ident(name)
+ assertSameAnnots(q"@$ident def foo", List(annot(name)))
+ }
+
+ property("splice idents into annotation") = test {
+ val idents = List(Ident(TypeName("annot1")), Ident(TypeName("annot2")))
+ assertSameAnnots(q"@..$idents def foo",
+ idents.map { ident => Apply(Select(New(ident), nme.CONSTRUCTOR), List()) })
+ }
+
+ property("splice constructor calls into annotation") = test {
+ val ctorcalls = List(annot("a1"), annot("a2"))
+ assertSameAnnots(q"@..$ctorcalls def foo", ctorcalls)
+ }
+
+ property("splice multiple annotations (1)") = test {
+ val annot1 = annot("a1")
+ val annot2 = annot("a2")
+ val res = q"@$annot1 @$annot2 def foo"
+ assertSameAnnots(res, List(annot1, annot2))
+ }
+
+ property("splice multiple annotations (2)") = test {
+ val annot1 = annot("a1")
+ val annots = List(annot("a2"), annot("a3"))
+ val res = q"@$annot1 @..$annots def foo"
+ assertSameAnnots(res, annot1 :: annots)
+ }
+
+ property("splice annotations with arguments (1)") = test {
+ val a = annot("a", List(q"x"))
+ assertSameAnnots(q"@$a def foo", q"@a(x) def foo")
+ }
+
+ property("splice annotations with arguments (2)") = test {
+ val a = newTypeName("a")
+ assertSameAnnots(q"@$a(x) def foo", q"@a(x) def foo")
+ }
+
+ property("splice annotations with arguments (3") = test {
+ val a = Ident(newTypeName("a"))
+ assertSameAnnots(q"@$a(x) def foo", q"@a(x) def foo")
+ }
+
+ property("splice improper tree into annot") = test {
+ val t = tq"Foo[Baz]"
+ assertThrows[IllegalArgumentException] {
+ q"@$t def foo"
+ }
+ }
+
+ property("can't splice annotations with arguments specificed twice") = test {
+ val a = annot("a", List(q"x"))
+ assertThrows[IllegalArgumentException] {
+ q"@$a(y) def foo"
+ }
+ }
+
+ property("splice annotation with targs") = test {
+ val a = q"new Foo[A, B]"
+ assertEqAst(q"@$a def foo", "@Foo[A,B] def foo")
+ }
+
+ property("splice annotation with multiple argument lists") = test{
+ val a = q"new Foo(a)(b)"
+ assertEqAst(q"@$a def foo", "@Foo(a)(b) def foo")
+ }
+} \ No newline at end of file
diff --git a/test/files/scalacheck/quasiquotes/DefinitionDeconstructionProps.scala b/test/files/scalacheck/quasiquotes/DefinitionDeconstructionProps.scala
new file mode 100644
index 0000000000..fdfbfe871c
--- /dev/null
+++ b/test/files/scalacheck/quasiquotes/DefinitionDeconstructionProps.scala
@@ -0,0 +1,147 @@
+import org.scalacheck._
+import Prop._
+import Gen._
+import Arbitrary._
+
+import scala.reflect.runtime.universe._
+import Flag._
+
+object DefinitionDeconstructionProps
+ extends QuasiquoteProperties("definition deconstruction")
+ with TraitDeconstruction
+ with ClassDeconstruction
+ with ObjectDeconstruction
+ with ModsDeconstruction
+ with ValVarDeconstruction
+
+trait TraitDeconstruction { self: QuasiquoteProperties =>
+ property("exhaustive trait matcher") = test {
+ def matches(line: String) {
+ val q"""$mods trait $name[..$targs]
+ extends { ..$early } with ..$parents { $self => ..$body }""" = parse(line)
+ }
+ matches("trait Foo")
+ matches("trait Foo[T]")
+ matches("trait Foo { def bar }")
+ matches("trait Foo extends Bar with Baz")
+ matches("trait Foo { self: Bippy => val x: Int = 1}")
+ matches("trait Foo extends { val early: Int = 1 } with Bar { val late = early }")
+ matches("private[Gap] trait Foo")
+ }
+}
+
+trait ObjectDeconstruction { self: QuasiquoteProperties =>
+ property("exhaustive object matcher") = test {
+ def matches(line: String) = {
+ val q"""$mods object $name extends { ..$early } with ..$parents { $self => ..$body }""" = parse(line)
+ }
+ matches("object Foo")
+ matches("object Foo extends Bar[T]")
+ matches("object Foo extends { val early: T = v } with Bar")
+ matches("object Foo extends Foo { selfy => body }")
+ matches("private[Bippy] object Foo extends Bar with Baz")
+ }
+}
+
+trait ClassDeconstruction { self: QuasiquoteProperties =>
+ property("class without params") = test {
+ val q"class $name { ..$body }" = q"class Foo { def bar = 3 }"
+ assert(body ≈ List(q"def bar = 3"))
+ }
+
+ property("class constructor") = test {
+ val q"class $name(...$argss)" = q"class Foo(x: Int)(y: Int)"
+ assert(argss.length == 2)
+ }
+
+ property("class parents") = test {
+ val q"class $name extends ..$parents" = q"class Foo extends Bar with Blah"
+ assert(parents ≈ List(tq"Bar", tq"Blah"))
+ }
+
+ property("class selfdef") = test {
+ val q"class $name { $self => }" = q"class Foo { self: T => }"
+ assert(self.name ≈ TermName("self") && self.tpt ≈ tq"T")
+ }
+
+ property("class tparams") = test {
+ val q"class $name[..$tparams]" = q"class Foo[A, B]"
+ assert(tparams.map { _.name } == List(TypeName("A"), TypeName("B")))
+ }
+
+ property("deconstruct bare case class") = test {
+ val q"$mods class $name(..$args) extends ..$parents" = q"case class Foo(x: Int)"
+ }
+
+ property("exhaustive class matcher") = test {
+ def matches(line: String) {
+ val q"""$classMods class $name[..$targs] $ctorMods(...$argss)
+ extends { ..$early } with ..$parents { $self => ..$body }""" = parse(line)
+ }
+ matches("class Foo")
+ matches("class Foo[T]")
+ matches("class Foo[T] @annot")
+ matches("class Foo extends Bar with Baz")
+ matches("class Foo { body }")
+ matches("class Foo extends { val early = 0 } with Any")
+ matches("abstract class Foo")
+ matches("private[Baz] class Foo")
+ matches("class Foo(first: A)(second: B)")
+ matches("class Foo(first: A) extends Bar(first) with Baz")
+ matches("class Foo private (first: A) { def bar }")
+ matches("class Foo { self => bar(self) }")
+ matches("case class Foo(x: Int)")
+ }
+}
+
+trait ModsDeconstruction { self: QuasiquoteProperties =>
+ property("deconstruct mods") = test {
+ val mods = Modifiers(IMPLICIT | PRIVATE, TermName("foobar"), Nil)
+ val q"$mods0 def foo" = q"$mods def foo"
+ assert(mods0 ≈ mods)
+ }
+
+ property("@$annot def foo") = forAll { (annotName: TypeName) =>
+ val q"@$annot def foo" = q"@$annotName def foo"
+ annot ≈ Apply(Select(New(Ident(annotName)), nme.CONSTRUCTOR), List())
+ }
+
+ property("@$annot(..$args) def foo") = forAll { (annotName: TypeName, tree: Tree) =>
+ val q"@$annot(..$args) def foo" = q"@$annotName($tree) def foo"
+ annot ≈ Ident(annotName) && args ≈ List(tree)
+ }
+
+ property("@..$annots def foo") = test {
+ val a = annot("a")
+ val b = annot("b")
+ val q"@..$annots def foo" = q"@$a @$b def foo"
+ annots ≈ List(a, b)
+ }
+
+ property("@$annot @..$annots def foo") = test {
+ val a = annot("a")
+ val b = annot("b")
+ val c = annot("c")
+ val q"@$first @..$rest def foo" = q"@$a @$b @$c def foo"
+ first ≈ a && rest ≈ List(b, c)
+ }
+}
+
+trait ValVarDeconstruction { self: QuasiquoteProperties =>
+ property("exhaustive val matcher") = test {
+ def matches(line: String) { val q"$mods val $name: $tpt = $rhs" = parse(line) }
+ matches("val x: Int")
+ matches("val x: Int = 1")
+ matches("lazy val x: Int = 1")
+ matches("implicit val x = 1")
+ assertThrows[MatchError] { matches("var x = 1") }
+ }
+
+ property("exhaustive var matcher") = test {
+ def matches(line: String) { val q"$mods var $name: $tpt = $rhs" = parse(line) }
+ matches("var x: Int")
+ matches("var x: Int = 1")
+ matches("var x = 1")
+ assertThrows[MatchError] { matches("val x = 1") }
+ }
+} \ No newline at end of file
diff --git a/test/files/scalacheck/quasiquotes/PatternConstructionProps.scala b/test/files/scalacheck/quasiquotes/PatternConstructionProps.scala
index aee50c9c5f..504cb2a77d 100644
--- a/test/files/scalacheck/quasiquotes/PatternConstructionProps.scala
+++ b/test/files/scalacheck/quasiquotes/PatternConstructionProps.scala
@@ -32,6 +32,6 @@ object PatternConstructionProps extends QuasiquoteProperties("pattern constructi
}
property("splice into casedef") = forAll { (pat: Tree, cond: Tree, body: Tree) =>
- cq"$pat if $cond => $body" ≈ CaseDef(pat, cond, Block(List(), body))
+ cq"$pat if $cond => $body" ≈ CaseDef(pat, cond, body)
}
} \ No newline at end of file
diff --git a/test/files/scalacheck/quasiquotes/QuasiquoteProperties.scala b/test/files/scalacheck/quasiquotes/QuasiquoteProperties.scala
index 5e87aa57cc..6a531071bf 100644
--- a/test/files/scalacheck/quasiquotes/QuasiquoteProperties.scala
+++ b/test/files/scalacheck/quasiquotes/QuasiquoteProperties.scala
@@ -1,7 +1,10 @@
import scala.reflect.runtime.universe._
-import scala.tools.reflect.ToolBox
-import scala.tools.reflect.ToolBoxError
+import scala.reflect.runtime.universe.definitions._
+import scala.reflect.runtime.universe.Flag._
+import scala.reflect.runtime.currentMirror
+import scala.reflect.api.{Liftable, Universe}
import scala.reflect.macros.TypecheckException
+import scala.tools.reflect.{ToolBox, ToolBoxError}
import org.scalacheck._
import Prop._
@@ -57,6 +60,14 @@ trait Helpers {
assert(false, "exception wasn't thrown")
}
+ def assertEqAst(tree: Tree, code: String) = assert(eqAst(tree, code))
+ def eqAst(tree: Tree, code: String) = tree ≈ parse(code)
+
+ val toolbox = currentMirror.mkToolBox()
+ val parse = toolbox.parse(_)
+ val compile = toolbox.compile(_)
+ val eval = toolbox.eval(_)
+
def fails(msg: String, block: String) = {
def result(ok: Boolean, description: String = "") = {
val status = if (ok) Prop.Proof else Prop.False
@@ -64,14 +75,12 @@ trait Helpers {
Prop { new Prop.Result(status, Nil, Set.empty, labels) }
}
try {
- val tb = rootMirror.mkToolBox()
- val tree = tb.parse(s"""
+ compile(parse(s"""
object Wrapper extends Helpers {
import scala.reflect.runtime.universe._
$block
}
- """)
- tb.compile(tree)
+ """))
result(false, "given code doesn't fail to typecheck")
} catch {
case ToolBoxError(emsg, _) =>
diff --git a/test/files/scalacheck/quasiquotes/TermConstructionProps.scala b/test/files/scalacheck/quasiquotes/TermConstructionProps.scala
index b14945f24b..c6cca85c81 100644
--- a/test/files/scalacheck/quasiquotes/TermConstructionProps.scala
+++ b/test/files/scalacheck/quasiquotes/TermConstructionProps.scala
@@ -7,16 +7,6 @@ import scala.reflect.runtime.universe._
import Flag._
object TermConstructionProps extends QuasiquoteProperties("term construction") {
- val anyRef = Select(Ident(TermName("scala")), TypeName("AnyRef"))
- val emtpyConstructor =
- DefDef(
- Modifiers(), nme.CONSTRUCTOR, List(),
- List(List()), TypeTree(), Block(List(Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), List())), Literal(Constant(()))))
-
- def classWithMethods(name: TypeName, methods: List[DefDef] = Nil) =
- ClassDef(
- Modifiers(), name, List(),
- Template(List(anyRef), emptyValDef, List(emtpyConstructor) ++ methods))
property("splice single tree return tree itself") = forAll { (t: Tree) =>
q"$t" ≈ t
@@ -26,22 +16,6 @@ object TermConstructionProps extends QuasiquoteProperties("term construction") {
q"if($t1) $t2 else $t3" ≈ If(t1, t2, t3)
}
- property("splice term name into val") = forAll { (name: TermName) =>
- q"val $name = 0" ≈ ValDef(Modifiers(), name, TypeTree(), Literal(Constant(0)))
- }
-
- property("splice type name into typedef") = forAll { (name1: TypeName, name2: TypeName) =>
- q"type $name1 = $name2" ≈ TypeDef(Modifiers(), name1, List(), Ident(name2))
- }
-
- property("splice term name into class") = forAll { (name: TypeName) =>
- q"class $name" ≈ classWithMethods(name)
- }
-
- property("splice method into class") = forAll { (name: TypeName, method: DefDef) =>
- q"class $name { $method }" ≈ classWithMethods(name, List(method))
- }
-
property("splice trees into ascriptiopn") = forAll { (t1: Tree, t2: Tree) =>
q"$t1 : $t2" ≈ Typed(t1, t2)
}
@@ -69,19 +43,13 @@ object TermConstructionProps extends QuasiquoteProperties("term construction") {
}
property("splice trees into block") = forAll { (t1: Tree, t2: Tree, t3: Tree) =>
- q"""{
+ blockInvariant(q"""{
$t1
$t2
$t3
- }""" ≈ Block(List(t1, t2), t3)
+ }""", List(t1, t2, t3))
}
- property("splice type name into class parents") = forAll { (name: TypeName, parent: TypeName) =>
- q"class $name extends $parent" ≈
- ClassDef(
- Modifiers(), name, List(),
- Template(List(Ident(parent)), emptyValDef, List(emtpyConstructor)))
- }
property("splice tree into new") = forAll { (tree: Tree) =>
q"new $tree" ≈ Apply(Select(New(tree), nme.CONSTRUCTOR), List())
@@ -101,13 +69,6 @@ object TermConstructionProps extends QuasiquoteProperties("term construction") {
q"$fun($arg1, $arg2, ..$args)" ≈ Apply(fun, List(arg1) ++ List(arg2) ++ args)
}
- property("splice members into class") = forAll { (name: TypeName, defs: List[DefDef], extra: DefDef) =>
- q"""class $name {
- ..$defs
- $extra
- }""" ≈ classWithMethods(name, defs ++ List(extra))
- }
-
property("splice into new") = forAll { (name: TypeName, body: List[Tree]) =>
q"new $name { ..$body }" ≈
q"""{
@@ -118,11 +79,6 @@ object TermConstructionProps extends QuasiquoteProperties("term construction") {
}"""
}
-
- property("splice tree into singleton type tree") = forAll { (name: TypeName, t: Tree) =>
- q"type $name = $t.type" ≈ q"type $name = ${SingletonTypeTree(t)}"
- }
-
property("splice type name into this") = forAll { (T: TypeName) =>
q"$T.this" ≈ This(T)
}
@@ -132,66 +88,7 @@ object TermConstructionProps extends QuasiquoteProperties("term construction") {
}
property("splice trees into type apply") = forAll { (fun: TreeIsTerm, types: List[Tree]) =>
- q"$fun[..$types]" ≈ TypeApply(fun, types)
- }
-
- property("splice type names into type bounds") = forAll { (T1: TypeName, T2: TypeName, T3: TypeName) =>
- q"type $T1 >: $T2 <: $T3" ≈
- TypeDef(
- Modifiers(DEFERRED), T1, List(),
- TypeBoundsTree(Ident(T2), Ident(T3)))
- }
-
- property("splice trees names into type bounds") = forAll { (T: TypeName, t1: Tree, t2: Tree) =>
- q"type $T >: $t1 <: $t2" ≈
- TypeDef(
- Modifiers(DEFERRED), T, List(),
- TypeBoundsTree(t1, t2))
- }
-
- property("splice tparams into typedef (1)") = forAll { (T: TypeName, targs: List[TypeDef], t: Tree) =>
- q"type $T[..$targs] = $t" ≈ TypeDef(Modifiers(), T, targs, t)
- }
-
- property("splice tparams into typedef (2)") = forAll { (T: TypeName, targs1: List[TypeDef], targs2: List[TypeDef], t: Tree) =>
- q"type $T[..$targs1, ..$targs2] = $t" ≈ TypeDef(Modifiers(), T, targs1 ++ targs2, t)
- }
-
- property("splice tparams into typedef (3)") = forAll { (T: TypeName, targ: TypeDef, targs: List[TypeDef], t: Tree) =>
- q"type $T[$targ, ..$targs] = $t" ≈ TypeDef(Modifiers(), T, targ :: targs, t)
- }
-
- property("splice typename into typedef with default bounds") = forAll { (T1: TypeName, T2: TypeName, t: Tree) =>
- q"type $T1[$T2 >: Any <: Nothing] = $t" ≈
- TypeDef(
- Modifiers(), T1,
- List(TypeDef(
- Modifiers(PARAM), T2,
- List(),
- TypeBoundsTree(
- Ident(TypeName("Any")),
- Ident(TypeName("Nothing"))))),
- t)
- }
-
- property("splice type names into compound type tree") = forAll { (T: TypeName, A: TypeName, B: TypeName) =>
- q"type $T = $A with $B" ≈
- TypeDef(
- Modifiers(), T, List(),
- CompoundTypeTree(
- Template(List(Ident(A), Ident(B)), ValDef(Modifiers(PRIVATE), nme.WILDCARD, TypeTree(), EmptyTree), List())))
- }
-
- property("splice trees into existential type tree") = forAll {
- (T1: TypeName, T2: TypeName, X: TypeName, Lo: TypeName, Hi: TypeName) =>
-
- q"type $T1 = $T2[$X] forSome { type $X >: $Lo <: $Hi }" ≈
- TypeDef(
- Modifiers(), T1, List(),
- ExistentialTypeTree(
- AppliedTypeTree(Ident(T2), List(Ident(X))),
- List(
- TypeDef(Modifiers(DEFERRED), X, List(), TypeBoundsTree(Ident(Lo), Ident(Hi))))))
+ q"$fun[..$types]" ≈ (if (types.nonEmpty) TypeApply(fun, types) else fun)
}
property("splice names into import selector") = forAll {
@@ -223,95 +120,24 @@ object TermConstructionProps extends QuasiquoteProperties("term construction") {
CaseDef(Alternative(List(A, B)), EmptyTree, Literal(Constant(())))))
}
- property("splice into applied type tree") = forAll { (T1: TypeName, T2: TypeName, args: List[Tree]) =>
- q"type $T1 = $T2[..$args]" ≈
- TypeDef(
- Modifiers(), T1, List(),
- AppliedTypeTree(Ident(T2), args))
- }
+ def blockInvariant(quote: Tree, trees: List[Tree]) =
+ quote ≈ (trees match {
+ case Nil => q"()"
+ case _ :+ last if !last.isTerm => Block(trees, q"()")
+ case head :: Nil => head
+ case init :+ last => Block(init, last)
+ })
property("splice list of trees into block (1)") = forAll { (trees: List[Tree]) =>
- q"{ ..$trees }" ≈ (trees match {
- case Nil => Block(Nil, q"()")
- case _ => Block(trees.init, trees.last)
- })
+ blockInvariant(q"{ ..$trees }", trees)
}
property("splice list of trees into block (2)") = forAll { (trees1: List[Tree], trees2: List[Tree]) =>
- q"{ ..$trees1 ; ..$trees2 }" ≈ ((trees1 ++ trees2) match {
- case Nil => Block(Nil, Literal(Constant(())))
- case trees => Block(trees.init, trees.last)
- })
+ blockInvariant(q"{ ..$trees1 ; ..$trees2 }", trees1 ++ trees2)
}
property("splice list of trees into block (3)") = forAll { (trees: List[Tree], tree: Tree) =>
- q"{ ..$trees; $tree }" ≈ Block(trees, tree)
- }
-
- def assertSameAnnots(tree: {def mods: Modifiers}, annots: List[Tree]) =
- assert(tree.mods.annotations ≈ annots,
- s"${tree.mods.annotations} =/= ${annots}")
-
- def assertSameAnnots(tree1: {def mods: Modifiers}, tree2: {def mods: Modifiers}) =
- assert(tree1.mods.annotations ≈ tree2.mods.annotations,
- s"${tree1.mods.annotations} =/= ${tree2.mods.annotations}")
-
- property("splice type name into annotation") = test {
- val name = TypeName("annot")
- assertSameAnnots(q"@$name def foo", List(annot(name)))
- }
-
- property("splice ident into annotation") = test {
- val name = TypeName("annot")
- val ident = Ident(name)
- assertSameAnnots(q"@$ident def foo", List(annot(name)))
- }
-
- property("splice idents into annotation") = test {
- val idents = List(Ident(TypeName("annot1")), Ident(TypeName("annot2")))
- assertSameAnnots(q"@..$idents def foo",
- idents.map { ident => Apply(Select(New(ident), nme.CONSTRUCTOR), List()) })
- }
-
- property("splice constructor calls into annotation") = test {
- val ctorcalls = List(annot("a1"), annot("a2"))
- assertSameAnnots(q"@..$ctorcalls def foo", ctorcalls)
- }
-
- property("splice multiple annotations (1)") = test {
- val annot1 = annot("a1")
- val annot2 = annot("a2")
- val res = q"@$annot1 @$annot2 def foo"
- assertSameAnnots(res, List(annot1, annot2))
- }
-
- property("splice multiple annotations (2)") = test {
- val annot1 = annot("a1")
- val annots = List(annot("a2"), annot("a3"))
- val res = q"@$annot1 @..$annots def foo"
- assertSameAnnots(res, annot1 :: annots)
- }
-
- property("splice annotations with arguments (1)") = test {
- val a = annot("a", List(q"x"))
- assertSameAnnots(q"@$a def foo", q"@a(x) def foo")
- }
-
- property("splice annotations with arguments (2)") = test {
- val a = newTypeName("a")
- assertSameAnnots(q"@$a(x) def foo", q"@a(x) def foo")
- }
-
- property("splice annotations with arguments (3") = test {
- val a = Ident(newTypeName("a"))
- assertSameAnnots(q"@$a(x) def foo", q"@a(x) def foo")
- }
-
- property("can't splice annotations with arguments specificed twice") = test {
- val a = annot("a", List(q"x"))
- assertThrows[IllegalArgumentException] {
- q"@$a(y) def foo"
- }
+ blockInvariant(q"{ ..$trees; $tree }", trees :+ tree)
}
property("splice term into brackets") = test {
@@ -332,10 +158,13 @@ object TermConstructionProps extends QuasiquoteProperties("term construction") {
assert(q"(..$empty)" ≈ q"()")
}
- property("splice improper tree into annot") = test {
- val t = tq"Foo[Baz]"
- assertThrows[IllegalArgumentException] {
- q"@$t def foo"
- }
+ property("function param flags are the same") = test {
+ val xy = q"val x: A" :: q"val y: B" :: Nil
+ assertEqAst(q"(..$xy) => x + y", "(x: A, y: B) => x + y")
+ }
+
+ property("anonymous functions don't support default values") = test {
+ val x = q"val x: Int = 1"
+ assertThrows[IllegalArgumentException] { q"($x) => x" }
}
}
diff --git a/test/files/scalacheck/quasiquotes/TermDeconstructionProps.scala b/test/files/scalacheck/quasiquotes/TermDeconstructionProps.scala
index 114c9f112b..45c7ee4bb7 100644
--- a/test/files/scalacheck/quasiquotes/TermDeconstructionProps.scala
+++ b/test/files/scalacheck/quasiquotes/TermDeconstructionProps.scala
@@ -7,7 +7,6 @@ import scala.reflect.runtime.universe._
import Flag._
object TermDeconstructionProps extends QuasiquoteProperties("term deconstruction") {
-
property("f(..x) = f") = test {
assertThrows[MatchError] {
val q"f(..$argss)" = q"f"
@@ -44,56 +43,6 @@ object TermDeconstructionProps extends QuasiquoteProperties("term deconstruction
argss ≈ List()
}
- property("@$annot def foo") = forAll { (annotName: TypeName) =>
- val q"@$annot def foo" = q"@$annotName def foo"
- annot ≈ Apply(Select(New(Ident(annotName)), nme.CONSTRUCTOR), List())
- }
-
- property("@$annot(..$args) def foo") = forAll { (annotName: TypeName, tree: Tree) =>
- val q"@$annot(..$args) def foo" = q"@$annotName($tree) def foo"
- annot ≈ Ident(annotName) && args ≈ List(tree)
- }
-
- property("@..$annots def foo") = test {
- val a = annot("a")
- val b = annot("b")
- val q"@..$annots def foo" = q"@$a @$b def foo"
- annots ≈ List(a, b)
- }
-
- property("@$annot @..$annots def foo") = test {
- val a = annot("a")
- val b = annot("b")
- val c = annot("c")
- val q"@$first @..$rest def foo" = q"@$a @$b @$c def foo"
- first ≈ a && rest ≈ List(b, c)
- }
-
- property("class without params") = test {
- val q"class $name { ..$body }" = q"class Foo { def bar = 3 }"
- assert(body ≈ List(q"def bar = 3"))
- }
-
- property("class constructor") = test {
- val q"class $name(...$argss)" = q"class Foo(x: Int)(y: Int)"
- assert(argss.length == 2)
- }
-
- property("class parents") = test {
- val q"class $name extends ..$parents" = q"class Foo extends Bar with Blah"
- assert(parents ≈ List(tq"Bar", tq"Blah"))
- }
-
- property("class selfdef") = test {
- val q"class $name { $self => }" = q"class Foo { self: T => }"
- assert(self.name ≈ TermName("self") && self.tpt ≈ tq"T")
- }
-
- property("class tparams") = test {
- val q"class $name[..$tparams]" = q"class Foo[A, B]"
- assert(tparams.map { _.name } == List(TypeName("A"), TypeName("B")))
- }
-
property("deconstruct unit as tuple") = test {
val q"(..$xs)" = q"()"
assert(xs.isEmpty)
@@ -114,9 +63,32 @@ object TermDeconstructionProps extends QuasiquoteProperties("term deconstruction
x ≈ q"x" && cases ≈ List(cq"1 =>", cq"2 =>")
}
- property("deconstruct mods") = test {
- val mods = Modifiers(IMPLICIT | PRIVATE, TermName("foobar"), Nil)
- val q"$mods0 def foo" = q"$mods def foo"
- assert(mods0 ≈ mods)
+ property("deconstruct block") = test {
+ val q"{ ..$xs }" = q"{ x1; x2; x3 }"
+ assert(xs ≈ List(q"x1", q"x2", q"x3"))
+ }
+
+ property("exhaustive function matcher") = test {
+ def matches(line: String) { val q"(..$args) => $body" = parse(line) }
+ matches("() => bippy")
+ matches("(y: Y) => y oh y")
+ matches("(x: X, y: Y) => x and y")
}
-} \ No newline at end of file
+
+ property("exhaustive new pattern") = test {
+ def matches(line: String) {
+ val q"new { ..$early } with $name[..$targs](...$vargss) with ..$mixin { $self => ..$body }" = parse(line)
+ }
+ matches("new foo")
+ matches("new foo { body }")
+ matches("new foo[t]")
+ matches("new foo(x)")
+ matches("new foo[t](x)")
+ matches("new foo[t](x) { body }")
+ matches("new foo with bar")
+ matches("new foo with bar { body }")
+ matches("new { anonymous }")
+ matches("new { val early = 1} with Parent[Int] { body }")
+ matches("new Foo { selfie => }")
+ }
+}
diff --git a/test/files/scalacheck/quasiquotes/Test.scala b/test/files/scalacheck/quasiquotes/Test.scala
index 2387a9b008..05097711ef 100644
--- a/test/files/scalacheck/quasiquotes/Test.scala
+++ b/test/files/scalacheck/quasiquotes/Test.scala
@@ -9,4 +9,6 @@ object Test extends Properties("quasiquotes") {
include(PatternDeconstructionProps)
include(LiftableProps)
include(ErrorProps)
+ include(DefinitionConstructionProps)
+ include(DefinitionDeconstructionProps)
} \ No newline at end of file
diff --git a/test/files/scalacheck/quasiquotes/TypeConstructionProps.scala b/test/files/scalacheck/quasiquotes/TypeConstructionProps.scala
index 535ed8ecbf..cac83ff8ac 100644
--- a/test/files/scalacheck/quasiquotes/TypeConstructionProps.scala
+++ b/test/files/scalacheck/quasiquotes/TypeConstructionProps.scala
@@ -22,4 +22,15 @@ object TypeConstructionProps extends QuasiquoteProperties("type construction")
assert(tq"(..$ts)" ≈ tq"Tuple2[t1, t2]")
assert(tq"(t0, ..$ts)" ≈ tq"Tuple3[t0, t1, t2]")
}
+
+ property("refined type") = test {
+ val stats = q"def foo" :: q"val x: Int" :: q"type Y = String" :: Nil
+ assert(tq"T { ..$stats }" ≈ tq"T { def foo; val x: Int; type Y = String }")
+ }
+
+ property("function type") = test {
+ val argtpes = tq"A" :: tq"B" :: Nil
+ val restpe = tq"C"
+ assert(tq"..$argtpes => $restpe" ≈ tq"(A, B) => C")
+ }
} \ No newline at end of file
diff --git a/test/files/scalacheck/quasiquotes/TypeDeconstructionProps.scala b/test/files/scalacheck/quasiquotes/TypeDeconstructionProps.scala
index 6ab699d4f0..e1d5f4df96 100644
--- a/test/files/scalacheck/quasiquotes/TypeDeconstructionProps.scala
+++ b/test/files/scalacheck/quasiquotes/TypeDeconstructionProps.scala
@@ -19,11 +19,22 @@ object TypeDeconstructionProps extends QuasiquoteProperties("type deconstruction
}
property("tuple type") = test {
- val tq"(..$empty)" = tq"scala.Unit"
+ val tq"(..$empty)" = tq"_root_.scala.Unit"
assert(empty.isEmpty)
val tq"(..$ts)" = tq"(t1, t2)"
assert(ts ≈ List(tq"t1", tq"t2"))
val tq"($head, ..$tail)" = tq"(t0, t1, t2)"
assert(head ≈ tq"t0" && tail ≈ List(tq"t1", tq"t2"))
}
+
+ property("refined type") = test {
+ val tq"T { ..$stats }" = tq"T { def foo; val x: Int; type Y = String }"
+ assert(stats ≈ (q"def foo" :: q"val x: Int" :: q"type Y = String" :: Nil))
+ }
+
+ property("function type") = test {
+ val tq"..$argtpes => $restpe" = tq"(A, B) => C"
+ assert(argtpes ≈ (tq"A" :: tq"B" :: Nil))
+ assert(restpe ≈ tq"C")
+ }
} \ No newline at end of file
diff --git a/test/junit/scala/tools/nsc/util/StackTraceTest.scala b/test/junit/scala/tools/nsc/util/StackTraceTest.scala
new file mode 100644
index 0000000000..e7654244c5
--- /dev/null
+++ b/test/junit/scala/tools/nsc/util/StackTraceTest.scala
@@ -0,0 +1,159 @@
+
+package scala.tools.nsc.util
+
+import scala.language.reflectiveCalls
+import scala.util._
+import PartialFunction.cond
+import Properties.isJavaAtLeast
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+trait Expecting {
+ /*
+ import org.expecty.Expecty
+ final val expect = new Expecty
+ */
+}
+
+
+@RunWith(classOf[JUnit4])
+class StackTraceTest extends Expecting {
+ // formerly an enum
+ val CausedBy = "Caused by: "
+ val Suppressed = "Suppressed: "
+
+ // throws
+ def sample = throw new RuntimeException("Point of failure")
+ def sampler: String = sample
+
+ // repackage with message
+ def resample: String = try { sample } catch { case e: Throwable => throw new RuntimeException("resample", e) }
+ def resampler: String = resample
+
+ // simple wrapper
+ def wrapper: String = try { sample } catch { case e: Throwable => throw new RuntimeException(e) }
+ // another onion skin
+ def rewrapper: String = try { wrapper } catch { case e: Throwable => throw new RuntimeException(e) }
+ def rewrapperer: String = rewrapper
+
+ // only an insane wretch would do this
+ def insane: String = try { sample } catch {
+ case e: Throwable =>
+ val t = new RuntimeException(e)
+ e initCause t
+ throw t
+ }
+ def insaner: String = insane
+
+ /** Java 7 */
+ val suppressable = isJavaAtLeast("1.7")
+ type Suppressing = { def addSuppressed(t: Throwable): Unit }
+
+ def repressed: String = try { sample } catch {
+ case e: Throwable =>
+ val t = new RuntimeException("My problem")
+ if (suppressable) {
+ t.asInstanceOf[Suppressing] addSuppressed e
+ }
+ throw t
+ }
+ def represser: String = repressed
+
+ // evaluating s should throw, p trims stack trace, t is the test of resulting trace string
+ def probe(s: =>String)(p: StackTraceElement => Boolean)(t: String => Unit): Unit = {
+ Try(s) recover { case e => e stackTracePrefixString p } match {
+ case Success(s) => t(s)
+ case Failure(e) => throw e
+ }
+ }
+
+ @Test def showsAllTrace() {
+ probe(sampler)(_ => true) { s =>
+ val res = s.lines.toList
+ /*
+ expect {
+ res.length > 5 // many lines
+ // these expectations may be framework-specific
+ //s contains "sbt.TestFramework"
+ //res.last contains "java.lang.Thread"
+ }
+ */
+ assert (res.length > 5)
+ }
+ }
+ @Test def showsOnlyPrefix() = probe(sample)(_.getMethodName == "sample") { s =>
+ val res = s.lines.toList
+ /*
+ expect {
+ res.length == 3 // summary + one frame + elision
+ }
+ */
+ assert (res.length == 3)
+ }
+ @Test def showsCause() = probe(resampler)(_.getMethodName != "resampler") { s =>
+ val res = s.lines.toList
+ /*
+ expect {
+ res.length == 6 // summary + one frame + elision, caused by + one frame + elision
+ res exists (_ startsWith CausedBy.toString)
+ }
+ */
+ assert (res.length == 6)
+ assert (res exists (_ startsWith CausedBy.toString))
+ }
+ @Test def showsWrappedExceptions() = probe(rewrapperer)(_.getMethodName != "rewrapperer") { s =>
+ val res = s.lines.toList
+ /*
+ expect {
+ res.length == 9 // summary + one frame + elision times three
+ res exists (_ startsWith CausedBy.toString)
+ (res collect {
+ case s if s startsWith CausedBy.toString => s
+ }).size == 2
+ }
+ */
+ assert (res.length == 9)
+ assert (res exists (_ startsWith CausedBy.toString))
+ assert ((res collect {
+ case s if s startsWith CausedBy.toString => s
+ }).size == 2)
+ }
+ @Test def dontBlowOnCycle() = probe(insaner)(_.getMethodName != "insaner") { s =>
+ val res = s.lines.toList
+ /*
+ expect {
+ res.length == 7 // summary + one frame + elision times two with extra frame
+ res exists (_ startsWith CausedBy.toString)
+ }
+ */
+ assert (res.length == 7)
+ assert (res exists (_ startsWith CausedBy.toString))
+ }
+
+ /** Java 7, but shouldn't bomb on Java 6.
+ *
+java.lang.RuntimeException: My problem
+ at scala.tools.nsc.util.StackTraceTest.repressed(StackTraceTest.scala:56)
+ ... 27 elided
+ Suppressed: java.lang.RuntimeException: Point of failure
+ at scala.tools.nsc.util.StackTraceTest.sample(StackTraceTest.scala:29)
+ at scala.tools.nsc.util.StackTraceTest.repressed(StackTraceTest.scala:54)
+ ... 27 more
+ */
+ @Test def showsSuppressed() = probe(represser)(_.getMethodName != "represser") { s =>
+ val res = s.lines.toList
+ if (suppressable) {
+ assert (res.length == 7)
+ assert (res exists (_.trim startsWith Suppressed.toString))
+ }
+ /*
+ expect {
+ res.length == 7
+ res exists (_ startsWith " " + Suppressed.toString)
+ }
+ */
+ }
+}
diff --git a/test/pending/pos/t7778/Foo_1.java b/test/pending/pos/t7778/Foo_1.java
new file mode 100644
index 0000000000..65431ffd46
--- /dev/null
+++ b/test/pending/pos/t7778/Foo_1.java
@@ -0,0 +1,6 @@
+import java.util.concurrent.Callable;
+
+public abstract class Foo_1<T> implements Callable<Foo_1<Object>.Inner> {
+ public abstract class Inner {
+ }
+}
diff --git a/test/pending/pos/t7778/Test_2.scala b/test/pending/pos/t7778/Test_2.scala
new file mode 100644
index 0000000000..306303a99e
--- /dev/null
+++ b/test/pending/pos/t7778/Test_2.scala
@@ -0,0 +1,3 @@
+class Test {
+ null: Foo_1[_]
+}
diff --git a/test/pending/pos/treecheckers.flags b/test/pending/pos/treecheckers.flags
new file mode 100644
index 0000000000..5319681590
--- /dev/null
+++ b/test/pending/pos/treecheckers.flags
@@ -0,0 +1 @@
+-Ycheck:all \ No newline at end of file
diff --git a/test/pending/pos/treecheckers/c1.scala b/test/pending/pos/treecheckers/c1.scala
new file mode 100644
index 0000000000..b936839039
--- /dev/null
+++ b/test/pending/pos/treecheckers/c1.scala
@@ -0,0 +1,12 @@
+object Test1 {
+ def f[T](xs: Array[T]): Array[T] = xs match { case xs => xs }
+ // [check: patmat] The symbol, tpe or info of tree `(x) : Array[T]` refers to a out-of-scope symbol, type T. tree.symbol.ownerChain: value x
+ // [check: patmat] The symbol, tpe or info of tree `(x) : Array[T]` refers to a out-of-scope symbol, type T. tree.symbol.ownerChain: value x
+
+ def g[T](xs: Array[T]): Array[T] = {
+ val x1: Array[T] = xs
+ def case4() = matchEnd3(x1)
+ def matchEnd3(x: Array[T]) = x
+ case4()
+ }
+}
diff --git a/test/pending/pos/treecheckers/c2.scala b/test/pending/pos/treecheckers/c2.scala
new file mode 100644
index 0000000000..c893a5c922
--- /dev/null
+++ b/test/pending/pos/treecheckers/c2.scala
@@ -0,0 +1 @@
+class Test2(val valueVal: Int) extends AnyVal
diff --git a/test/pending/pos/treecheckers/c3.scala b/test/pending/pos/treecheckers/c3.scala
new file mode 100644
index 0000000000..e480bbfb08
--- /dev/null
+++ b/test/pending/pos/treecheckers/c3.scala
@@ -0,0 +1,8 @@
+import scala.collection.mutable.ArrayOps
+
+object Test3 {
+ implicit def genericArrayOps[T](xs: Array[T]): ArrayOps[T] = (xs match {
+ case x: Array[AnyRef] => refArrayOps[AnyRef](x)
+ case x: Array[Boolean] => booleanArrayOps(x)
+ }).asInstanceOf[ArrayOps[T]]
+}
diff --git a/test/pending/pos/treecheckers/c4.scala b/test/pending/pos/treecheckers/c4.scala
new file mode 100644
index 0000000000..2328131770
--- /dev/null
+++ b/test/pending/pos/treecheckers/c4.scala
@@ -0,0 +1,9 @@
+sealed trait Message[+A]
+class Script[A] extends Message[A] {
+ def iterator: Iterator[Message[A]] = ???
+}
+
+trait Test4[A] {
+ def f(cmd: Message[A]): Iterator[A] = cmd match { case s: Script[t] => s.iterator flatMap f }
+ def g(cmd: Message[A]) = cmd match { case s: Script[t] => s }
+}
diff --git a/test/pending/pos/treecheckers/c5.scala b/test/pending/pos/treecheckers/c5.scala
new file mode 100644
index 0000000000..43cbb65d74
--- /dev/null
+++ b/test/pending/pos/treecheckers/c5.scala
@@ -0,0 +1,3 @@
+trait Factory[CC[X] <: Traversable[X]]
+
+object Test5 extends Factory[Traversable]
diff --git a/test/pending/pos/treecheckers/c6.scala b/test/pending/pos/treecheckers/c6.scala
new file mode 100644
index 0000000000..8283655f3a
--- /dev/null
+++ b/test/pending/pos/treecheckers/c6.scala
@@ -0,0 +1,4 @@
+object Test6 {
+ import scala.reflect.ClassTag
+ def f[T: ClassTag] = implicitly[ClassTag[T]].runtimeClass match { case x => x }
+}
diff --git a/test/files/jvm/backendBugUnapply.check b/test/pending/run/t7733.check
index 9d1e7b29c2..19765bd501 100644
--- a/test/files/jvm/backendBugUnapply.check
+++ b/test/pending/run/t7733.check
@@ -1,2 +1 @@
-baz
null
diff --git a/test/pending/run/t7733/Separate_1.scala b/test/pending/run/t7733/Separate_1.scala
new file mode 100644
index 0000000000..a326ecd53e
--- /dev/null
+++ b/test/pending/run/t7733/Separate_1.scala
@@ -0,0 +1,5 @@
+package test
+
+class Separate {
+ for (i <- 1 to 10) println(i)
+} \ No newline at end of file
diff --git a/test/pending/run/t7733/Test_2.scala b/test/pending/run/t7733/Test_2.scala
new file mode 100644
index 0000000000..28358574ec
--- /dev/null
+++ b/test/pending/run/t7733/Test_2.scala
@@ -0,0 +1,9 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val tb = cm.mkToolBox()
+ val code = tb.parse("{ val x: test.Separate$$anonfun$1 = null; x }")
+ println(tb.eval(code))
+} \ No newline at end of file
diff --git a/tools/binary-repo-lib.sh b/tools/binary-repo-lib.sh
index 704bf4944d..654ba21547 100755
--- a/tools/binary-repo-lib.sh
+++ b/tools/binary-repo-lib.sh
@@ -4,7 +4,7 @@
remote_urlget="http://repo.typesafe.com/typesafe/scala-sha-bootstrap/org/scala-lang/bootstrap"
-remote_urlpush="http://typesafe.artifactoryonline.com/typesafe/scala-sha-bootstrap/org/scala-lang/bootstrap"
+remote_urlpush="http://private-repo.typesafe.com/typesafe/scala-sha-bootstrap/org/scala-lang/bootstrap"
libraryJar="$(pwd)/lib/scala-library.jar"
desired_ext=".desired.sha1"
push_jar="$(pwd)/tools/push.jar"
diff --git a/versions.properties b/versions.properties
index 044c57bb0f..2b8c1de7b7 100644
--- a/versions.properties
+++ b/versions.properties
@@ -3,5 +3,5 @@ starr.version=2.11.0-M4
# the below is used for depending on dependencies like partest
scala.binary.version=2.11.0-M4
partest.version.number=1.0-RC4
-scala-xml.version.number=1.0-RC2
+scala-xml.version.number=1.0-RC3
scala-parser-combinators.version.number=1.0-RC1