-rw-r--r--  .travis.yml | 2
-rw-r--r--  project/Build.scala | 10
-rw-r--r--  project/plugins.sbt | 3
-rw-r--r--  scalastyle-config.xml | 122
-rw-r--r--  src/dotty/DottyPredef.scala | 2
-rw-r--r--  src/dotty/Pair.scala | 2
-rw-r--r--  src/dotty/Singleton.scala | 2
-rw-r--r--  src/dotty/annotation/internal/Alias.scala | 2
-rw-r--r--  src/dotty/annotation/internal/AnnotationDefault.scala | 2
-rw-r--r--  src/dotty/annotation/internal/Child.scala | 2
-rw-r--r--  src/dotty/annotation/internal/Repeated.scala | 2
-rw-r--r--  src/dotty/language.scala | 2
-rw-r--r--  src/dotty/runtime/Arrays.scala | 6
-rw-r--r--  src/dotty/tools/backend/jvm/CollectEntryPoints.scala | 2
-rw-r--r--  src/dotty/tools/backend/jvm/DottyBackendInterface.scala | 20
-rw-r--r--  src/dotty/tools/backend/jvm/LabelDefs.scala | 12
-rw-r--r-- [-rwxr-xr-x]  src/dotty/tools/backend/jvm/scalaPrimitives.scala | 0
-rw-r--r--  src/dotty/tools/dotc/CompilationUnit.scala | 2
-rw-r--r--  src/dotty/tools/dotc/Compiler.scala | 2
-rw-r--r--  src/dotty/tools/dotc/Run.scala | 2
-rw-r--r--  src/dotty/tools/dotc/ast/Desugar.scala | 6
-rw-r--r--  src/dotty/tools/dotc/ast/PluggableTransformers.scala | 2
-rw-r--r--  src/dotty/tools/dotc/ast/Positioned.scala | 2
-rw-r--r--  src/dotty/tools/dotc/ast/TreeInfo.scala | 4
-rw-r--r--  src/dotty/tools/dotc/ast/Trees.scala | 12
-rw-r--r--  src/dotty/tools/dotc/ast/tpd.scala | 18
-rw-r--r--  src/dotty/tools/dotc/config/CompilerCommand.scala | 2
-rw-r--r--  src/dotty/tools/dotc/config/Config.scala | 14
-rw-r--r--  src/dotty/tools/dotc/config/Printers.scala | 2
-rw-r--r--  src/dotty/tools/dotc/config/ScalaSettings.scala | 2
-rw-r--r--  src/dotty/tools/dotc/config/Settings.scala | 2
-rw-r--r--  src/dotty/tools/dotc/core/Annotations.scala | 2
-rw-r--r--  src/dotty/tools/dotc/core/Constraint.scala | 54
-rw-r--r--  src/dotty/tools/dotc/core/ConstraintHandling.scala | 38
-rw-r--r--  src/dotty/tools/dotc/core/Contexts.scala | 6
-rw-r--r--  src/dotty/tools/dotc/core/Definitions.scala | 6
-rw-r--r--  src/dotty/tools/dotc/core/Denotations.scala | 6
-rw-r--r--  src/dotty/tools/dotc/core/Flags.scala | 12
-rw-r--r--  src/dotty/tools/dotc/core/NameOps.scala | 6
-rw-r--r--  src/dotty/tools/dotc/core/OrderingConstraint.scala | 194
-rw-r--r--  src/dotty/tools/dotc/core/Periods.scala | 2
-rw-r--r--  src/dotty/tools/dotc/core/Phases.scala | 2
-rw-r--r--  src/dotty/tools/dotc/core/Scopes.scala | 8
-rw-r--r--  src/dotty/tools/dotc/core/Signature.scala | 2
-rw-r--r--  src/dotty/tools/dotc/core/Skolemization.scala | 32
-rw-r--r--  src/dotty/tools/dotc/core/StdNames.scala | 2
-rw-r--r--  src/dotty/tools/dotc/core/Substituters.scala | 2
-rw-r--r--  src/dotty/tools/dotc/core/SymDenotations.scala | 4
-rw-r--r--  src/dotty/tools/dotc/core/SymbolLoaders.scala | 6
-rw-r--r--  src/dotty/tools/dotc/core/Symbols.scala | 4
-rw-r--r--  src/dotty/tools/dotc/core/TypeApplications.scala | 10
-rw-r--r--  src/dotty/tools/dotc/core/TypeComparer.scala | 32
-rw-r--r--  src/dotty/tools/dotc/core/TypeErasure.scala | 2
-rw-r--r--  src/dotty/tools/dotc/core/TypeOps.scala | 4
-rw-r--r--  src/dotty/tools/dotc/core/TyperState.scala | 2
-rw-r--r--  src/dotty/tools/dotc/core/Types.scala | 60
-rw-r--r--  src/dotty/tools/dotc/core/Uniques.scala | 2
-rw-r--r--  src/dotty/tools/dotc/core/pickling/AbstractFileReader.scala | 6
-rw-r--r--  src/dotty/tools/dotc/core/pickling/ByteCodecs.scala | 2
-rw-r--r--  src/dotty/tools/dotc/core/pickling/ClassfileParser.scala | 18
-rw-r--r--  src/dotty/tools/dotc/core/pickling/DottyUnpickler.scala | 10
-rw-r--r--  src/dotty/tools/dotc/core/pickling/NameBuffer.scala | 20
-rw-r--r--  src/dotty/tools/dotc/core/pickling/PickleBuffer.scala | 2
-rw-r--r--  src/dotty/tools/dotc/core/pickling/PickleFormat.scala | 38
-rw-r--r--  src/dotty/tools/dotc/core/pickling/PositionPickler.scala | 18
-rw-r--r--  src/dotty/tools/dotc/core/pickling/PositionUnpickler.scala | 6
-rw-r--r--  src/dotty/tools/dotc/core/pickling/TastyBuffer.scala | 54
-rw-r--r--  src/dotty/tools/dotc/core/pickling/TastyName.scala | 10
-rw-r--r--  src/dotty/tools/dotc/core/pickling/TastyPickler.scala | 14
-rw-r--r--  src/dotty/tools/dotc/core/pickling/TastyPrinter.scala | 30
-rw-r--r--  src/dotty/tools/dotc/core/pickling/TastyReader.scala | 54
-rw-r--r--  src/dotty/tools/dotc/core/pickling/TastyUnpickler.scala | 32
-rw-r--r--  src/dotty/tools/dotc/core/pickling/TreeBuffer.scala | 34
-rw-r--r--  src/dotty/tools/dotc/core/pickling/TreePickler.scala | 92
-rw-r--r--  src/dotty/tools/dotc/core/pickling/TreeUnpickler.scala | 184
-rw-r--r--  src/dotty/tools/dotc/core/pickling/UnPickler.scala | 10
-rw-r--r--  src/dotty/tools/dotc/parsing/JavaParsers.scala | 10
-rw-r--r--  src/dotty/tools/dotc/parsing/MarkupParsers.scala | 8
-rw-r--r--  src/dotty/tools/dotc/parsing/Parsers.scala | 8
-rw-r--r--  src/dotty/tools/dotc/parsing/Scanners.scala | 4
-rw-r--r--  src/dotty/tools/dotc/parsing/ScriptParsers.scala | 2
-rw-r--r--  src/dotty/tools/dotc/parsing/Utility.scala | 5
-rw-r--r--  src/dotty/tools/dotc/parsing/package.scala | 2
-rw-r--r--  src/dotty/tools/dotc/printing/Disambiguation.scala | 2
-rw-r--r--  src/dotty/tools/dotc/printing/PlainPrinter.scala | 14
-rw-r--r--  src/dotty/tools/dotc/printing/RefinedPrinter.scala | 24
-rw-r--r--  src/dotty/tools/dotc/printing/Texts.scala | 2
-rw-r--r--  src/dotty/tools/dotc/reporting/Reporter.scala | 2
-rw-r--r--  src/dotty/tools/dotc/transform/CapturedVars.scala | 2
-rw-r--r--  src/dotty/tools/dotc/transform/CollectEntryPoints.scala | 2
-rw-r--r--  src/dotty/tools/dotc/transform/Constructors.scala | 4
-rw-r--r--  src/dotty/tools/dotc/transform/ElimByName.scala | 2
-rw-r--r--  src/dotty/tools/dotc/transform/ElimRepeated.scala | 8
-rw-r--r--  src/dotty/tools/dotc/transform/Erasure.scala | 14
-rw-r--r--  src/dotty/tools/dotc/transform/ExplicitOuter.scala | 8
-rw-r--r--  src/dotty/tools/dotc/transform/ExtensionMethods.scala | 8
-rw-r--r--  src/dotty/tools/dotc/transform/FirstTransform.scala | 4
-rw-r--r--  src/dotty/tools/dotc/transform/FullParameterization.scala | 4
-rw-r--r--  src/dotty/tools/dotc/transform/LazyVals.scala | 6
-rw-r--r--  src/dotty/tools/dotc/transform/Literalize.scala | 2
-rw-r--r--  src/dotty/tools/dotc/transform/Memoize.scala | 2
-rw-r--r--  src/dotty/tools/dotc/transform/NormalizeFlags.scala | 8
-rw-r--r--  src/dotty/tools/dotc/transform/PatternMatcher.scala | 44
-rw-r--r--  src/dotty/tools/dotc/transform/Pickler.scala | 22
-rw-r--r--  src/dotty/tools/dotc/transform/ResolveSuper.scala | 2
-rw-r--r--  src/dotty/tools/dotc/transform/RestoreScopes.scala | 4
-rw-r--r--  src/dotty/tools/dotc/transform/Splitter.scala | 2
-rw-r--r--  src/dotty/tools/dotc/transform/SuperAccessors.scala | 4
-rw-r--r--  src/dotty/tools/dotc/transform/TailRec.scala | 2
-rw-r--r--  src/dotty/tools/dotc/transform/TraitConstructors.scala | 2
-rw-r--r--  src/dotty/tools/dotc/transform/TreeChecker.scala | 10
-rw-r--r--  src/dotty/tools/dotc/transform/TreeGen.scala | 2
-rw-r--r--  src/dotty/tools/dotc/transform/TreeTransform.scala | 2
-rw-r--r--  src/dotty/tools/dotc/transform/TypeTestsCasts.scala | 2
-rw-r--r--  src/dotty/tools/dotc/typer/Applications.scala | 2
-rw-r--r--  src/dotty/tools/dotc/typer/Checking.scala | 2
-rw-r--r--  src/dotty/tools/dotc/typer/ConstFold.scala | 8
-rw-r--r--  src/dotty/tools/dotc/typer/ErrorReporting.scala | 2
-rw-r--r--  src/dotty/tools/dotc/typer/EtaExpansion.scala | 2
-rw-r--r--  src/dotty/tools/dotc/typer/FrontEnd.scala | 10
-rw-r--r--  src/dotty/tools/dotc/typer/Implicits.scala | 4
-rw-r--r--  src/dotty/tools/dotc/typer/ImportInfo.scala | 2
-rw-r--r--  src/dotty/tools/dotc/typer/Inferencing.scala | 4
-rw-r--r--  src/dotty/tools/dotc/typer/Mode.scala | 8
-rw-r--r--  src/dotty/tools/dotc/typer/Namer.scala | 26
-rw-r--r--  src/dotty/tools/dotc/typer/ProtoTypes.scala | 10
-rw-r--r--  src/dotty/tools/dotc/typer/ReTyper.scala | 2
-rw-r--r--  src/dotty/tools/dotc/typer/RefChecks.scala | 8
-rw-r--r--  src/dotty/tools/dotc/typer/TypeAssigner.scala | 4
-rw-r--r--  src/dotty/tools/dotc/typer/Typer.scala | 18
-rw-r--r--  src/dotty/tools/dotc/typer/Variances.scala | 4
-rw-r--r--  src/dotty/tools/dotc/util/Attachment.scala | 2
-rw-r--r--  src/dotty/tools/dotc/util/DotClass.scala | 2
-rw-r--r--  src/dotty/tools/dotc/util/NameTransformer.scala | 6
-rw-r--r--  src/dotty/tools/dotc/util/Positions.scala | 6
-rw-r--r--  src/dotty/tools/dotc/util/SixteenNibbles.scala | 2
-rw-r--r--  src/dotty/tools/dotc/util/SourcePosition.scala | 2
-rw-r--r--  src/dotty/tools/dotc/util/Stats.scala | 2
-rw-r--r--  src/dotty/tools/dotc/util/Util.scala | 12
-rw-r--r--  src/dotty/tools/dotc/util/common.scala | 2
-rw-r--r--  src/dotty/tools/io/ClassPath.scala | 6
-rw-r--r--  src/typedapply.scala | 2
-rw-r--r--  test/dotc/tests.scala | 6
-rw-r--r--  test/test/ContravariantTrees.scala | 2
-rw-r--r--  test/test/DeSugarTest.scala | 4
-rw-r--r--  test/test/ParserTest.scala | 2
-rw-r--r--  test/test/ScannerTest.scala | 2
-rw-r--r--  test/test/parseFile.scala | 4
-rw-r--r--  test/test/showTree.scala | 6
-rw-r--r--  test/x/PatMat.scala | 2
-rw-r--r-- [-rwxr-xr-x]  tests/disabled/java-interop/failing/t1459/App.scala | 0
-rw-r--r--  tests/disabled/not-representable/MailBox.scala | 4
-rw-r--r--  tests/disabled/not-representable/pos/annotated-treecopy/Impls_Macros_1.scala | 2
-rw-r--r--  tests/disabled/structural-type/pos/t3363-old.scala | 14
-rw-r--r--  tests/neg/assignments.scala | 8
-rw-r--r--  tests/neg/patternUnsoundness.scala | 10
-rw-r--r--  tests/neg/t1164.scala | 18
-rw-r--r--  tests/neg/tailcall/t6574.scala | 2
-rw-r--r--  tests/neg/templateParents.scala | 2
-rw-r--r--  tests/neg/typedIdents.scala | 6
-rw-r--r--  tests/neg/typedapply.scala | 12
-rw-r--r--  tests/pending/pos/depmet_implicit_oopsla_session.scala | 2
-rw-r--r--  tests/pending/pos/depmet_implicit_oopsla_session_2.scala | 2
-rw-r--r--  tests/pending/pos/depmet_implicit_oopsla_session_simpler.scala | 2
-rw-r--r--  tests/pending/pos/existentials-harmful.scala | 4
-rw-r--r--  tests/pending/pos/lambdalift1.scala | 8
-rw-r--r-- [-rwxr-xr-x]  tests/pending/pos/t1756.scala | 14
-rw-r--r-- [-rwxr-xr-x]  tests/pending/pos/t2913.scala | 0
-rw-r--r--  tests/pending/pos/t3480.scala | 2
-rw-r--r-- [-rwxr-xr-x]  tests/pending/pos/t3568.scala | 0
-rw-r--r--  tests/pending/pos/t4176b.scala | 6
-rw-r--r-- [-rwxr-xr-x]  tests/pending/pos/t4553.scala | 0
-rw-r--r--  tests/pending/pos/t4579.scala | 4
-rw-r--r--  tests/pending/pos/t5012.scala | 2
-rw-r--r--  tests/pending/pos/t5029.scala | 2
-rw-r--r--  tests/pending/pos/t5119.scala | 4
-rw-r--r--  tests/pending/pos/t5541.scala | 4
-rw-r--r--  tests/pending/pos/t573.scala | 2
-rw-r--r--  tests/pending/pos/t5967.scala | 8
-rw-r--r--  tests/pending/pos/t6260a.scala | 2
-rw-r--r--  tests/pending/pos/t6335.scala | 8
-rw-r--r--  tests/pending/pos/t6966.scala | 14
-rw-r--r--  tests/pending/pos/t7011.scala | 2
-rw-r--r--  tests/pending/pos/t7022.scala | 8
-rw-r--r--  tests/pending/pos/t796.scala | 2
-rw-r--r--  tests/pending/pos/t7983.scala | 16
-rw-r--r--  tests/pending/pos/t7987/Test_2.scala | 14
-rw-r--r--  tests/pending/pos/t8023.scala | 2
-rw-r--r--  tests/pending/pos/t8111.scala | 8
-rw-r--r--  tests/pending/pos/t8301b.scala | 10
-rw-r--r--  tests/pending/pos/t8367.scala | 4
-rw-r--r--  tests/pending/pos/tcpoly_seq.scala | 2
-rw-r--r--  tests/pending/pos/tcpoly_seq_typealias.scala | 2
-rw-r--r--  tests/pending/pos/test5.scala | 22
-rw-r--r--  tests/pending/pos/test5refine.scala | 30
-rw-r--r--  tests/pending/pos/unapplySeq.scala | 6
-rw-r--r--  tests/pickling/Coder.scala | 14
-rw-r--r--  tests/pickling/Labels.scala | 12
-rw-r--r--  tests/pickling/nameddefaults.scala | 16
-rw-r--r--  tests/pickling/selftypes.scala | 18
-rw-r--r--  tests/pickling/tryTyping.scala | 2
-rw-r--r--  tests/pickling/varargs.scala | 2
-rw-r--r--  tests/pos/Bridges.scala | 2
-rw-r--r--  tests/pos/Coder.scala | 14
-rw-r--r--  tests/pos/Labels.scala | 12
-rw-r--r--  tests/pos/List1.scala | 6
-rw-r--r--  tests/pos/Meter.scala | 18
-rw-r--r--  tests/pos/SI-7638.scala | 20
-rw-r--r--  tests/pos/SI-7638a.scala | 20
-rw-r--r--  tests/pos/assignments.scala | 8
-rw-r--r--  tests/pos/blockescapes.scala | 6
-rw-r--r--  tests/pos/implicits1.scala | 2
-rw-r--r--  tests/pos/inferred.scala | 2
-rw-r--r-- [-rwxr-xr-x]  tests/pos/java-interop/t2433/Test.scala | 0
-rw-r--r--  tests/pos/java-interop/t2940/Error.scala | 2
-rw-r--r-- [-rwxr-xr-x]  tests/pos/java-interop/t2956/t2956.scala | 0
-rw-r--r--  tests/pos/nameddefaults.scala | 16
-rw-r--r--  tests/pos/packageobject.scala | 4
-rw-r--r--  tests/pos/selftypes.scala | 18
-rw-r--r--  tests/pos/sigs.scala | 26
-rw-r--r--  tests/pos/t0031.scala | 12
-rw-r--r--  tests/pos/t0039.scala | 4
-rw-r--r--  tests/pos/t0061.scala | 2
-rw-r--r--  tests/pos/t0066.scala | 2
-rw-r--r--  tests/pos/t0770.scala | 8
-rw-r--r--  tests/pos/t1085.scala | 2
-rw-r--r--  tests/pos/t1133.scala | 4
-rw-r--r--  tests/pos/t1168.scala | 2
-rw-r--r-- [-rwxr-xr-x]  tests/pos/t1722/Test.scala | 0
-rw-r--r-- [-rwxr-xr-x]  tests/pos/t1722/Top.scala | 0
-rw-r--r-- [-rwxr-xr-x]  tests/pos/t2060.scala | 0
-rw-r--r-- [-rwxr-xr-x]  tests/pos/t2082.scala | 2
-rw-r--r-- [-rwxr-xr-x]  tests/pos/t2179.scala | 0
-rw-r--r--  tests/pos/t2208_pos.scala | 8
-rw-r--r--  tests/pos/t2305.scala | 22
-rw-r--r--  tests/pos/t2405.scala | 20
-rw-r--r-- [-rwxr-xr-x]  tests/pos/t2425.scala | 0
-rw-r--r-- [-rwxr-xr-x]  tests/pos/t2429.scala | 0
-rw-r--r--  tests/pos/t2444.scala | 2
-rw-r--r-- [-rwxr-xr-x]  tests/pos/t2484.scala | 0
-rw-r--r--  tests/pos/t2500.scala | 8
-rw-r--r-- [-rwxr-xr-x]  tests/pos/t2504.scala | 0
-rw-r--r-- [-rwxr-xr-x]  tests/pos/t2545.scala | 0
-rw-r--r--  tests/pos/t2591.scala | 6
-rw-r--r--  tests/pos/t262.scala | 2
-rw-r--r-- [-rwxr-xr-x]  tests/pos/t2635.scala | 0
-rw-r--r-- [-rwxr-xr-x]  tests/pos/t2683.scala | 0
-rw-r--r-- [-rwxr-xr-x]  tests/pos/t3174.scala | 0
-rw-r--r--  tests/pos/t3278.scala | 36
-rw-r--r--  tests/pos/tailcall/i321.scala | 2
-rw-r--r--  tests/pos/tailcall/tailcall.scala | 2
-rw-r--r--  tests/pos/templateParents.scala | 4
-rw-r--r--  tests/pos/tryTyping.scala | 2
-rw-r--r--  tests/pos/typedIdents.scala | 6
-rw-r--r--  tests/pos/typedapply.scala | 2
-rw-r--r--  tests/pos/varargs.scala | 2
-rw-r--r--  tests/untried/neg-with-implicits/implicit-shadow.scala | 6
-rw-r--r--  tests/untried/neg-with-implicits/implicits.scala | 2
-rw-r--r--  tests/untried/neg-with-implicits/t2405.scala | 10
-rw-r--r-- [-rwxr-xr-x]  tests/untried/neg-with-implicits/t3006.scala | 0
-rw-r--r-- [-rwxr-xr-x]  tests/untried/neg-with-implicits/t3224.scala | 0
-rw-r--r--  tests/untried/neg-with-implicits/t7519-b/Mac_1.scala | 2
-rw-r--r--  tests/untried/neg/for-comprehension-old.scala | 16
-rw-r--r--  tests/untried/neg/illegal-stmt-start.scala | 2
-rw-r--r--  tests/untried/neg/literate_existentials.scala | 12
-rw-r--r--  tests/untried/neg/lubs.scala | 2
-rw-r--r--  tests/untried/neg/macro-invalidusage-presuper/Impls_1.scala | 2
-rw-r--r--  tests/untried/neg/macro-invalidusage-presuper/Macros_Test_2.scala | 2
-rw-r--r--  tests/untried/neg/names-defaults-neg.scala | 2
-rw-r--r-- [-rwxr-xr-x]  tests/untried/neg/override.scala | 0
-rw-r--r--  tests/untried/neg/specification-scopes/P_2.scala | 16
-rw-r--r--  tests/untried/neg/switch.scala | 2
-rw-r--r--  tests/untried/neg/t0764.scala | 2
-rw-r--r--  tests/untried/neg/t1181.scala | 2
-rw-r--r--  tests/untried/neg/t1432.scala | 2
-rw-r--r--  tests/untried/neg/t2066b.scala | 22
-rw-r--r--  tests/untried/neg/t2208.scala | 8
-rw-r--r--  tests/untried/neg/t2275a.scala | 2
-rw-r--r-- [-rwxr-xr-x]  tests/untried/neg/t2336.scala | 0
-rw-r--r-- [-rwxr-xr-x]  tests/untried/neg/t2494.scala | 0
-rw-r--r-- [-rwxr-xr-x]  tests/untried/neg/t2773.scala | 0
-rw-r--r-- [-rwxr-xr-x]  tests/untried/neg/t2779.scala | 0
-rw-r--r-- [-rwxr-xr-x]  tests/untried/neg/t2870.scala | 0
-rw-r--r-- [-rwxr-xr-x]  tests/untried/neg/t2918.scala | 0
-rw-r--r--  tests/untried/neg/t2968.scala | 24
-rw-r--r--  tests/untried/neg/t3189.scala | 2
-rw-r--r--  tests/untried/neg/t3209.scala | 2
-rw-r--r--  tests/untried/neg/t3913.scala | 2
-rw-r--r--  tests/untried/neg/t4069.scala | 2
-rw-r--r--  tests/untried/neg/t4460b.scala | 8
-rw-r--r--  tests/untried/neg/t4584.scala | 2
-rw-r--r--  tests/untried/neg/t4818.scala | 6
-rw-r--r--  tests/untried/neg/t5702-neg-bad-xbrace.scala | 2
-rw-r--r--  tests/untried/neg/t5702-neg-ugly-xbrace.scala | 2
-rw-r--r--  tests/untried/neg/t576.scala | 2
-rw-r--r--  tests/untried/neg/t5856.scala | 2
-rw-r--r--  tests/untried/neg/t6214.scala | 2
-rw-r--r--  tests/untried/neg/t6258.scala | 8
-rw-r--r--  tests/untried/neg/t6558.scala | 2
-rw-r--r-- [-rwxr-xr-x]  tests/untried/neg/t771.scala | 0
-rw-r--r--  tests/untried/neg/t7757b.scala | 2
-rw-r--r--  tests/untried/neg/t7872.scala | 2
-rw-r--r--  tests/untried/neg/t7872b.scala | 8
-rw-r--r--  tests/untried/neg/t7872c.scala | 2
-rw-r--r--  tests/untried/neg/t8158/Macros_1.scala | 2
-rw-r--r--  tests/untried/neg/t8158/Test_2.scala | 2
-rw-r--r--  tests/untried/neg/t856.scala | 2
-rw-r--r--  tests/untried/neg/t963b.scala | 2
-rw-r--r--  tests/untried/neg/unicode-unterminated-quote.scala | 2
-rw-r--r--  tests/untried/neg/warn-unused-privates.scala | 2
-rw-r--r--  tests/untried/neg/xmlcorner.scala | 2
-rw-r--r--  tests/untried/neg/xmltruncated7.scala | 2
-rw-r--r--  tests/untried/pos/FPTest.scala | 2
-rw-r--r--  tests/untried/pos/SI-4012-a.scala | 2
-rw-r--r--  tests/untried/pos/SI-4012-b.scala | 4
-rw-r--r--  tests/untried/pos/ilya2/A.scala | 2
-rw-r--r--  tests/untried/pos/iterator-traversable-mix.scala | 2
-rw-r--r-- [-rwxr-xr-x]  tests/untried/pos/lexical.scala | 0
-rw-r--r--  tests/untried/pos/nested.scala | 6
-rw-r--r-- [-rwxr-xr-x]  tests/untried/pos/packageobjs.scala | 0
-rw-r--r--  tests/untried/pos/pos-bug1210.scala | 10
-rw-r--r--  tests/untried/pos/scoping2.scala | 4
-rw-r--r--  tests/untried/pos/simple-exceptions.scala | 2
-rw-r--r--  tests/untried/pos/spec-sparsearray-new.scala | 2
-rw-r--r--  tests/untried/pos/spec-sparsearray-old.scala | 2
-rw-r--r-- [-rwxr-xr-x]  tests/untried/pos/spec-t6286.scala | 0
-rw-r--r--  tests/untried/pos/sudoku.scala | 12
-rw-r--r--  tests/untried/pos/t262.scala | 2
-rw-r--r-- [-rwxr-xr-x]  tests/untried/pos/t2635.scala | 0
-rw-r--r-- [-rwxr-xr-x]  tests/untried/pos/t2683.scala | 0
-rw-r--r--  tests/untried/pos/t3136.scala | 4
-rw-r--r-- [-rwxr-xr-x]  tests/untried/pos/t3174b.scala | 0
-rw-r--r--  tests/untried/pos/t3570.scala | 10
-rw-r--r--  tests/untried/pos/t3578.scala | 20
-rw-r--r--  tests/untried/pos/t3670.scala | 4
-rw-r--r--  tests/untried/pos/t4220.scala | 2
-rw-r--r--  tests/untried/pos/t443.scala | 14
-rw-r--r--  tests/untried/pos/t4842.scala | 16
-rw-r--r--  tests/untried/pos/t592.scala | 4
-rw-r--r--  tests/untried/pos/t6028/t6028_1.scala | 2
-rw-r--r--  tests/untried/pos/t6028/t6028_2.scala | 4
-rw-r--r--  tests/untried/pos/t6601/UsePrivateValueClass_2.scala | 8
-rw-r--r--  tests/untried/pos/t7532b/A_1.scala | 4
-rw-r--r--  tests/untried/pos/t789.scala | 12
-rw-r--r--  tests/untried/pos/t8046.scala | 6
-rw-r--r--  tests/untried/pos/t8046b.scala | 4
-rw-r--r--  tests/untried/pos/t8060.scala | 4
-rw-r--r--  tests/untried/pos/t8170.scala | 2
-rw-r--r--  tests/untried/pos/t8170b.scala | 4
-rw-r--r--  tests/untried/pos/t8315.scala | 4
-rw-r--r--  tests/untried/pos/t8363.scala | 2
-rw-r--r--  tests/untried/pos/t8376/Test.scala | 2
-rw-r--r--  tests/untried/pos/tcpoly_infer_easy.scala | 6
-rw-r--r--  tests/untried/pos/test4refine.scala | 2
-rw-r--r--  tests/untried/pos/unapplyComplex.scala | 12
-rw-r--r--  tests/untried/pos/unapplyVal.scala | 2
356 files changed, 1541 insertions, 1417 deletions
diff --git a/.travis.yml b/.travis.yml
index 110cae400..c8061b4f8 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,6 +1,6 @@
language: scala
script:
- - sbt -Ddotty.travis.build=yes update compile test
+ - sbt -Ddotty.travis.build=yes update compile test scalastyle
jdk:
- oraclejdk8
notifications:
diff --git a/project/Build.scala b/project/Build.scala
index 8bba50e4a..e5c4a0699 100644
--- a/project/Build.scala
+++ b/project/Build.scala
@@ -27,7 +27,7 @@ object DottyBuild extends Build {
// to get Scala 2.11
resolvers += Resolver.sonatypeRepo("releases"),
-
+
// get reflect and xml onboard
libraryDependencies ++= Seq("org.scala-lang" % "scala-reflect" % scalaVersion.value,
"org.scala-lang.modules" %% "scala-xml" % "1.0.1",
@@ -59,7 +59,7 @@ object DottyBuild extends Build {
val path = for {
file <- attList.map(_.data)
path = file.getAbsolutePath
- } yield "-Xbootclasspath/p:" + path
+ } yield "-Xbootclasspath/p:" + path
// dotty itself needs to be in the bootclasspath
val fullpath = ("-Xbootclasspath/a:" + bin) :: path.toList
// System.err.println("BOOTPATH: " + fullpath)
@@ -109,8 +109,8 @@ object DottyBuild extends Build {
val path = for {
file <- attList.map(_.data)
path = file.getAbsolutePath
- prefix = if(path.endsWith(".jar")) "p" else "a"
- } yield "-Xbootclasspath/"+ prefix +":" + path
+ prefix = if (path.endsWith(".jar")) "p" else "a"
+ } yield "-Xbootclasspath/" + prefix + ":" + path
// dotty itself needs to be in the bootclasspath
val fullpath = ("-Xbootclasspath/a:" + bin) :: path.toList
// System.err.println("BOOTPATH: " + fullpath)
@@ -121,7 +121,7 @@ object DottyBuild extends Build {
else
List()
val res = agentOptions ::: travis_build ::: fullpath
- println("Running with javaOptions: " +res)
+ println("Running with javaOptions: " + res)
res
}
)
diff --git a/project/plugins.sbt b/project/plugins.sbt
index 654570000..47bc44dfe 100644
--- a/project/plugins.sbt
+++ b/project/plugins.sbt
@@ -4,3 +4,6 @@
// Scala IDE project file generator
addSbtPlugin("com.typesafe.sbteclipse" % "sbteclipse-plugin" % "2.2.0")
+
+addSbtPlugin("org.scalastyle" %% "scalastyle-sbt-plugin" % "0.6.0")
+
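With the plugin above on the build classpath, the style check becomes an ordinary sbt task, which is what the amended Travis command (`sbt ... compile test scalastyle`) now invokes. For reference, a minimal, hypothetical `build.sbt` fragment showing how the check could be configured explicitly; the `scalastyleConfig` and `scalastyleFailOnError` key names are assumptions about the plugin's settings API (they vary across plugin versions) and are not part of this commit, whose defaults already behave this way:

    // Hypothetical sketch, not part of this commit; the key names are assumed.
    // By default the plugin reads scalastyle-config.xml from the project root,
    // which is exactly the file added by this commit.
    scalastyleConfig := baseDirectory.value / "scalastyle-config.xml"
    scalastyleFailOnError := false  // keep violations as warnings, as the added config does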
diff --git a/scalastyle-config.xml b/scalastyle-config.xml
new file mode 100644
index 000000000..2b2f8c8f2
--- /dev/null
+++ b/scalastyle-config.xml
@@ -0,0 +1,122 @@
+<scalastyle>
+ <name>Scalastyle standard configuration</name>
+ <check level="warning" class="org.scalastyle.file.FileTabChecker" enabled="true"></check>
+ <check level="warning" class="org.scalastyle.file.FileLengthChecker" enabled="false">
+ <parameters>
+ <parameter name="maxFileLength"><![CDATA[800]]></parameter>
+ </parameters>
+ </check>
+ <check level="warning" class="org.scalastyle.file.HeaderMatchesChecker" enabled="false">
+ <parameters>
+ <parameter name="header"><![CDATA[// Copyright (C) 2011-2012 the original author or authors.
+// See the LICENCE.txt file distributed with this work for additional
+// information regarding copyright ownership.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.]]></parameter>
+ </parameters>
+ </check>
+ <check level="warning" class="org.scalastyle.scalariform.EnsureSingleSpaceAfterTokenChecker" enabled="false">
+ <parameters>
+ <parameter name="tokens">IF</parameter>
+ </parameters>
+ </check>
+ <check level="warning" class="org.scalastyle.scalariform.SpacesAfterPlusChecker" enabled="false"></check>
+ <check level="warning" class="org.scalastyle.file.WhitespaceEndOfLineChecker" enabled="true"></check>
+ <check level="warning" class="org.scalastyle.scalariform.SpacesBeforePlusChecker" enabled="true"></check>
+ <check level="warning" class="org.scalastyle.file.FileLineLengthChecker" enabled="false">
+ <parameters>
+ <parameter name="maxLineLength"><![CDATA[160]]></parameter>
+ <parameter name="tabSize"><![CDATA[4]]></parameter>
+ </parameters>
+ </check>
+ <check level="warning" class="org.scalastyle.scalariform.ClassNamesChecker" enabled="true">
+ <parameters>
+ <parameter name="regex"><![CDATA[[A-Z][A-Za-z]*]]></parameter>
+ </parameters>
+ </check>
+ <check level="warning" class="org.scalastyle.scalariform.ObjectNamesChecker" enabled="true">
+ <parameters>
+ <parameter name="regex"><![CDATA[[A-Z]?[A-Za-z]*]]></parameter>
+ </parameters>
+ </check>
+ <check level="warning" class="org.scalastyle.scalariform.PackageObjectNamesChecker" enabled="true">
+ <parameters>
+ <parameter name="regex"><![CDATA[^[a-z][A-Za-z]*$]]></parameter>
+ </parameters>
+ </check>
+ <check level="warning" class="org.scalastyle.scalariform.EqualsHashCodeChecker" enabled="false"></check>
+ <check level="warning" class="org.scalastyle.scalariform.IllegalImportsChecker" enabled="true">
+ <parameters>
+ <parameter name="illegalImports"><![CDATA[sun._,java.awt._]]></parameter>
+ </parameters>
+ </check>
+ <check level="warning" class="org.scalastyle.scalariform.ParameterNumberChecker" enabled="false">
+ <parameters>
+ <parameter name="maxParameters"><![CDATA[8]]></parameter>
+ </parameters>
+ </check>
+ <check level="warning" class="org.scalastyle.scalariform.MagicNumberChecker" enabled="false">
+ <parameters>
+ <parameter name="ignore"><![CDATA[-1,0,1,2,3]]></parameter>
+ </parameters>
+ </check>
+ <check level="warning" class="org.scalastyle.scalariform.NoWhitespaceBeforeLeftBracketChecker" enabled="true"></check>
+ <check level="warning" class="org.scalastyle.scalariform.NoWhitespaceAfterLeftBracketChecker" enabled="true"></check>
+ <check level="warning" class="org.scalastyle.scalariform.ReturnChecker" enabled="false"></check>
+ <check level="warning" class="org.scalastyle.scalariform.NullChecker" enabled="false"></check>
+ <check level="warning" class="org.scalastyle.scalariform.NoCloneChecker" enabled="false"></check>
+ <check level="warning" class="org.scalastyle.scalariform.NoFinalizeChecker" enabled="true"></check>
+ <check level="warning" class="org.scalastyle.scalariform.CovariantEqualsChecker" enabled="true"></check>
+ <check level="warning" class="org.scalastyle.scalariform.StructuralTypeChecker" enabled="false"></check>
+ <check level="warning" class="org.scalastyle.file.RegexChecker" enabled="true">
+ <parameters>
+ <parameter name="regex"><![CDATA[if\(]]></parameter>
+ </parameters>
+ </check>
+ <check level="warning" class="org.scalastyle.scalariform.NumberOfTypesChecker" enabled="false">
+ <parameters>
+ <parameter name="maxTypes"><![CDATA[30]]></parameter>
+ </parameters>
+ </check>
+ <check level="warning" class="org.scalastyle.scalariform.CyclomaticComplexityChecker" enabled="false">
+ <parameters>
+ <parameter name="maximum"><![CDATA[10]]></parameter>
+ </parameters>
+ </check>
+ <check level="warning" class="org.scalastyle.scalariform.UppercaseLChecker" enabled="true"></check>
+ <check level="warning" class="org.scalastyle.scalariform.SimplifyBooleanExpressionChecker" enabled="false"></check>
+ <check level="warning" class="org.scalastyle.scalariform.IfBraceChecker" enabled="false">
+ <parameters>
+ <parameter name="singleLineAllowed"><![CDATA[true]]></parameter>
+ <parameter name="doubleLineAllowed"><![CDATA[true]]></parameter>
+ </parameters>
+ </check>
+ <check level="warning" class="org.scalastyle.scalariform.MethodLengthChecker" enabled="false">
+ <parameters>
+ <parameter name="maxLength"><![CDATA[50]]></parameter>
+ </parameters>
+ </check>
+ <check level="warning" class="org.scalastyle.scalariform.MethodNamesChecker" enabled="false">
+ <parameters>
+ <parameter name="regex"><![CDATA[^[a-z][A-Za-z0-9]*(_=)?$]]></parameter>
+ </parameters>
+ </check>
+ <check level="warning" class="org.scalastyle.scalariform.NumberOfMethodsInTypeChecker" enabled="false">
+ <parameters>
+ <parameter name="maxMethods"><![CDATA[30]]></parameter>
+ </parameters>
+ </check>
+ <check level="warning" class="org.scalastyle.scalariform.PublicMethodsHaveTypeChecker" enabled="false"></check>
+ <check level="warning" class="org.scalastyle.file.NewLineAtEofChecker" enabled="true"></check>
+ <check level="warning" class="org.scalastyle.file.NoNewLineAtEofChecker" enabled="false"></check>
+</scalastyle>
\ No newline at end of file
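The checks enabled above (no tabs, no end-of-line whitespace, a space before `+`, no whitespace next to `[`, a regex ban on `if(`, and a newline at the end of every file) correspond directly to the mechanical clean-ups in the remainder of this commit. A small hypothetical snippet, not taken from the repository, showing the kind of code the enabled rules flag and the accepted form:

    // Flagged: `if(` without a space (RegexChecker) and a missing space
    // before `+` (SpacesBeforePlusChecker); trailing whitespace would also be reported.
    def bad(i: Int): String = if(i == 0) "zero" else "n="+ i

    // Accepted: matches the style applied throughout this commit.
    def good(i: Int): String = if (i == 0) "zero" else "n=" + i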
diff --git a/src/dotty/DottyPredef.scala b/src/dotty/DottyPredef.scala
index 3ffa4049f..b4580b6a3 100644
--- a/src/dotty/DottyPredef.scala
+++ b/src/dotty/DottyPredef.scala
@@ -7,4 +7,4 @@ object DottyPredef {
/** implicits for ClassTag and TypeTag. Should be implemented with macros */
implicit def classTag[T]: ClassTag[T] = ???
implicit def typeTag[T]: TypeTag[T] = ???
-}
\ No newline at end of file
+}
diff --git a/src/dotty/Pair.scala b/src/dotty/Pair.scala
index 73696b69c..2322fe169 100644
--- a/src/dotty/Pair.scala
+++ b/src/dotty/Pair.scala
@@ -2,4 +2,4 @@ package dotty
class Pair[T, U](x: T, y: U) {
-}
\ No newline at end of file
+}
diff --git a/src/dotty/Singleton.scala b/src/dotty/Singleton.scala
index 06f975b01..4ba57a12d 100644
--- a/src/dotty/Singleton.scala
+++ b/src/dotty/Singleton.scala
@@ -2,4 +2,4 @@ package dotty
class Singleton {
-}
\ No newline at end of file
+}
diff --git a/src/dotty/annotation/internal/Alias.scala b/src/dotty/annotation/internal/Alias.scala
index 44695c7dd..de51153f1 100644
--- a/src/dotty/annotation/internal/Alias.scala
+++ b/src/dotty/annotation/internal/Alias.scala
@@ -4,4 +4,4 @@ import scala.annotation.Annotation
class Alias(aliased: Any) extends Annotation {
-}
\ No newline at end of file
+}
diff --git a/src/dotty/annotation/internal/AnnotationDefault.scala b/src/dotty/annotation/internal/AnnotationDefault.scala
index 1405c94bd..90471b8fe 100644
--- a/src/dotty/annotation/internal/AnnotationDefault.scala
+++ b/src/dotty/annotation/internal/AnnotationDefault.scala
@@ -4,4 +4,4 @@ import scala.annotation.Annotation
class AnnotationDefault extends Annotation {
-}
\ No newline at end of file
+}
diff --git a/src/dotty/annotation/internal/Child.scala b/src/dotty/annotation/internal/Child.scala
index 9c2f83d7b..ac253ed8d 100644
--- a/src/dotty/annotation/internal/Child.scala
+++ b/src/dotty/annotation/internal/Child.scala
@@ -4,4 +4,4 @@ import scala.annotation.Annotation
class Child[T] extends Annotation {
-}
\ No newline at end of file
+}
diff --git a/src/dotty/annotation/internal/Repeated.scala b/src/dotty/annotation/internal/Repeated.scala
index 94e9df858..3065b7a06 100644
--- a/src/dotty/annotation/internal/Repeated.scala
+++ b/src/dotty/annotation/internal/Repeated.scala
@@ -2,4 +2,4 @@ package dotty.annotation.internal
import scala.annotation.Annotation
-final class Repeated() extends Annotation
\ No newline at end of file
+final class Repeated() extends Annotation
diff --git a/src/dotty/language.scala b/src/dotty/language.scala
index 169b2604c..96250a9f2 100644
--- a/src/dotty/language.scala
+++ b/src/dotty/language.scala
@@ -13,4 +13,4 @@ object language {
/** No auto tupling */
val noAutoTupling = new Feature
-}
\ No newline at end of file
+}
diff --git a/src/dotty/runtime/Arrays.scala b/src/dotty/runtime/Arrays.scala
index 5767991e5..1fb4fe5eb 100644
--- a/src/dotty/runtime/Arrays.scala
+++ b/src/dotty/runtime/Arrays.scala
@@ -12,14 +12,14 @@ object Arrays {
*/
def newGenericArray[T](length: Int)(implicit tag: ClassTag[T]): Array[T] =
tag.newArray(length)
-
+
/** Convert a sequence to a Java array with element type given by `clazz`. */
def seqToArray[T](xs: Seq[T], clazz: Class[_]): Array[T] = {
val arr = java.lang.reflect.Array.newInstance(clazz, xs.length).asInstanceOf[Array[T]]
xs.copyToArray(arr)
arr
}
-
+
/** Create an array of type T. T must be of form Array[E], with
* E being a reference type.
*/
@@ -51,4 +51,4 @@ object Arrays {
/** Create a scala.runtime.BoxedUnit[] array */
def newUnitArray(length: Int): Array[Unit] = ???
-}
\ No newline at end of file
+}
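The `seqToArray` method visible in the hunk above (only its surrounding whitespace changes here) uses `java.lang.reflect.Array.newInstance` to build an array whose runtime element type is chosen by a `Class[_]` value rather than a static type parameter. A hypothetical usage, for illustration only:

    // Illustration only, not part of this commit.
    import dotty.runtime.Arrays
    val names: Array[String] = Arrays.seqToArray(Seq("a", "b"), classOf[String])
    // names has runtime component type String and length 2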
diff --git a/src/dotty/tools/backend/jvm/CollectEntryPoints.scala b/src/dotty/tools/backend/jvm/CollectEntryPoints.scala
index 734890daa..513144bd6 100644
--- a/src/dotty/tools/backend/jvm/CollectEntryPoints.scala
+++ b/src/dotty/tools/backend/jvm/CollectEntryPoints.scala
@@ -40,7 +40,7 @@ class CollectEntryPoints extends MiniPhaseTransform {
def phaseName: String = "Collect entry points"
override def transformDefDef(tree: tpd.DefDef)(implicit ctx: Context, info: TransformerInfo): tpd.Tree = {
- if((tree.symbol ne NoSymbol) && CollectEntryPoints.isJavaEntyPoint(tree.symbol)) {
+ if ((tree.symbol ne NoSymbol) && CollectEntryPoints.isJavaEntyPoint(tree.symbol)) {
ctx.genBCodePhase.asInstanceOf[GenBCode].registerEntryPoint(tree.symbol)
}
tree
diff --git a/src/dotty/tools/backend/jvm/DottyBackendInterface.scala b/src/dotty/tools/backend/jvm/DottyBackendInterface.scala
index 8cc3c34e5..e9c8dbc80 100644
--- a/src/dotty/tools/backend/jvm/DottyBackendInterface.scala
+++ b/src/dotty/tools/backend/jvm/DottyBackendInterface.scala
@@ -376,7 +376,7 @@ class DottyBackendInterface()(implicit ctx: Context) extends BackendInterface{
def shouldEmitJumpAfterLabels = true
def dumpClasses: Option[String] =
- if(ctx.settings.Ydumpclasses.isDefault) None
+ if (ctx.settings.Ydumpclasses.isDefault) None
else Some(ctx.settings.Ydumpclasses.value)
def mainClass: Option[String] =
@@ -423,7 +423,7 @@ class DottyBackendInterface()(implicit ctx: Context) extends BackendInterface{
case TermRef(prefix: ThisType, name) =>
Some(tpd.This(prefix.cls).select(i.symbol))
case TermRef(NoPrefix, name) =>
- if(i.symbol is Flags.Method) Some(This(i.symbol.enclosingClass).select(i.symbol)) // workaround #342 todo: remove after fixed
+ if (i.symbol is Flags.Method) Some(This(i.symbol.enclosingClass).select(i.symbol)) // workaround #342 todo: remove after fixed
else None
case _ => None
}
@@ -663,7 +663,7 @@ class DottyBackendInterface()(implicit ctx: Context) extends BackendInterface{
def companionSymbol: Symbol = if (sym is Flags.Module) companionClass else companionModule
def moduleClass: Symbol = toDenot(sym).moduleClass
def enclosingClassSym: Symbol = {
- if(this.isClass) {
+ if (this.isClass) {
val ct = ctx.withPhase(ctx.flattenPhase.prev)
toDenot(sym)(ct).owner.enclosingClass(ct)
}
@@ -792,7 +792,7 @@ class DottyBackendInterface()(implicit ctx: Context) extends BackendInterface{
t.info match {
case _ =>
- if(!t.symbol.isClass) nonClassTypeRefToBType(t.symbol) // See comment on nonClassTypeRefToBType
+ if (!t.symbol.isClass) nonClassTypeRefToBType(t.symbol) // See comment on nonClassTypeRefToBType
else primitiveOrClassToBType(t.symbol) // Common reference to a type such as scala.Int or java.lang.String
}
case Types.ClassInfo(_, sym, _, _, _) => primitiveOrClassToBType(sym) // We get here, for example, for genLoadModule, which invokes toTypeKind(moduleClassSymbol.info)
@@ -942,7 +942,7 @@ class DottyBackendInterface()(implicit ctx: Context) extends BackendInterface{
def _3: Tree = field.rhs
override def unapply(s: LabelDef): DottyBackendInterface.this.LabelDef.type = {
- if(s.symbol is Flags.Label) this.field = s
+ if (s.symbol is Flags.Label) this.field = s
else this.field = null
this
}
@@ -1021,16 +1021,16 @@ class DottyBackendInterface()(implicit ctx: Context) extends BackendInterface{
def _2 = field.meth
def _3 = {
val t = field.tpt.tpe.typeSymbol
- if(t.exists) t
+ if (t.exists) t
else {
val arity = field.meth.tpe.widenDealias.paramTypes.size - _1.size
val returnsUnit = field.meth.tpe.widenDealias.resultType.classSymbol == UnitClass
- if(returnsUnit)
- ctx.requiredClass(("scala.compat.java8.JProcedure"+arity).toTermName)
- else ctx.requiredClass(("scala.compat.java8.JFunction"+arity).toTermName)
+ if (returnsUnit)
+ ctx.requiredClass(("scala.compat.java8.JProcedure" + arity).toTermName)
+ else ctx.requiredClass(("scala.compat.java8.JFunction" + arity).toTermName)
}
}
}
def currentUnit = ctx.compilationUnit
-}
\ No newline at end of file
+}
diff --git a/src/dotty/tools/backend/jvm/LabelDefs.scala b/src/dotty/tools/backend/jvm/LabelDefs.scala
index 769dcdc36..8f9e3bdcc 100644
--- a/src/dotty/tools/backend/jvm/LabelDefs.scala
+++ b/src/dotty/tools/backend/jvm/LabelDefs.scala
@@ -44,7 +44,7 @@ import StdNames.nme
*
* <label> def foo(i: Int) = {
* <label> def bar = 0
- * <label> def dough(i: Int) = if(i == 0) bar else foo(i-1)
+ * <label> def dough(i: Int) = if (i == 0) bar else foo(i-1)
* dough(i)
* }
*
@@ -54,7 +54,7 @@ import StdNames.nme
*
* \
* <label> def foo(i: Int) = dough(i)
- * <label> def dough(i: Int) = if(i == 0) bar else foo(i-1)
+ * <label> def dough(i: Int) = if (i == 0) bar else foo(i-1)
* <label> def bar = 2
* foo(100)
*
@@ -64,7 +64,7 @@ import StdNames.nme
* <jump foo>
* <label> def foo(i: Int) = dough(i)
* // <jump a> // unreachable
- * <label> def dough(i: Int) = if(i == 0) bar else foo(i-1)
+ * <label> def dough(i: Int) = if (i == 0) bar else foo(i-1)
* // <jump a> // unreachable
* <label> def bar = 2
* // <jump a> // unreachable
@@ -107,7 +107,7 @@ class LabelDefs extends MiniPhaseTransform {
labelLevel = labelLevel + 1
val r = Block(moveLabels(t), t)
labelLevel = labelLevel - 1
- if(labelLevel == 0) beingAppended.clear()
+ if (labelLevel == 0) beingAppended.clear()
r
case _ => if (entryPoints.nonEmpty && labelDefs.nonEmpty) super.transform(tree) else tree
}
@@ -206,14 +206,14 @@ class LabelDefs extends MiniPhaseTransform {
labelCalls(r.symbol) = parentLabelCalls
parentLabelCalls = st
- if(shouldMoveLabel) {
+ if (shouldMoveLabel) {
labelDefs(r.symbol) = r
EmptyTree
} else r
case t: Apply if t.symbol is Flags.Label =>
val sym = t.symbol
parentLabelCalls = parentLabelCalls + t
- if(owner != sym) callCounts(sym) = callCounts(sym) + 1
+ if (owner != sym) callCounts(sym) = callCounts(sym) + 1
super.transform(tree)
case _ =>
super.transform(tree)
diff --git a/src/dotty/tools/backend/jvm/scalaPrimitives.scala b/src/dotty/tools/backend/jvm/scalaPrimitives.scala
index 857a92d21..857a92d21 100755..100644
--- a/src/dotty/tools/backend/jvm/scalaPrimitives.scala
+++ b/src/dotty/tools/backend/jvm/scalaPrimitives.scala
diff --git a/src/dotty/tools/dotc/CompilationUnit.scala b/src/dotty/tools/dotc/CompilationUnit.scala
index de51a84cf..60e16ec3e 100644
--- a/src/dotty/tools/dotc/CompilationUnit.scala
+++ b/src/dotty/tools/dotc/CompilationUnit.scala
@@ -38,4 +38,4 @@ class CompilationUnit(val source: SourceFile) {
* so one can reliably use this function only dirrectly after `pickler`
*/
var addrOfSym: Symbol => Option[Addr] = (_ => None)
-}
\ No newline at end of file
+}
diff --git a/src/dotty/tools/dotc/Compiler.scala b/src/dotty/tools/dotc/Compiler.scala
index 40f354c7c..25a4c578b 100644
--- a/src/dotty/tools/dotc/Compiler.scala
+++ b/src/dotty/tools/dotc/Compiler.scala
@@ -106,4 +106,4 @@ class Compiler {
ctx.runInfo.clear()
new Run(this)(rootContext)
}
-}
\ No newline at end of file
+}
diff --git a/src/dotty/tools/dotc/Run.scala b/src/dotty/tools/dotc/Run.scala
index a8cc01fc8..af9c878f0 100644
--- a/src/dotty/tools/dotc/Run.scala
+++ b/src/dotty/tools/dotc/Run.scala
@@ -81,4 +81,4 @@ class Run(comp: Compiler)(implicit ctx: Context) {
r.printSummary
r
}
-}
\ No newline at end of file
+}
diff --git a/src/dotty/tools/dotc/ast/Desugar.scala b/src/dotty/tools/dotc/ast/Desugar.scala
index 1dfa24291..165f4f535 100644
--- a/src/dotty/tools/dotc/ast/Desugar.scala
+++ b/src/dotty/tools/dotc/ast/Desugar.scala
@@ -342,9 +342,9 @@ object desugar {
val companions =
if (mods is Case) {
val parent =
- if (constrTparams.nonEmpty ||
- constrVparamss.length > 1 ||
- mods.is(Abstract) ||
+ if (constrTparams.nonEmpty ||
+ constrVparamss.length > 1 ||
+ mods.is(Abstract) ||
constr.mods.is(Private)) anyRef
// todo: also use anyRef if constructor has a dependent method type (or rule that out)!
else (constrVparamss :\ classTypeRef) ((vparams, restpe) => Function(vparams map (_.tpt), restpe))
diff --git a/src/dotty/tools/dotc/ast/PluggableTransformers.scala b/src/dotty/tools/dotc/ast/PluggableTransformers.scala
index 84bbf833c..a584230a2 100644
--- a/src/dotty/tools/dotc/ast/PluggableTransformers.scala
+++ b/src/dotty/tools/dotc/ast/PluggableTransformers.scala
@@ -102,4 +102,4 @@ class ExampleTransformer extends PluggableTransformer[Type] {
override def transform(tree: tpd.Tree, ctx: Context) =
super.transform(tree, ctx)
*/
-}
\ No newline at end of file
+}
diff --git a/src/dotty/tools/dotc/ast/Positioned.scala b/src/dotty/tools/dotc/ast/Positioned.scala
index be3f904a9..e0bd6c75a 100644
--- a/src/dotty/tools/dotc/ast/Positioned.scala
+++ b/src/dotty/tools/dotc/ast/Positioned.scala
@@ -136,4 +136,4 @@ abstract class Positioned extends DotClass with Product {
if (cpath.nonEmpty) this :: cpath else Nil
} else Nil
}
-}
\ No newline at end of file
+}
diff --git a/src/dotty/tools/dotc/ast/TreeInfo.scala b/src/dotty/tools/dotc/ast/TreeInfo.scala
index 1bb0e0f4f..a7f89337c 100644
--- a/src/dotty/tools/dotc/ast/TreeInfo.scala
+++ b/src/dotty/tools/dotc/ast/TreeInfo.scala
@@ -242,9 +242,9 @@ trait TreeInfo[T >: Untyped <: Type] { self: Trees.Instance[T] =>
* is an abstract typoe declaration
*/
def lacksDefinition(mdef: MemberDef)(implicit ctx: Context) = mdef match {
- case mdef: ValOrDefDef =>
+ case mdef: ValOrDefDef =>
mdef.unforcedRhs == EmptyTree && !mdef.name.isConstructorName && !mdef.mods.is(ParamAccessor)
- case mdef: TypeDef =>
+ case mdef: TypeDef =>
mdef.rhs.isEmpty || mdef.rhs.isInstanceOf[TypeBoundsTree]
case _ => false
}
diff --git a/src/dotty/tools/dotc/ast/Trees.scala b/src/dotty/tools/dotc/ast/Trees.scala
index a70869e22..201c29fe1 100644
--- a/src/dotty/tools/dotc/ast/Trees.scala
+++ b/src/dotty/tools/dotc/ast/Trees.scala
@@ -76,7 +76,7 @@ object Trees {
}
private var nextId = 0 // for debugging
-
+
type LazyTree = AnyRef /* really: Tree | Lazy[Tree] */
type LazyTreeList = AnyRef /* really: List[Tree] | Lazy[List[Tree]] */
@@ -632,7 +632,7 @@ object Trees {
}
/** mods def name[tparams](vparams_1)...(vparams_n): tpt = rhs */
- case class DefDef[-T >: Untyped] private[ast] (name: TermName, tparams: List[TypeDef[T]],
+ case class DefDef[-T >: Untyped] private[ast] (name: TermName, tparams: List[TypeDef[T]],
vparamss: List[List[ValDef[T]]], tpt: Tree[T], private var preRhs: LazyTree)
extends ValOrDefDef[T] {
type ThisTree[-T >: Untyped] = DefDef[T]
@@ -761,16 +761,16 @@ object Trees {
// ----- Lazy trees and tree sequences
- /** A tree that can have a lazy field
+ /** A tree that can have a lazy field
* The field is represented by some private `var` which is
* proxied `unforced` and `force`. Forcing the field will
- * set the `var` to the underlying value.
+ * set the `var` to the underlying value.
*/
trait WithLazyField[+T <: AnyRef] {
def unforced: AnyRef
protected def force(x: AnyRef): Unit
def forceIfLazy(implicit ctx: Context): T = unforced match {
- case lzy: Lazy[T] =>
+ case lzy: Lazy[T] =>
val x = lzy.complete
force(x)
x
@@ -1189,7 +1189,7 @@ object Trees {
def apply(x: X, tree: Tree)(implicit ctx: Context): X
def apply(x: X, trees: Traversable[Tree])(implicit ctx: Context): X = (x /: trees)(apply)
def foldOver(x: X, tree: Tree)(implicit ctx: Context): X = {
- def localCtx =
+ def localCtx =
if (tree.hasType && tree.symbol.exists) ctx.withOwner(tree.symbol) else ctx
tree match {
case Ident(name) =>
diff --git a/src/dotty/tools/dotc/ast/tpd.scala b/src/dotty/tools/dotc/ast/tpd.scala
index 0f4585a53..955439413 100644
--- a/src/dotty/tools/dotc/ast/tpd.scala
+++ b/src/dotty/tools/dotc/ast/tpd.scala
@@ -251,8 +251,8 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo {
def Annotated(annot: Tree, arg: Tree)(implicit ctx: Context): Annotated =
ta.assignType(untpd.Annotated(annot, arg), annot, arg)
-
- def Throw(expr: Tree)(implicit ctx: Context): Tree =
+
+ def Throw(expr: Tree)(implicit ctx: Context): Tree =
ref(defn.throwMethod).appliedTo(expr)
// ------ Making references ------------------------------------------------------
@@ -409,7 +409,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo {
override def Select(tree: Tree)(qualifier: Tree, name: Name)(implicit ctx: Context): Select = {
val tree1 = untpd.cpy.Select(tree)(qualifier, name)
tree match {
- case tree: Select if (qualifier.tpe eq tree.qualifier.tpe) =>
+ case tree: Select if (qualifier.tpe eq tree.qualifier.tpe) =>
tree1.withTypeUnchecked(tree.tpe)
case _ => tree.tpe match {
case tpe: NamedType => tree1.withType(tpe.derivedSelect(qualifier.tpe))
@@ -609,10 +609,10 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo {
untpd.SelectWithSig(tree, name, sig)
.withType(TermRef.withSig(tree.tpe, name.asTermName, sig))
- /** A select node with selector name and signature taken from `sym`.
+ /** A select node with selector name and signature taken from `sym`.
* Note: Use this method instead of select(sym) if the referenced symbol
* might be overridden in the type of the qualifier prefix. See note
- * on select(sym: Symbol).
+ * on select(sym: Symbol).
*/
def selectWithSig(sym: Symbol)(implicit ctx: Context): Tree =
selectWithSig(sym.name, sym.signature)
@@ -629,7 +629,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo {
def appliedToArgs(args: List[Tree])(implicit ctx: Context): Apply =
Apply(tree, args)
- /** The current tree applied to given argument lists:
+ /** The current tree applied to given argument lists:
* `tree (argss(0)) ... (argss(argss.length -1))`
*/
def appliedToArgss(argss: List[List[Tree]])(implicit ctx: Context): Tree =
@@ -676,7 +676,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo {
def or(that: Tree)(implicit ctx: Context): Tree =
tree.select(defn.Boolean_||).appliedTo(that)
- /** The translation of `tree = rhs`.
+ /** The translation of `tree = rhs`.
* This is either the tree as an assignment, to a setter call.
*/
def becomes(rhs: Tree)(implicit ctx: Context): Tree =
@@ -695,7 +695,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo {
// --- Higher order traversal methods -------------------------------
/** Apply `f` to each subtree of this tree */
- def foreachSubTree(f: Tree => Unit)(implicit ctx: Context): Unit = {
+ def foreachSubTree(f: Tree => Unit)(implicit ctx: Context): Unit = {
val traverser = new TreeTraverser {
def traverse(tree: Tree)(implicit ctx: Context) = foldOver(f(tree), tree)
}
@@ -776,7 +776,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo {
} else args
}
- val callArgs: List[Tree] = if(args.isEmpty) Nil else {
+ val callArgs: List[Tree] = if (args.isEmpty) Nil else {
val expectedType = selected.widen.paramTypess.head.last
val lastParam = args.last
adaptLastArg(lastParam, expectedType)
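The helpers touched in these hunks (`appliedToArgs`, `appliedToArgss`, `or`, `becomes`, `foreachSubTree`) are small combinators for building and walking typed trees. A hypothetical fragment, not part of the commit, showing how the documented combinators compose; it assumes `tpd._` is imported and an implicit `Context` is available:

    // Sketch only: `or` is the combinator from the hunk above,
    // defined as tree.select(defn.Boolean_||).appliedTo(that).
    def eitherHolds(cond1: tpd.Tree, cond2: tpd.Tree)(implicit ctx: Context): tpd.Tree =
      cond1.or(cond2)
    // Similarly, fn.appliedToArgss(List(List(a), List(b, c))) builds the tree
    // for fn(a)(b, c), per the doc comment on appliedToArgss.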
diff --git a/src/dotty/tools/dotc/config/CompilerCommand.scala b/src/dotty/tools/dotc/config/CompilerCommand.scala
index aa8e7abbf..629042291 100644
--- a/src/dotty/tools/dotc/config/CompilerCommand.scala
+++ b/src/dotty/tools/dotc/config/CompilerCommand.scala
@@ -25,7 +25,7 @@ object CompilerCommand extends DotClass {
| example: -Xprint:front,mixin prints the frontend and mixin phases.
| example: -Ylog:erasure+ logs the erasure phase and the phase after the erasure phase.
| This is useful because during the tree transform of phase X, we often
- | already are in phase X+1.
+ | already are in phase X + 1.
""".stripMargin.trim + "\n"
def shortUsage = s"Usage: $cmdName <options> <source files>"
diff --git a/src/dotty/tools/dotc/config/Config.scala b/src/dotty/tools/dotc/config/Config.scala
index a599b5892..6022d3580 100644
--- a/src/dotty/tools/dotc/config/Config.scala
+++ b/src/dotty/tools/dotc/config/Config.scala
@@ -14,28 +14,28 @@ object Config {
* does not appear at the top-level of either of its bounds.
*/
final val checkConstraintsNonCyclic = false
-
+
/** Like `checkConstraintsNonCyclic`, but all constrained parameters
* are tested for direct or indirect dependencies, each time a
* constraint is added in TypeComparer.
*/
final val checkConstraintsNonCyclicTrans = false
-
+
/** Check that each constraint resulting from a subtype test
* is satisfiable.
*/
final val checkConstraintsSatisfiable = false
-
+
/** Check that each constraint is fully propagated. i.e.
* If P <: Q then the upper bound of P is a subtype of the upper bound of Q
* and the lower bound of Q is a subtype of the lower bound of P.
*/
final val checkConstraintsPropagated = false
-
+
/** Type comparer will fail with an assert if the upper bound
* of a constrained parameter becomes Nothing. This should be turned
* on only for specific debugging as normally instantiation to Nothing
- * is not an error consdition.
+ * is not an error consdition.
*/
final val failOnInstantiationToNothing = false
@@ -51,7 +51,7 @@ object Config {
/** When explaining subtypes and this flag is set, also show the classes of the compared types. */
final val verboseExplainSubtype = true
-
+
/** If this flag is set, take the fast path when comparing same-named type-aliases and types */
final val fastPathForRefinedSubtype = true
@@ -71,4 +71,4 @@ object Config {
/** Check that certain types cannot be created in erasedTypes phases */
final val checkUnerased = true
-}
\ No newline at end of file
+}
diff --git a/src/dotty/tools/dotc/config/Printers.scala b/src/dotty/tools/dotc/config/Printers.scala
index d1738ee6f..b44a1d2f6 100644
--- a/src/dotty/tools/dotc/config/Printers.scala
+++ b/src/dotty/tools/dotc/config/Printers.scala
@@ -31,4 +31,4 @@ object Printers {
val transforms = noPrinter
val cyclicErrors = noPrinter
val pickling = noPrinter
-}
\ No newline at end of file
+}
diff --git a/src/dotty/tools/dotc/config/ScalaSettings.scala b/src/dotty/tools/dotc/config/ScalaSettings.scala
index 0d4034db2..444a1c1ae 100644
--- a/src/dotty/tools/dotc/config/ScalaSettings.scala
+++ b/src/dotty/tools/dotc/config/ScalaSettings.scala
@@ -183,4 +183,4 @@ class ScalaSettings extends Settings.SettingGroup {
val YpresentationLog = StringSetting("-Ypresentation-log", "file", "Log presentation compiler events into file", "")
val YpresentationReplay = StringSetting("-Ypresentation-replay", "file", "Replay presentation compiler events from file", "")
val YpresentationDelay = IntSetting("-Ypresentation-delay", "Wait number of ms after typing before starting typechecking", 0, 0 to 999)
-}
\ No newline at end of file
+}
diff --git a/src/dotty/tools/dotc/config/Settings.scala b/src/dotty/tools/dotc/config/Settings.scala
index 531c49bfb..73bb056aa 100644
--- a/src/dotty/tools/dotc/config/Settings.scala
+++ b/src/dotty/tools/dotc/config/Settings.scala
@@ -256,4 +256,4 @@ object Settings {
def VersionSetting(name: String, descr: String, default: ScalaVersion = NoScalaVersion): Setting[ScalaVersion] =
publish(Setting(name, descr, default))
}
-}
\ No newline at end of file
+}
diff --git a/src/dotty/tools/dotc/core/Annotations.scala b/src/dotty/tools/dotc/core/Annotations.scala
index d264483e6..0dc7113f2 100644
--- a/src/dotty/tools/dotc/core/Annotations.scala
+++ b/src/dotty/tools/dotc/core/Annotations.scala
@@ -121,4 +121,4 @@ object Annotations {
arg <- annot.argumentConstant(0))
yield ScalaVersion.parse(arg.stringValue)
}
-}
\ No newline at end of file
+}
diff --git a/src/dotty/tools/dotc/core/Constraint.scala b/src/dotty/tools/dotc/core/Constraint.scala
index bc0d476a9..5a758f144 100644
--- a/src/dotty/tools/dotc/core/Constraint.scala
+++ b/src/dotty/tools/dotc/core/Constraint.scala
@@ -12,16 +12,16 @@ import config.Printers._
/** Constraint over undetermined type parameters. Constraints are built
* over values of the following types:
- *
+ *
* - PolyType A constraint constrains the type parameters of a set of PolyTypes
* - PolyParam The parameters of the constrained polytypes
- * - TypeVar Every constrained parameter might be associated with a TypeVar
+ * - TypeVar Every constrained parameter might be associated with a TypeVar
* that has the PolyParam as origin.
*/
abstract class Constraint extends Showable {
-
+
type This <: Constraint
-
+
/** Does the constraint's domain contain the type parameters of `pt`? */
def contains(pt: PolyType): Boolean
@@ -30,34 +30,34 @@ abstract class Constraint extends Showable {
/** Does this constraint contain the type variable `tvar` and is it uninstantiated? */
def contains(tvar: TypeVar): Boolean
-
+
/** The constraint entry for given type parameter `param`, or NoType if `param` is not part of
* the constraint domain.
*/
def entry(param: PolyParam): Type
-
+
/** The type variable corresponding to parameter `param`, or
* NoType, if `param` is not in constrained or is not paired with a type variable.
*/
def typeVarOfParam(param: PolyParam): Type
-
+
/** Is it known that `param1 <:< param2`? */
def isLess(param1: PolyParam, param2: PolyParam): Boolean
- /** The parameters that are known to be smaller wrt <: than `param` */
+ /** The parameters that are known to be smaller wrt <: than `param` */
def lower(param: PolyParam): List[PolyParam]
-
- /** The parameters that are known to be greater wrt <: than `param` */
+
+ /** The parameters that are known to be greater wrt <: than `param` */
def upper(param: PolyParam): List[PolyParam]
-
- /** lower(param) \ lower(butNot) */
+
+ /** lower(param) \ lower(butNot) */
def exclusiveLower(param: PolyParam, butNot: PolyParam): List[PolyParam]
-
- /** upper(param) \ upper(butNot) */
+
+ /** upper(param) \ upper(butNot) */
def exclusiveUpper(param: PolyParam, butNot: PolyParam): List[PolyParam]
/** The constraint bounds for given type parameter `param`.
- * Poly params that are known to be smaller or greater than `param`
+ * Poly params that are known to be smaller or greater than `param`
* are not contained in the return bounds.
* @pre `param` is not part of the constraint domain.
*/
@@ -65,16 +65,16 @@ abstract class Constraint extends Showable {
/** The lower bound of `param` including all known-to-be-smaller parameters */
def fullLowerBound(param: PolyParam)(implicit ctx: Context): Type
-
+
/** The upper bound of `param` including all known-to-be-greater parameters */
def fullUpperBound(param: PolyParam)(implicit ctx: Context): Type
-
+
/** The bounds of `param` including all known-to-be-smaller and -greater parameters */
def fullBounds(param: PolyParam)(implicit ctx: Context): TypeBounds
-
+
/** A new constraint which is derived from this constraint by adding
* entries for all type parameters of `poly`.
- * @param tvars A list of type variables associated with the params,
+ * @param tvars A list of type variables associated with the params,
* or Nil if the constraint will just be checked for
* satisfiability but will solved to give instances of
* type variables.
@@ -84,15 +84,15 @@ abstract class Constraint extends Showable {
/** A new constraint which is derived from this constraint by updating
* the entry for parameter `param` to `tp`.
* `tp` can be one of the following:
- *
+ *
* - A TypeBounds value, indicating new constraint bounds
* - Another type, indicating a solution for the parameter
- *
- * @pre `this contains param`.
+ *
+ * @pre `this contains param`.
*/
def updateEntry(param: PolyParam, tp: Type)(implicit ctx: Context): This
-
- /** A constraint that includes the relationship `p1 <: p2`.
+
+ /** A constraint that includes the relationship `p1 <: p2`.
* `<:` relationships between parameters ("edges") are propagated, but
* non-parameter bounds are left alone.
*/
@@ -113,17 +113,17 @@ abstract class Constraint extends Showable {
/** Narrow one of the bounds of type parameter `param`
* If `isUpper` is true, ensure that `param <: `bound`, otherwise ensure
- * that `param >: bound`.
+ * that `param >: bound`.
*/
def narrowBound(param: PolyParam, bound: Type, isUpper: Boolean)(implicit ctx: Context): This
-
+
/** Is entry associated with `pt` removable?
* @param removedParam The index of a parameter which is still present in the
* entry array, but is going to be removed at the same step,
* or -1 if no such parameter exists.
*/
def isRemovable(pt: PolyType, removedParam: Int = -1): Boolean
-
+
/** A new constraint with all entries coming from `pt` removed. */
def remove(pt: PolyType)(implicit ctx: Context): This
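The doc comments above spell out the query surface of `Constraint` (`contains`, `entry`, `isLess`, `lower`/`upper`, `nonParamBounds`, `fullBounds`). A hypothetical helper, purely to illustrate how those queries combine; it is not part of the commit and elides the `dotty.tools.dotc` imports a real source file would need:

    // Sketch only: summarize what a constraint currently knows about `param`.
    def describeParam(c: Constraint, param: PolyParam)(implicit ctx: Context): String =
      c.entry(param) match {
        case bounds: TypeBounds =>  // still open: show ordering info and its own bounds
          s"open; lower = ${c.lower(param)}, upper = ${c.upper(param)}, bounds = ${c.nonParamBounds(param)}"
        case tp if tp.exists =>     // already solved to a concrete type
          s"instantiated to $tp"
        case _ =>                   // NoType: param is outside this constraint's domain
          "not constrained"
      }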
diff --git a/src/dotty/tools/dotc/core/ConstraintHandling.scala b/src/dotty/tools/dotc/core/ConstraintHandling.scala
index 796960337..8caacfb2f 100644
--- a/src/dotty/tools/dotc/core/ConstraintHandling.scala
+++ b/src/dotty/tools/dotc/core/ConstraintHandling.scala
@@ -10,28 +10,28 @@ import config.Printers._
/** Methods for adding constraints and solving them.
*
* What goes into a Constraint as opposed to a ConstrainHandler?
- *
+ *
* Constraint code is purely functional: Operations get constraints and produce new ones.
- * Constraint code does not have access to a type-comparer. Anything regarding lubs and glbs has to be done
+ * Constraint code does not have access to a type-comparer. Anything regarding lubs and glbs has to be done
* elsewhere.
- *
+ *
* By comparison: Constraint handlers are parts of type comparers and can use their functionality.
* Constraint handlers update the current constraint as a side effect.
*/
trait ConstraintHandling {
-
+
implicit val ctx: Context
-
+
protected def isSubType(tp1: Type, tp2: Type): Boolean
-
+
val state: TyperState
import state.constraint
-
+
private var addConstraintInvocations = 0
/** If the constraint is frozen we cannot add new bounds to the constraint. */
protected var frozenConstraint = false
-
+
private def addOneBound(param: PolyParam, bound: Type, isUpper: Boolean): Boolean =
!constraint.contains(param) || {
val c1 = constraint.narrowBound(param, bound, isUpper)
@@ -57,7 +57,7 @@ trait ConstraintHandling {
constr.println(i"added $description = $res")
res
}
-
+
protected def addLowerBound(param: PolyParam, bound: Type): Boolean = {
def description = i"constraint $param >: $bound to\n$constraint"
constr.println(i"adding $description")
@@ -68,11 +68,11 @@ trait ConstraintHandling {
constr.println(i"added $description = $res")
res
}
-
+
protected def addLess(p1: PolyParam, p2: PolyParam): Boolean = {
def description = i"ordering $p1 <: $p2 to\n$constraint"
val res =
- if (constraint.isLess(p2, p1)) unify(p2, p1)
+ if (constraint.isLess(p2, p1)) unify(p2, p1)
else {
val down1 = p1 :: constraint.exclusiveLower(p1, p2)
val up2 = p2 :: constraint.exclusiveUpper(p2, p1)
@@ -86,7 +86,7 @@ trait ConstraintHandling {
constr.println(i"added $description = $res")
res
}
-
+
/** Make p2 = p1, transfer all bounds of p2 to p1
* @pre less(p1)(p2)
*/
@@ -100,10 +100,10 @@ trait ConstraintHandling {
val lo = bounds.lo
val hi = bounds.hi
isSubType(lo, hi) &&
- down.forall(addOneBound(_, hi, isUpper = true)) &&
+ down.forall(addOneBound(_, hi, isUpper = true)) &&
up.forall(addOneBound(_, lo, isUpper = false))
}
-
+
protected final def isSubTypeWhenFrozen(tp1: Type, tp2: Type): Boolean = {
val saved = frozenConstraint
frozenConstraint = true
@@ -164,20 +164,20 @@ trait ConstraintHandling {
else {
val saved = constraint
try
- c2.forallParams(p =>
+ c2.forallParams(p =>
c1.contains(p) &&
c2.upper(p).forall(c1.isLess(p, _)) &&
isSubTypeWhenFrozen(c1.nonParamBounds(p), c2.nonParamBounds(p)))
finally constraint = saved
}
-
+
/** The current bounds of type parameter `param` */
final def bounds(param: PolyParam): TypeBounds = constraint.entry(param) match {
case bounds: TypeBounds => bounds
case _ => param.binder.paramBounds(param.paramNum)
}
-
- /** Add polytype `pt`, possibly with type variables `tvars`, to current constraint
+
+ /** Add polytype `pt`, possibly with type variables `tvars`, to current constraint
* and propagate all bounds.
* @param tvars See Constraint#add
*/
@@ -223,7 +223,7 @@ trait ConstraintHandling {
finally addConstraintInvocations -= 1
}
}
-
+
/** Check that constraint is fully propagated. See comment in Config.checkConstraintsPropagated */
def checkPropagated(msg: => String)(result: Boolean): Boolean = {
if (Config.checkConstraintsPropagated && result && addConstraintInvocations == 0) {
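
A rough, self-contained sketch of the split described in the comment above, with invented names (`MiniConstraintHandling`, a String-based toy subtype relation): the handler owns the mutable current constraint plus an abstract subtype test, and commits an update only if it stays consistent, loosely in the spirit of `addOneBound`.

object MiniConstraintHandling {
  // param -> (lower bounds, upper bounds); a stand-in for the real Constraint
  type Constraint = Map[String, (List[String], List[String])]

  trait Handler {
    protected def isSubType(tp1: String, tp2: String): Boolean   // supplied by the type comparer
    protected var frozenConstraint = false
    var constraint: Constraint = Map.empty                       // updated as a side effect

    /** Record `param <: bound`; commit only if every known lower bound still conforms. */
    def addUpperBound(param: String, bound: String): Boolean = {
      val (los, his) = constraint.getOrElse(param, (Nil, Nil))
      val ok = !frozenConstraint && los.forall(lo => isSubType(lo, bound))
      if (ok) constraint = constraint.updated(param, (los, bound :: his))
      ok
    }
  }

  def main(args: Array[String]): Unit = {
    val handler = new Handler {
      // toy relation: a type conforms to itself and to "Any"
      def isSubType(tp1: String, tp2: String) = tp1 == tp2 || tp2 == "Any"
    }
    handler.constraint = Map("T" -> (List("String"), Nil))   // pretend T >: String already holds
    println(handler.addUpperBound("T", "Any"))   // true:  String <: Any in the toy relation
    println(handler.addUpperBound("T", "Int"))   // false: String <: Int does not hold
  }
}
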
diff --git a/src/dotty/tools/dotc/core/Contexts.scala b/src/dotty/tools/dotc/core/Contexts.scala
index 412960983..61d4e9874 100644
--- a/src/dotty/tools/dotc/core/Contexts.scala
+++ b/src/dotty/tools/dotc/core/Contexts.scala
@@ -170,7 +170,7 @@ object Contexts {
if (implicitsCache == null )
implicitsCache = {
val implicitRefs: List[TermRef] =
- if (isClassDefContext)
+ if (isClassDefContext)
try owner.thisType.implicitMembers
catch {
case ex: CyclicReference => Nil
@@ -561,14 +561,14 @@ object Contexts {
private[core] val pendingUnderlying = new mutable.HashSet[Type]
- private [core] var phasesPlan: List[List[Phase]] = _
+ private[core] var phasesPlan: List[List[Phase]] = _
// Phases state
/** Phases by id */
private[core] var phases: Array[Phase] = _
/** Phases with consecutive Transforms groupped into a single phase, Empty array if squashing is disabled */
- private [core] var squashedPhases: Array[Phase] = Array.empty[Phase]
+ private[core] var squashedPhases: Array[Phase] = Array.empty[Phase]
/** Next denotation transformer id */
private[core] var nextDenotTransformerId: Array[Int] = _
diff --git a/src/dotty/tools/dotc/core/Definitions.scala b/src/dotty/tools/dotc/core/Definitions.scala
index 89e4bd371..7accf9148 100644
--- a/src/dotty/tools/dotc/core/Definitions.scala
+++ b/src/dotty/tools/dotc/core/Definitions.scala
@@ -175,14 +175,14 @@ class Definitions {
def ObjectMethods = List(Object_eq, Object_ne, Object_synchronized, Object_clone,
Object_finalize, Object_notify, Object_notifyAll, Object_wait, Object_waitL, Object_waitLI)
-
+
/** Dummy method needed by elimByName */
lazy val dummyApply = newPolyMethod(
OpsPackageClass, nme.dummyApply, 1,
pt => MethodType(List(FunctionType(Nil, PolyParam(pt, 0))), PolyParam(pt, 0)))
-
+
/** Method representing a throw */
- lazy val throwMethod = newMethod(OpsPackageClass, nme.THROWkw,
+ lazy val throwMethod = newMethod(OpsPackageClass, nme.THROWkw,
MethodType(List(ThrowableType), NothingType))
lazy val NothingClass: ClassSymbol = newCompleteClassSymbol(
diff --git a/src/dotty/tools/dotc/core/Denotations.scala b/src/dotty/tools/dotc/core/Denotations.scala
index 849e934f0..f038e8f2f 100644
--- a/src/dotty/tools/dotc/core/Denotations.scala
+++ b/src/dotty/tools/dotc/core/Denotations.scala
@@ -476,14 +476,14 @@ object Denotations {
/** The version of this SingleDenotation that was valid in the first phase
* of this run.
*/
- def initial: SingleDenotation =
+ def initial: SingleDenotation =
if (validFor == Nowhere) this
else {
var current = nextInRun
while (current.validFor.code > this.myValidFor.code) current = current.nextInRun
current
}
-
+
def history: List[SingleDenotation] = {
val b = new ListBuffer[SingleDenotation]
var current = initial
@@ -497,7 +497,7 @@ object Denotations {
/** Invalidate all caches and fields that depend on base classes and their contents */
def invalidateInheritedInfo(): Unit = ()
-
+
/** Move validity period of this denotation to a new run. Throw a StaleSymbol error
* if denotation is no longer valid.
*/
diff --git a/src/dotty/tools/dotc/core/Flags.scala b/src/dotty/tools/dotc/core/Flags.scala
index 108a9edee..f62c3cae8 100644
--- a/src/dotty/tools/dotc/core/Flags.scala
+++ b/src/dotty/tools/dotc/core/Flags.scala
@@ -300,7 +300,7 @@ object Flags {
/** Method is assumed to be stable */
final val Stable = termFlag(24, "<stable>")
-
+
/** A case parameter accessor */
final val CaseAccessor = termFlag(25, "<caseaccessor>")
@@ -499,7 +499,7 @@ object Flags {
/** These flags are pickled */
final val PickledFlags = flagRange(FirstFlag, FirstNotPickledFlag)
-
+
final val AllFlags = flagRange(FirstFlag, MaxFlag)
/** An abstract class or a trait */
@@ -531,10 +531,10 @@ object Flags {
/** A type parameter or type parameter accessor */
final val TypeParamOrAccessor = TypeParam | TypeParamAccessor
-
- /** If symbol of a type alias has these flags, prefer the alias */
+
+ /** If symbol of a type alias has these flags, prefer the alias */
final val AliasPreferred = TypeParam | TypeArgument | ExpandedName
-
+
/** A covariant type parameter instance */
final val LocalCovariant = allOf(Local, Covariant)
@@ -596,4 +596,4 @@ object Flags {
implicit def conjToFlagSet(conj: FlagConjunction): FlagSet =
FlagSet(conj.bits)
-}
\ No newline at end of file
+}
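
The flag algebra used throughout this file is ordinary bit arithmetic. A much-simplified, self-contained sketch (the indices and the single-Long encoding below are invented; the real `Flags` also distinguishes term from type flags):

object MiniFlags {
  final case class FlagSet(bits: Long) {
    def |(that: FlagSet): FlagSet        = FlagSet(bits | that.bits)
    def is(flags: FlagSet): Boolean      = (bits & flags.bits) != 0           // has any of `flags`
    def isAllOf(flags: FlagSet): Boolean = (bits & flags.bits) == flags.bits  // has all of `flags`
  }
  def flag(index: Int): FlagSet = FlagSet(1L << index)

  val Local     = flag(1)
  val Covariant = flag(2)
  val TypeParam = flag(3)

  // in the spirit of `allOf(Local, Covariant)`: every member must be present
  val LocalCovariant = Local | Covariant

  def main(args: Array[String]): Unit = {
    val sym = Local | Covariant | TypeParam
    println(sym.isAllOf(LocalCovariant))                  // true
    println((Local | TypeParam).isAllOf(LocalCovariant))  // false: Covariant is missing
  }
}
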
diff --git a/src/dotty/tools/dotc/core/NameOps.scala b/src/dotty/tools/dotc/core/NameOps.scala
index 13ff92a8a..35607cf74 100644
--- a/src/dotty/tools/dotc/core/NameOps.scala
+++ b/src/dotty/tools/dotc/core/NameOps.scala
@@ -130,7 +130,7 @@ object NameOps {
/** If name ends in module class suffix, drop it */
def stripModuleClassSuffix: Name =
if (isModuleClassName) name dropRight MODULE_SUFFIX.length else name
-
+
/** Append a suffix so that this name does not clash with another name in the same scope */
def avoidClashName: TermName = (name ++ AVOID_CLASH_SUFFIX).toTermName
@@ -161,7 +161,7 @@ object NameOps {
val idx = name.lastIndexOfSlice(nme.EXPAND_SEPARATOR)
if (idx < 0) name else (name drop (idx + nme.EXPAND_SEPARATOR.length)).asInstanceOf[N]
}
-
+
def expandedPrefix: N = {
val idx = name.lastIndexOfSlice(nme.EXPAND_SEPARATOR)
assert(idx >= 0)
@@ -365,4 +365,4 @@ object NameOps {
case name => name
}
}
-}
\ No newline at end of file
+}
diff --git a/src/dotty/tools/dotc/core/OrderingConstraint.scala b/src/dotty/tools/dotc/core/OrderingConstraint.scala
index 53378435e..21d003451 100644
--- a/src/dotty/tools/dotc/core/OrderingConstraint.scala
+++ b/src/dotty/tools/dotc/core/OrderingConstraint.scala
@@ -16,10 +16,10 @@ object OrderingConstraint {
/** The type of `OrderingConstraint#boundsMap` */
type ParamBounds = SimpleMap[PolyType, Array[Type]]
-
+
/** The type of `OrderingConstraint#lowerMap`, `OrderingConstraint#upperMap` */
type ParamOrdering = SimpleMap[PolyType, Array[List[PolyParam]]]
-
+
/** A new constraint with given maps */
private def newConstraint(boundsMap: ParamBounds, lowerMap: ParamOrdering, upperMap: ParamOrdering)(implicit ctx: Context) : OrderingConstraint = {
val result = new OrderingConstraint(boundsMap, lowerMap, upperMap)
@@ -27,29 +27,29 @@ object OrderingConstraint {
ctx.runInfo.recordConstraintSize(result, result.boundsMap.size)
result
}
-
+
/** A lens for updating a single entry array in one of the three constraint maps */
abstract class ConstraintLens[T <: AnyRef: ClassTag] {
- def entries(c: OrderingConstraint, poly: PolyType): Array[T]
+ def entries(c: OrderingConstraint, poly: PolyType): Array[T]
def updateEntries(c: OrderingConstraint, poly: PolyType, entries: Array[T])(implicit ctx: Context): OrderingConstraint
def initial: T
-
+
def apply(c: OrderingConstraint, poly: PolyType, idx: Int) = {
val es = entries(c, poly)
if (es == null) initial else es(idx)
}
-
+
/** The `current` constraint but with the entry for `param` updated to `entry`.
* `current` is used linearly. If it is different from `prev` it is
* known to be dead after the call. Hence it is OK to update destructively
* parts of `current` which are not shared by `prev`.
*/
- def update(prev: OrderingConstraint, current: OrderingConstraint,
+ def update(prev: OrderingConstraint, current: OrderingConstraint,
poly: PolyType, idx: Int, entry: T)(implicit ctx: Context): OrderingConstraint = {
var es = entries(current, poly)
if (es != null && (es(idx) eq entry)) current
else {
- val result =
+ val result =
if (es == null) {
es = Array.fill(poly.paramNames.length)(initial)
updateEntries(current, poly, es)
@@ -64,40 +64,40 @@ object OrderingConstraint {
result
}
}
-
- def update(prev: OrderingConstraint, current: OrderingConstraint,
+
+ def update(prev: OrderingConstraint, current: OrderingConstraint,
param: PolyParam, entry: T)(implicit ctx: Context): OrderingConstraint =
update(prev, current, param.binder, param.paramNum, entry)
-
- def map(prev: OrderingConstraint, current: OrderingConstraint,
- poly: PolyType, idx: Int, f: T => T)(implicit ctx: Context): OrderingConstraint =
+
+ def map(prev: OrderingConstraint, current: OrderingConstraint,
+ poly: PolyType, idx: Int, f: T => T)(implicit ctx: Context): OrderingConstraint =
update(prev, current, poly, idx, f(apply(current, poly, idx)))
- def map(prev: OrderingConstraint, current: OrderingConstraint,
- param: PolyParam, f: T => T)(implicit ctx: Context): OrderingConstraint =
+ def map(prev: OrderingConstraint, current: OrderingConstraint,
+ param: PolyParam, f: T => T)(implicit ctx: Context): OrderingConstraint =
map(prev, current, param.binder, param.paramNum, f)
}
val boundsLens = new ConstraintLens[Type] {
- def entries(c: OrderingConstraint, poly: PolyType): Array[Type] =
+ def entries(c: OrderingConstraint, poly: PolyType): Array[Type] =
c.boundsMap(poly)
- def updateEntries(c: OrderingConstraint, poly: PolyType, entries: Array[Type])(implicit ctx: Context): OrderingConstraint =
+ def updateEntries(c: OrderingConstraint, poly: PolyType, entries: Array[Type])(implicit ctx: Context): OrderingConstraint =
newConstraint(c.boundsMap.updated(poly, entries), c.lowerMap, c.upperMap)
def initial = NoType
}
-
+
val lowerLens = new ConstraintLens[List[PolyParam]] {
- def entries(c: OrderingConstraint, poly: PolyType): Array[List[PolyParam]] =
+ def entries(c: OrderingConstraint, poly: PolyType): Array[List[PolyParam]] =
c.lowerMap(poly)
- def updateEntries(c: OrderingConstraint, poly: PolyType, entries: Array[List[PolyParam]])(implicit ctx: Context): OrderingConstraint =
+ def updateEntries(c: OrderingConstraint, poly: PolyType, entries: Array[List[PolyParam]])(implicit ctx: Context): OrderingConstraint =
newConstraint(c.boundsMap, c.lowerMap.updated(poly, entries), c.upperMap)
def initial = Nil
}
val upperLens = new ConstraintLens[List[PolyParam]] {
- def entries(c: OrderingConstraint, poly: PolyType): Array[List[PolyParam]] =
+ def entries(c: OrderingConstraint, poly: PolyType): Array[List[PolyParam]] =
c.upperMap(poly)
- def updateEntries(c: OrderingConstraint, poly: PolyType, entries: Array[List[PolyParam]])(implicit ctx: Context): OrderingConstraint =
+ def updateEntries(c: OrderingConstraint, poly: PolyType, entries: Array[List[PolyParam]])(implicit ctx: Context): OrderingConstraint =
newConstraint(c.boundsMap, c.lowerMap, c.upperMap.updated(poly, entries))
def initial = Nil
}
@@ -105,7 +105,7 @@ object OrderingConstraint {
import OrderingConstraint._
-/** Constraint over undetermined type parameters that keeps separate maps to
+/** Constraint over undetermined type parameters that keeps separate maps to
* reflect parameter orderings.
* @param boundsMap a map from PolyType to arrays.
* Each array contains twice the number of entries as there a type parameters
@@ -115,23 +115,23 @@ import OrderingConstraint._
* An instantiated type parameter is represented by having its instance type in
* the corresponding array entry. The dual use of arrays for poly params
* and typevars is to save space and hopefully gain some speed.
- *
+ *
* @param lowerMap a map from PolyTypes to arrays. Each array entry corresponds
* to a parameter P of the polytype; it contains all constrained parameters
- * Q that are known to be smaller than P, i.e. Q <: P.
+ * Q that are known to be smaller than P, i.e. Q <: P.
* @param upperMap a map from PolyTypes to arrays. Each array entry corresponds
* to a parameter P of the polytype; it contains all constrained parameters
- * Q that are known to be greater than P, i.e. P <: Q.
+ * Q that are known to be greater than P, i.e. P <: Q.
*/
-class OrderingConstraint(private val boundsMap: ParamBounds,
- private val lowerMap : ParamOrdering,
+class OrderingConstraint(private val boundsMap: ParamBounds,
+ private val lowerMap : ParamOrdering,
private val upperMap : ParamOrdering) extends Constraint {
-
+
type This = OrderingConstraint
-
-
+
+
// ----------- Basic indices --------------------------------------------------
-
+
/** The number of type parameters in the given entry array */
private def paramCount(entries: Array[Type]) = entries.length >> 1
@@ -145,7 +145,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
if (entries == null) NoType
else entries(param.paramNum)
}
-
+
// ----------- Contains tests --------------------------------------------------
def contains(pt: PolyType): Boolean = boundsMap(pt) != null
@@ -163,42 +163,42 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
}
private def isBounds(tp: Type) = tp.isInstanceOf[TypeBounds]
-
+
// ---------- Dependency handling ----------------------------------------------
-
+
def lower(param: PolyParam): List[PolyParam] = lowerLens(this, param.binder, param.paramNum)
def upper(param: PolyParam): List[PolyParam] = upperLens(this, param.binder, param.paramNum)
-
+
def minLower(param: PolyParam): List[PolyParam] = {
val all = lower(param)
all.filterNot(p => all.exists(isLess(p, _)))
}
-
+
def minUpper(param: PolyParam): List[PolyParam] = {
val all = upper(param)
all.filterNot(p => all.exists(isLess(_, p)))
}
-
+
def exclusiveLower(param: PolyParam, butNot: PolyParam): List[PolyParam] =
lower(param).filterNot(isLess(_, butNot))
-
+
def exclusiveUpper(param: PolyParam, butNot: PolyParam): List[PolyParam] =
upper(param).filterNot(isLess(butNot, _))
-
+
// ---------- Info related to PolyParams -------------------------------------------
def isLess(param1: PolyParam, param2: PolyParam): Boolean =
upper(param1).contains(param2)
- def nonParamBounds(param: PolyParam): TypeBounds =
+ def nonParamBounds(param: PolyParam): TypeBounds =
entry(param).asInstanceOf[TypeBounds]
-
+
def fullLowerBound(param: PolyParam)(implicit ctx: Context): Type =
(nonParamBounds(param).lo /: minLower(param))(_ | _)
- def fullUpperBound(param: PolyParam)(implicit ctx: Context): Type =
+ def fullUpperBound(param: PolyParam)(implicit ctx: Context): Type =
(nonParamBounds(param).hi /: minUpper(param))(_ & _)
-
+
def fullBounds(param: PolyParam)(implicit ctx: Context): TypeBounds =
nonParamBounds(param).derivedTypeBounds(fullLowerBound(param), fullUpperBound(param))
@@ -209,17 +209,17 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
val tvar = typeVar(entries, param.paramNum)
if (tvar != null) tvar else NoType
}
- }
+ }
// ---------- Adding PolyTypes --------------------------------------------------
-
+
/** The list of parameters P such that, for a fresh type parameter Q:
- *
+ *
* Q <: tp implies Q <: P and isUpper = true, or
* tp <: Q implies P <: Q and isUpper = false
*/
def dependentParams(tp: Type, isUpper: Boolean): List[PolyParam] = tp match {
- case param: PolyParam if contains(param) =>
+ case param: PolyParam if contains(param) =>
param :: (if (isUpper) upper(param) else lower(param))
case tp: AndOrType =>
val ps1 = dependentParams(tp.tp1, isUpper)
@@ -228,7 +228,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
case _ =>
Nil
}
-
+
/** The bound type `tp` without constrained parameters which are clearly
* dependent. A parameter in an upper bound is clearly dependent if it appears
* in a hole of a context H given by:
@@ -237,7 +237,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
* H & T
* T & H
*
- * (the idea is that a parameter P in a H context is guaranteed to be a supertype of the
+ * (the idea is that a parameter P in a H context is guaranteed to be a supertype of the
* bounded parameter.)
* Analogously, a parameter in a lower bound is clearly dependent if it appears
* in a hole of a context H given by:
@@ -245,18 +245,18 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
* L = []
* L | T
* T | L
- *
+ *
* "Clearly dependent" is not synonymous with "dependent" in the sense
* it is defined in `dependentParams`. Dependent parameters are handled
* in `updateEntry`. The idea of stripping off clearly dependent parameters
- * and to handle them separately is for efficiency, so that type expressions
+ * and to handle them separately is for efficiency, so that type expressions
* used as bounds become smaller.
- *
+ *
* @param isUpper If true, `bound` is an upper bound, else a lower bound.
*/
- private def stripParams(tp: Type, paramBuf: mutable.ListBuffer[PolyParam],
+ private def stripParams(tp: Type, paramBuf: mutable.ListBuffer[PolyParam],
isUpper: Boolean)(implicit ctx: Context): Type = tp match {
- case param: PolyParam if contains(param) =>
+ case param: PolyParam if contains(param) =>
if (!paramBuf.contains(param)) paramBuf += param
NoType
case tp: AndOrType if isUpper == tp.isAnd =>
@@ -268,17 +268,17 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
else tp2
case _ =>
tp
- }
-
+ }
+
/** The bound type `tp` without clearly dependent parameters.
* A top or bottom type if type consists only of dependent parameters.
* @param isUpper If true, `bound` is an upper bound, else a lower bound.
*/
- private def normalizedType(tp: Type, paramBuf: mutable.ListBuffer[PolyParam],
- isUpper: Boolean)(implicit ctx: Context): Type =
+ private def normalizedType(tp: Type, paramBuf: mutable.ListBuffer[PolyParam],
+ isUpper: Boolean)(implicit ctx: Context): Type =
stripParams(tp, paramBuf, isUpper)
.orElse(if (isUpper) defn.AnyType else defn.NothingType)
-
+
def add(poly: PolyType, tvars: List[TypeVar])(implicit ctx: Context): This = {
assert(!contains(poly))
val nparams = poly.paramNames.length
@@ -287,7 +287,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
tvars.copyToArray(entries1, nparams)
newConstraint(boundsMap.updated(poly, entries1), lowerMap, upperMap).init(poly)
}
-
+
/** Split dependent parameters off the bounds for parameters in `poly`.
* Update all bounds to be normalized and update ordering to account for
* dependent parameters.
@@ -311,9 +311,9 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
if (Config.checkConstraintsNonCyclic) checkNonCyclic()
current
}
-
+
// ---------- Updates ------------------------------------------------------------
-
+
/** Add the fact `param1 <: param2` to the constraint `current` and propagate
* `<:<` relationships between parameters ("edges") but not bounds.
*/
@@ -331,31 +331,31 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
def addLess(param1: PolyParam, param2: PolyParam)(implicit ctx: Context): This =
order(this, param1, param2)
-
+
def updateEntry(current: This, param: PolyParam, tp: Type)(implicit ctx: Context): This = {
var current1 = boundsLens.update(this, current, param, tp)
tp match {
case TypeBounds(lo, hi) =>
- for (p <- dependentParams(lo, isUpper = false))
+ for (p <- dependentParams(lo, isUpper = false))
current1 = order(current1, p, param)
- for (p <- dependentParams(hi, isUpper = true))
+ for (p <- dependentParams(hi, isUpper = true))
current1 = order(current1, param, p)
case _ =>
}
current1
}
-
+
def updateEntry(param: PolyParam, tp: Type)(implicit ctx: Context): This =
updateEntry(this, param, tp)
-
+
def unify(p1: PolyParam, p2: PolyParam)(implicit ctx: Context): This = {
val p1Bounds = (nonParamBounds(p1) & nonParamBounds(p2)).substParam(p2, p1)
updateEntry(p1, p1Bounds).replace(p2, p1)
}
-
+
def narrowBound(param: PolyParam, bound: Type, isUpper: Boolean)(implicit ctx: Context): This = {
- val oldBounds @ TypeBounds(lo, hi) = nonParamBounds(param)
- val newBounds =
+ val oldBounds @ TypeBounds(lo, hi) = nonParamBounds(param)
+ val newBounds =
if (isUpper) oldBounds.derivedTypeBounds(lo, hi & bound)
else oldBounds.derivedTypeBounds(lo | bound, hi)
updateEntry(param, newBounds)
@@ -368,40 +368,40 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
* of the parameter elsewhere in the constraint by type `tp`, or a conservative
* approximation of it if that is needed to avoid cycles.
* Occurrences nested inside a refinement or prefix are not affected.
- *
+ *
* The reason we need to substitute top-level occurrences of the parameter
* is to deal with situations like the following. Say we have in the constraint
- *
+ *
* P <: Q & String
* Q
- *
+ *
* and we replace Q with P. Then substitution gives
- *
+ *
* P <: P & String
- *
+ *
* this would be a cyclic constraint is therefore changed by `normalize` and
* `recombine` below to
- *
+ *
* P <: String
- *
- * approximating the RHS occurrence of P with Any. Without the substitution we
+ *
+ * approximating the RHS occurrence of P with Any. Without the substitution we
* would not find out where we need to approximate. Occurrences of parameters
* that are not top-level are not affected.
*/
def replace(param: PolyParam, tp: Type)(implicit ctx: Context): OrderingConstraint = {
val replacement = tp.dealias.stripTypeVar
- if (param == replacement) this
+ if (param == replacement) this
else {
assert(replacement.isValueType)
val poly = param.binder
val idx = param.paramNum
-
- def removeParam(ps: List[PolyParam]) =
+
+ def removeParam(ps: List[PolyParam]) =
ps.filterNot(p => p.binder.eq(poly) && p.paramNum == idx)
-
+
def replaceParam(tp: Type, atPoly: PolyType, atIdx: Int) = tp match {
case bounds @ TypeBounds(lo, hi) =>
-
+
def recombine(andor: AndOrType, op: (Type, Boolean) => Type, isUpper: Boolean): Type = {
val tp1 = op(andor.tp1, isUpper)
val tp2 = op(andor.tp2, isUpper)
@@ -409,7 +409,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
else if (andor.isAnd) tp1 & tp2
else tp1 | tp2
}
-
+
def normalize(tp: Type, isUpper: Boolean): Type = tp match {
case p: PolyParam if p.binder == atPoly && p.paramNum == atIdx =>
if (isUpper) defn.AnyType else defn.NothingType
@@ -422,12 +422,12 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
case tp: AndOrType if isUpper == tp.isAnd => recombine(tp, replaceIn, isUpper)
case _ => tp
}
-
+
bounds.derivedTypeBounds(replaceIn(lo, isUpper = false), replaceIn(hi, isUpper = true))
case _ => tp
}
-
- var current =
+
+ var current =
if (isRemovable(poly, idx)) remove(poly) else updateEntry(param, replacement)
current.foreachParam {(p, i) =>
current = boundsLens.map(this, current, p, i, replaceParam(_, p, i))
@@ -438,7 +438,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
}
}
- def remove(pt: PolyType)(implicit ctx: Context): This =
+ def remove(pt: PolyType)(implicit ctx: Context): This =
newConstraint(boundsMap.remove(pt), lowerMap.remove(pt), upperMap.remove(pt))
def isRemovable(pt: PolyType, removedParam: Int = -1): Boolean = {
@@ -461,7 +461,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
def domainPolys: List[PolyType] = boundsMap.keys
- def domainParams: List[PolyParam] =
+ def domainParams: List[PolyParam] =
for {
(poly, entries) <- boundsMap.toList
n <- 0 until paramCount(entries)
@@ -475,7 +475,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
}
true
}
-
+
def foreachParam(p: (PolyType, Int) => Unit): Unit =
boundsMap.foreachBinding { (poly, entries) =>
0.until(poly.paramNames.length).foreach(p(poly, _))
@@ -513,17 +513,17 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
def checkNonCyclic()(implicit ctx: Context): Unit =
domainParams.foreach(checkNonCyclic)
-
+
private def checkNonCyclic(param: PolyParam)(implicit ctx: Context): Unit =
assert(!isLess(param, param), i"cyclic constraint involving $param in $this")
-
+
// ---------- toText -----------------------------------------------------
override def toText(printer: Printer): Text = {
def entryText(tp: Type) = tp match {
- case tp: TypeBounds =>
+ case tp: TypeBounds =>
tp.toText(printer)
- case _ =>
+ case _ =>
" := " ~ tp.toText(printer)
}
val indent = 3
@@ -547,11 +547,11 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
ups = minUpper(param)
if ups.nonEmpty
}
- yield
+ yield
(" " * indent) ~ param.toText(printer) ~ " <: " ~
Text(ups.map(_.toText(printer)), ", ")
Text(deps, "\n")
}
Text.lines(List(header, uninstVarsText, constrainedText, boundsText, orderingText, ")"))
}
-}
\ No newline at end of file
+}
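
The `ConstraintLens` plumbing above amounts to copy-on-write updates of one entry array inside one of the three maps. A stripped-down sketch with invented types (it omits the real lenses' trick of updating `current` destructively when the intermediate constraint is known to be dead):

object MiniLens {
  type Store = Map[String, Array[Int]]   // stands in for boundsMap / lowerMap / upperMap

  /** Set slot `idx` of the array under `key`, cloning the array so the
   *  previous Store value remains valid (compare ConstraintLens.update).
   */
  def update(prev: Store, key: String, idx: Int, value: Int): Store =
    prev.get(key) match {
      case Some(arr) if arr(idx) == value => prev              // entry unchanged: reuse as is
      case Some(arr) =>
        val arr1 = arr.clone(); arr1(idx) = value
        prev.updated(key, arr1)
      case None =>
        val arr1 = new Array[Int](idx + 1); arr1(idx) = value  // fresh entry array
        prev.updated(key, arr1)
    }

  def main(args: Array[String]): Unit = {
    val s0: Store = Map("poly" -> Array(1, 2, 3))
    val s1 = update(s0, "poly", 1, 42)
    println(s0("poly").mkString(","))   // 1,2,3 -- the old map still sees the old array
    println(s1("poly").mkString(","))   // 1,42,3
  }
}
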
diff --git a/src/dotty/tools/dotc/core/Periods.scala b/src/dotty/tools/dotc/core/Periods.scala
index 0cd41a7df..6efadab7f 100644
--- a/src/dotty/tools/dotc/core/Periods.scala
+++ b/src/dotty/tools/dotc/core/Periods.scala
@@ -156,4 +156,4 @@ object Periods {
final val PhaseWidth = 6
final val PhaseMask = (1 << PhaseWidth) - 1
final val MaxPossiblePhaseId = PhaseMask
-}
\ No newline at end of file
+}
diff --git a/src/dotty/tools/dotc/core/Phases.scala b/src/dotty/tools/dotc/core/Phases.scala
index aa089bc15..96066db5e 100644
--- a/src/dotty/tools/dotc/core/Phases.scala
+++ b/src/dotty/tools/dotc/core/Phases.scala
@@ -342,4 +342,4 @@ object Phases {
private implicit class getClassDeco[T](val x: T) extends AnyVal {
def getClazz: Class[_ <: T] = x.getClass.asInstanceOf[Class[_ <: T]]
}
-}
\ No newline at end of file
+}
diff --git a/src/dotty/tools/dotc/core/Scopes.scala b/src/dotty/tools/dotc/core/Scopes.scala
index 70bcbdee6..ad9ba4201 100644
--- a/src/dotty/tools/dotc/core/Scopes.scala
+++ b/src/dotty/tools/dotc/core/Scopes.scala
@@ -93,7 +93,7 @@ object Scopes {
/** Lookup next entry with same name as this one */
def lookupNextEntry(entry: ScopeEntry)(implicit ctx: Context): ScopeEntry
-
+
/** Lookup a symbol */
final def lookup(name: Name)(implicit ctx: Context): Symbol = {
val e = lookupEntry(name)
@@ -138,9 +138,9 @@ object Scopes {
}
def implicitDecls(implicit ctx: Context): List[TermRef] = Nil
-
+
def openForMutations: MutableScope = unsupported("openForMutations")
-
+
final def toText(printer: Printer): Text = printer.toText(this)
}
@@ -376,7 +376,7 @@ object Scopes {
}
syms
}
-
+
override def openForMutations: MutableScope = this
}
diff --git a/src/dotty/tools/dotc/core/Signature.scala b/src/dotty/tools/dotc/core/Signature.scala
index 4e041e629..8717c4f42 100644
--- a/src/dotty/tools/dotc/core/Signature.scala
+++ b/src/dotty/tools/dotc/core/Signature.scala
@@ -59,4 +59,4 @@ object Signature {
assert(!resultType.isInstanceOf[ExprType])
apply(Nil, sigName(resultType, isJava))
}
-}
\ No newline at end of file
+}
diff --git a/src/dotty/tools/dotc/core/Skolemization.scala b/src/dotty/tools/dotc/core/Skolemization.scala
index 2832a3bad..fb47cb62a 100644
--- a/src/dotty/tools/dotc/core/Skolemization.scala
+++ b/src/dotty/tools/dotc/core/Skolemization.scala
@@ -5,42 +5,42 @@ import Symbols._, Types._, Contexts._
import collection.mutable
/** Methods to add and remove skolemtypes.
- *
- * Skolem types are generated when comparing refinements.
+ *
+ * Skolem types are generated when comparing refinements.
* A skolem type is simply a fresh singleton type that has a given type
* as underlying type.
- * Two skolem types are equal if they refer to the same underlying type.
+ * Two skolem types are equal if they refer to the same underlying type.
* To avoid unsoundness, skolem types have to be kept strictly local to the
* comparison, they are not allowed to escape the lifetime of a comparison
- * by surviving in a context or in GADT bounds.
+ * by surviving in a context or in GADT bounds.
*/
trait Skolemization {
-
+
implicit val ctx: Context
protected var skolemsOutstanding = false
-
+
def ensureStableSingleton(tp: Type): SingletonType = tp.stripTypeVar match {
- case tp: SingletonType if tp.isStable =>
+ case tp: SingletonType if tp.isStable =>
tp
- case tp: ValueType =>
+ case tp: ValueType =>
skolemsOutstanding = true
SkolemType(tp)
- case tp: TypeProxy =>
+ case tp: TypeProxy =>
ensureStableSingleton(tp.underlying)
}
-
+
/** Approximate a type `tp` with a type that does not contain skolem types.
* @param toSuper if true, return the smallest supertype of `tp` with this property
* else return the largest subtype.
*/
- final def deSkolemize(tp: Type, toSuper: Boolean): Type =
- if (skolemsOutstanding) deSkolemize(tp, if (toSuper) 1 else -1, Set())
+ final def deSkolemize(tp: Type, toSuper: Boolean): Type =
+ if (skolemsOutstanding) deSkolemize(tp, if (toSuper) 1 else -1, Set())
else tp
private def deSkolemize(tp: Type, variance: Int, seen: Set[SkolemType]): Type =
ctx.traceIndented(s"deskolemize $tp, variance = $variance, seen = $seen = ") {
- def approx(lo: Type = defn.NothingType, hi: Type = defn.AnyType, newSeen: Set[SkolemType] = seen) =
+ def approx(lo: Type = defn.NothingType, hi: Type = defn.AnyType, newSeen: Set[SkolemType] = seen) =
if (variance == 0) NoType
else deSkolemize(if (variance < 0) lo else hi, variance, newSeen)
tp match {
@@ -71,7 +71,7 @@ trait Skolemization {
tp.derivedRefinedType(parent1, tp.refinedName, refinedInfo1)
else
approx(hi = parent1)
- }
+ }
else approx()
case tp: TypeAlias =>
val alias1 = deSkolemize(tp.alias, variance * tp.variance, seen)
@@ -107,7 +107,7 @@ trait Skolemization {
deSkolemizeMap.mapOver(tp, variance, seen)
}
}
-
+
object deSkolemizeMap extends TypeMap {
private var seen: Set[SkolemType] = _
def apply(tp: Type) = deSkolemize(tp, variance, seen)
@@ -123,4 +123,4 @@ trait Skolemization {
}
}
}
-}
\ No newline at end of file
+}
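
The variance-driven approximation performed by `deSkolemize` can be illustrated in a toy setting; the mini type language below is invented for this sketch and is not the compiler's. Placeholders are replaced by their upper bound in covariant positions and by their lower bound in contravariant ones.

object SkolemApproxSketch {
  sealed trait Tp
  case class Named(name: String) extends Tp
  case class Skolem(lo: Tp, hi: Tp) extends Tp   // a placeholder with known bounds
  case class Fun(arg: Tp, res: Tp) extends Tp    // contravariant in arg, covariant in res

  /** Approximate away placeholders: upper bound when covariant, lower bound when
   *  contravariant -- the same idea as deSkolemize's variance parameter.
   */
  def approx(tp: Tp, variance: Int): Tp = tp match {
    case Skolem(lo, hi) => if (variance >= 0) hi else lo
    case Fun(arg, res)  => Fun(approx(arg, -variance), approx(res, variance))
    case n: Named       => n
  }

  def main(args: Array[String]): Unit = {
    val sk = Skolem(Named("Nothing"), Named("Any"))
    println(approx(Fun(sk, sk), variance = 1))
    // Fun(Named(Nothing),Named(Any)): the argument is approximated from below,
    // the result from above, so the answer is a supertype of the original.
  }
}
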
diff --git a/src/dotty/tools/dotc/core/StdNames.scala b/src/dotty/tools/dotc/core/StdNames.scala
index 4f59bd453..a4471ebb4 100644
--- a/src/dotty/tools/dotc/core/StdNames.scala
+++ b/src/dotty/tools/dotc/core/StdNames.scala
@@ -709,7 +709,7 @@ object StdNames {
class ScalaTypeNames extends ScalaNames[TypeName] {
protected implicit def fromString(s: String): TypeName = typeName(s)
- @switch def syntheticTypeParamName(i: Int): TypeName = "T"+i
+ @switch def syntheticTypeParamName(i: Int): TypeName = "T" + i
def syntheticTypeParamNames(num: Int): List[TypeName] =
(0 until num).map(syntheticTypeParamName)(breakOut)
diff --git a/src/dotty/tools/dotc/core/Substituters.scala b/src/dotty/tools/dotc/core/Substituters.scala
index b45522bf9..77ecf7fba 100644
--- a/src/dotty/tools/dotc/core/Substituters.scala
+++ b/src/dotty/tools/dotc/core/Substituters.scala
@@ -277,4 +277,4 @@ trait Substituters { this: Context =>
final class SubstParamsMap(from: BindingType, to: List[Type]) extends DeepTypeMap {
def apply(tp: Type) = substParams(tp, from, to, this)
}
-}
\ No newline at end of file
+}
diff --git a/src/dotty/tools/dotc/core/SymDenotations.scala b/src/dotty/tools/dotc/core/SymDenotations.scala
index e572f129b..3566595f2 100644
--- a/src/dotty/tools/dotc/core/SymDenotations.scala
+++ b/src/dotty/tools/dotc/core/SymDenotations.scala
@@ -43,7 +43,7 @@ trait SymDenotations { this: Context =>
if (denot is ValidForever) true
else {
val initial = denot.initial
- if (initial ne denot)
+ if (initial ne denot)
ctx.withPhase(initial.validFor.firstPhaseId).stillValid(initial.asSymDenotation)
else try {
val owner = denot.owner.denot
@@ -79,7 +79,7 @@ object SymDenotations {
super.validFor_=(p)
}
*/
-
+
// ------ Getting and setting fields -----------------------------
private[this] var myFlags: FlagSet = adaptFlags(initFlags)
diff --git a/src/dotty/tools/dotc/core/SymbolLoaders.scala b/src/dotty/tools/dotc/core/SymbolLoaders.scala
index a863ad1b9..0e8c9a41d 100644
--- a/src/dotty/tools/dotc/core/SymbolLoaders.scala
+++ b/src/dotty/tools/dotc/core/SymbolLoaders.scala
@@ -152,7 +152,7 @@ class SymbolLoaders {
def doComplete(root: SymDenotation)(implicit ctx: Context): Unit = {
assert(root is PackageClass, root)
- def maybeModuleClass(classRep: ClassPath#ClassRep) = classRep.name.last == '$'
+ def maybeModuleClass(classRep: ClassPath#ClassRep) = classRep.name.last == '$'
val pre = root.owner.thisType
root.info = ClassInfo(pre, root.symbol.asClass, Nil, currentDecls, pre select sourceModule)
if (!sourceModule.isCompleted)
@@ -226,7 +226,7 @@ class ClassfileLoader(val classfile: AbstractFile) extends SymbolLoader {
override def sourceFileOrNull: AbstractFile = classfile
- def description = "class file "+ classfile.toString
+ def description = "class file " + classfile.toString
def rootDenots(rootDenot: ClassDenotation)(implicit ctx: Context): (ClassDenotation, ClassDenotation) = {
val linkedDenot = rootDenot.scalacLinkedClass.denot match {
@@ -258,7 +258,7 @@ class ClassfileLoader(val classfile: AbstractFile) extends SymbolLoader {
}
class SourcefileLoader(val srcfile: AbstractFile) extends SymbolLoader {
- def description = "source file "+ srcfile.toString
+ def description = "source file " + srcfile.toString
override def sourceFileOrNull = srcfile
def doComplete(root: SymDenotation)(implicit ctx: Context): Unit = unsupported("doComplete")
}
diff --git a/src/dotty/tools/dotc/core/Symbols.scala b/src/dotty/tools/dotc/core/Symbols.scala
index c655f1c52..dab84bb5f 100644
--- a/src/dotty/tools/dotc/core/Symbols.scala
+++ b/src/dotty/tools/dotc/core/Symbols.scala
@@ -162,7 +162,7 @@ trait Symbols { this: Context =>
privateWithin, coord, assocFile)
def synthesizeCompanionMethod(name: Name, target: SymDenotation, owner: SymDenotation)(implicit ctx: Context) =
- if(owner.exists && target.exists && !owner.isAbsent && !target.isAbsent) {
+ if (owner.exists && target.exists && !owner.isAbsent && !target.isAbsent) {
val existing = owner.unforcedDecls.lookup(name)
existing.orElse{
@@ -472,7 +472,7 @@ object Symbols {
override def toString: String =
if (lastDenot == null) s"Naked$prefixString#$id"
- else lastDenot.toString// +"#"+id // !!! DEBUG
+ else lastDenot.toString// + "#" + id // !!! DEBUG
def toText(printer: Printer): Text = printer.toText(this)
diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala
index 6c3fef1a9..df18813b9 100644
--- a/src/dotty/tools/dotc/core/TypeApplications.scala
+++ b/src/dotty/tools/dotc/core/TypeApplications.scala
@@ -191,8 +191,8 @@ class TypeApplications(val self: Type) extends AnyVal {
if (res.isInstantiatedLambda) res.select(tpnme.Apply) else res
}
}
-
- /** Simplify a fully instantiated type of the form `LambdaX{... type Apply = T } # Apply` to `T`.
+
+ /** Simplify a fully instantiated type of the form `LambdaX{... type Apply = T } # Apply` to `T`.
*/
def simplifyApply(implicit ctx: Context): Type = self match {
case self @ TypeRef(prefix, tpnme.Apply) if prefix.isInstantiatedLambda =>
@@ -383,7 +383,7 @@ class TypeApplications(val self: Type) extends AnyVal {
case JavaArrayType(elemtp) => elemtp
case _ => firstBaseArgInfo(defn.SeqClass)
}
-
+
def containsSkolemType(target: Type)(implicit ctx: Context): Boolean = {
def recur(tp: Type): Boolean = tp.stripTypeVar match {
case SkolemType(tp) =>
@@ -404,7 +404,7 @@ class TypeApplications(val self: Type) extends AnyVal {
case _ =>
false
}
- recur(self)
+ recur(self)
}
/** Given a type alias
@@ -548,4 +548,4 @@ class TypeApplications(val self: Type) extends AnyVal {
else if (typeParams.nonEmpty) p(EtaExpand) || tryLift(self.baseClasses)
else tryLift(self.baseClasses)
}
-}
\ No newline at end of file
+}
diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala
index 1687d6159..a59a64a91 100644
--- a/src/dotty/tools/dotc/core/TypeComparer.scala
+++ b/src/dotty/tools/dotc/core/TypeComparer.scala
@@ -163,7 +163,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling wi
// Dealiasing is taken care of elsewhere.
val pre1 = tp1.prefix
val pre2 = tp2.prefix
- isSameType(pre1, pre2) ||
+ isSameType(pre1, pre2) ||
sym1.isClass &&
pre2.classSymbol.exists &&
pre2.abstractTypeMembers.isEmpty &&
@@ -176,11 +176,11 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling wi
!tp1.isInstanceOf[WithFixedSym] &&
!tp2.isInstanceOf[WithFixedSym]
) ||
- compareHK(tp1, tp2, inOrder = true) ||
+ compareHK(tp1, tp2, inOrder = true) ||
compareHK(tp2, tp1, inOrder = false) ||
compareAlias(tp1.info)
case _ =>
- compareHK(tp2, tp1, inOrder = false) ||
+ compareHK(tp2, tp1, inOrder = false) ||
compareAlias(NoType)
}
}
@@ -312,7 +312,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling wi
}
fourthTry(tp1, tp2)
}
-
+
private def thirdTry(tp1: Type, tp2: Type): Boolean = tp2 match {
case tp2: NamedType =>
thirdTryNamed(tp1, tp2)
@@ -453,7 +453,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling wi
case JavaArrayType(elem2) => isSubType(elem1, elem2)
case _ => tp2 isRef ObjectClass
}
- compareJavaArray
+ compareJavaArray
case _ =>
false
}
@@ -518,7 +518,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling wi
false
} else isSubType(tp1, tp2)
- /** Does type `tp1` have a member with name `name` whose normalized type is a subtype of
+ /** Does type `tp1` have a member with name `name` whose normalized type is a subtype of
* the normalized type of the refinement `tp2`?
* Normalization is as follows: If `tp2` contains a skolem to its refinement type,
* rebase both itself and the member info of `tp` on a freshly created skolem type.
@@ -552,16 +552,16 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling wi
}
/** Skip refinements in `tp2` which match corresponding refinements in `tp1`.
- * "Match" means:
- * - they appear in the same order,
- * - they refine the same names,
- * - the refinement in `tp1` is an alias type, and
+ * "Match" means:
+ * - they appear in the same order,
+ * - they refine the same names,
+ * - the refinement in `tp1` is an alias type, and
* - neither refinement refers back to the refined type via a refined this.
* @return The parent type of `tp2` after skipping the matching refinements.
*/
private def skipMatching(tp1: Type, tp2: RefinedType): Type = tp1 match {
case tp1 @ RefinedType(parent1, name1)
- if name1 == tp2.refinedName &&
+ if name1 == tp2.refinedName &&
tp1.refinedInfo.isInstanceOf[TypeAlias] &&
!tp2.refinementRefersToThis &&
!tp1.refinementRefersToThis =>
@@ -672,7 +672,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling wi
case _: PolyType =>
false
case tp2: MethodType =>
- relaxed && tp2.paramNames.isEmpty &&
+ relaxed && tp2.paramNames.isEmpty &&
matchesType(tp1, tp2.resultType, relaxed)
case tp2 =>
relaxed || isSameType(tp1, tp2)
@@ -1118,15 +1118,15 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling wi
def copyIn(ctx: Context) = new TypeComparer(ctx)
// ----------- Diagnostics --------------------------------------------------
-
+
/** A hook for showing subtype traces. Overridden in ExplainingTypeComparer */
def traceIndented[T](str: String)(op: => T): T = op
-
+
private def traceInfo(tp1: Type, tp2: Type) =
s"${tp1.show} <:< ${tp2.show}" + {
if (ctx.settings.verbose.value || Config.verboseExplainSubtype) {
- s" ${tp1.getClass}, ${tp2.getClass}" +
- (if (frozenConstraint) " frozen" else "") +
+ s" ${tp1.getClass}, ${tp2.getClass}" +
+ (if (frozenConstraint) " frozen" else "") +
(if (ctx.mode is Mode.TypevarsMissContext) " tvars-miss-ctx" else "")
}
else ""
diff --git a/src/dotty/tools/dotc/core/TypeErasure.scala b/src/dotty/tools/dotc/core/TypeErasure.scala
index a45e29287..20cf816c2 100644
--- a/src/dotty/tools/dotc/core/TypeErasure.scala
+++ b/src/dotty/tools/dotc/core/TypeErasure.scala
@@ -95,7 +95,7 @@ object TypeErasure {
def erasure(tp: Type)(implicit ctx: Context): Type = scalaErasureFn(tp)(erasureCtx)
def semiErasure(tp: Type)(implicit ctx: Context): Type = semiErasureFn(tp)(erasureCtx)
def sigName(tp: Type, isJava: Boolean)(implicit ctx: Context): TypeName = {
- val seqClass = if(isJava) defn.ArrayClass else defn.SeqClass
+ val seqClass = if (isJava) defn.ArrayClass else defn.SeqClass
val normTp =
if (tp.isRepeatedParam) tp.translateParameterized(defn.RepeatedParamClass, seqClass)
else tp
diff --git a/src/dotty/tools/dotc/core/TypeOps.scala b/src/dotty/tools/dotc/core/TypeOps.scala
index e6a81248c..2b6ea49e8 100644
--- a/src/dotty/tools/dotc/core/TypeOps.scala
+++ b/src/dotty/tools/dotc/core/TypeOps.scala
@@ -86,7 +86,7 @@ trait TypeOps { this: Context => // TODO: Make standalone object.
class SimplifyMap extends TypeMap {
def apply(tp: Type) = simplify(tp, this)
}
-
+
/** Approximate union type by intersection of its dominators.
* See Type#approximateUnion for an explanation.
*/
@@ -330,7 +330,7 @@ trait TypeOps { this: Context => // TODO: Make standalone object.
}
parentRefs
}
-
+
/** An argument bounds violation is a triple consisting of
* - the argument tree
* - a string "upper" or "lower" indicating which bound is violated
diff --git a/src/dotty/tools/dotc/core/TyperState.scala b/src/dotty/tools/dotc/core/TyperState.scala
index 1079af510..91cda1dd8 100644
--- a/src/dotty/tools/dotc/core/TyperState.scala
+++ b/src/dotty/tools/dotc/core/TyperState.scala
@@ -17,7 +17,7 @@ class TyperState(r: Reporter) extends DotClass with Showable {
def reporter = r
/** The current constraint set */
- def constraint: Constraint =
+ def constraint: Constraint =
new OrderingConstraint(SimpleMap.Empty, SimpleMap.Empty, SimpleMap.Empty)
def constraint_=(c: Constraint): Unit = {}
diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala
index 31567fee0..e6235695e 100644
--- a/src/dotty/tools/dotc/core/Types.scala
+++ b/src/dotty/tools/dotc/core/Types.scala
@@ -76,7 +76,7 @@ object Types {
val uniqId = {
nextId = nextId + 1
-// if(nextId == 19555)
+// if (nextId == 19555)
// println("foo")
nextId
}
@@ -471,7 +471,7 @@ object Types {
go(bounds.hi)
case _ =>
go(next)
- }
+ }
}
def goAnd(l: Type, r: Type) = go(l) & (go(r), pre)
def goOr(l: Type, r: Type) = go(l) | (go(r), pre)
@@ -603,9 +603,9 @@ object Types {
* and matching result types after renaming corresponding parameter types
* if the method types are dependent.
* - Or both types are =:=-equivalent
- * - Or phase.erasedTypes is false, and neither type takes
+ * - Or phase.erasedTypes is false, and neither type takes
* term or type parameters.
- *
+ *
* (*) when matching with a Java method, we also regard Any and Object as equivalent
* parameter types.
*/
@@ -777,9 +777,9 @@ object Types {
* to just U. Does not perform the reduction if the resulting type would contain
* a reference to the "this" of the current refined type. But does follow
* aliases in order to avoid such references. Example:
- *
+ *
* Lambda$I { type $hk$Arg0 = String, type Apply = Lambda$I{...}.$hk$Arg0 } # Apply
- *
+ *
* Here, the refinement for `Apply` has a refined this node, yet dereferencing ones more
* yields `String` as the result of lookupRefined.
*/
@@ -788,7 +788,7 @@ object Types {
case pre: RefinedType =>
if (pre.refinedName ne name) loop(pre.parent)
else pre.refinedInfo match {
- case TypeAlias(tp) =>
+ case TypeAlias(tp) =>
if (!pre.refinementRefersToThis) tp
else tp match {
case TypeRef(SkolemType(`pre`), alias) => lookupRefined(alias)
@@ -1158,7 +1158,7 @@ object Types {
private[this] var lastDenotation: Denotation = _
private[this] var lastSymbol: Symbol = _
private[this] var checkedPeriod = Nowhere
-
+
// Invariants:
// (1) checkedPeriod != Nowhere => lastDenotation != null
// (2) lastDenotation != null => lastSymbol != null
@@ -1286,7 +1286,7 @@ object Types {
checkSymAssign(denot.symbol)
// additional checks that intercept `denot` can be added here
-
+
lastDenotation = denot
lastSymbol = denot.symbol
}
@@ -1737,10 +1737,10 @@ object Types {
extends CachedProxyType with BindingType with ValueType {
val refinedInfo: Type
-
+
private var refinementRefersToThisCache: Boolean = _
private var refinementRefersToThisKnown: Boolean = false
-
+
def refinementRefersToThis(implicit ctx: Context): Boolean = {
if (!refinementRefersToThisKnown) {
refinementRefersToThisCache = refinedInfo.containsSkolemType(this)
@@ -1778,7 +1778,7 @@ object Types {
&& !parent.isLambda)
derivedRefinedType(parent.EtaExpand, refinedName, refinedInfo)
else
- if (false) RefinedType(parent, refinedName, refinedInfo)
+ if (false) RefinedType(parent, refinedName, refinedInfo)
else RefinedType(parent, refinedName, rt => refinedInfo.substSkolem(this, SkolemType(rt)))
}
@@ -1929,11 +1929,11 @@ object Types {
def isJava = false
def isImplicit = false
-
+
private val resType = resultTypeExp(this)
assert(resType.exists)
-
- override def resultType(implicit ctx: Context): Type =
+
+ override def resultType(implicit ctx: Context): Type =
if (dependencyStatus == FalseDeps) { // dealias all false dependencies
val dealiasMap = new TypeMap {
def apply(tp: Type) = tp match {
@@ -1949,28 +1949,28 @@ object Types {
else resType
var myDependencyStatus: DependencyStatus = Unknown
-
+
private def combine(x: DependencyStatus, y: DependencyStatus): DependencyStatus = {
val status = (x & StatusMask) max (y & StatusMask)
val provisional = (x | y) & Provisional
(if (status == TrueDeps) status else status | provisional).toByte
}
-
+
/** The dependency status of this method. Some examples:
- *
+ *
* class C extends { type S; type T = String }
* def f(x: C)(y: Boolean) // dependencyStatus = NoDeps
* def f(x: C)(y: x.S) // dependencyStatus = TrueDeps
- * def f(x: C)(y: x.T) // dependencyStatus = FalseDeps, i.e.
+ * def f(x: C)(y: x.T) // dependencyStatus = FalseDeps, i.e.
* // dependency can be eliminated by dealiasing.
*/
private def dependencyStatus(implicit ctx: Context): DependencyStatus = {
if (myDependencyStatus != Unknown) myDependencyStatus
else {
val isDepAcc = new TypeAccumulator[DependencyStatus] {
- def apply(x: DependencyStatus, tp: Type) =
+ def apply(x: DependencyStatus, tp: Type) =
if (x == TrueDeps) x
- else
+ else
tp match {
case MethodParam(`thisMethodType`, _) => TrueDeps
case tp @ TypeRef(MethodParam(`thisMethodType`, _), name) =>
@@ -1992,7 +1992,7 @@ object Types {
* which cannot be eliminated by de-aliasing?
*/
def isDependent(implicit ctx: Context): Boolean = dependencyStatus == TrueDeps
-
+
protected def computeSignature(implicit ctx: Context): Signature =
resultSignature.prepend(paramTypes, isJava)
@@ -2071,7 +2071,7 @@ object Types {
object MethodType extends MethodTypeCompanion {
def apply(paramNames: List[TermName], paramTypes: List[Type])(resultTypeExp: MethodType => Type)(implicit ctx: Context) =
unique(new CachedMethodType(paramNames, paramTypes)(resultTypeExp))
-
+
private type DependencyStatus = Byte
private final val Unknown: DependencyStatus = 0 // not yet computed
private final val NoDeps: DependencyStatus = 1 // no dependent parameters found
@@ -2116,7 +2116,7 @@ object Types {
val paramBounds = paramBoundsExp(this)
val resType = resultTypeExp(this)
-
+
override def resultType(implicit ctx: Context) = resType
protected def computeSignature(implicit ctx: Context) = resultSignature
@@ -2234,7 +2234,7 @@ object Types {
type BT = Type
override def underlying(implicit ctx: Context) = binder
def copyBoundType(bt: BT) = SkolemType(bt)
-
+
// need to customize hashCode and equals to prevent infinite recursion for
// refinements that refer to the refinement type via this
override def computeHash = addDelta(binder.identityHash, 41)
@@ -2263,7 +2263,7 @@ object Types {
* @param owningTree The function part of the TypeApply tree tree that introduces
* the type variable.
* @paran owner The current owner if the context where the variable was created.
- *
+ *
* `owningTree` and `owner` are used to determine whether a type-variable can be instantiated
* at some given point. See `Inferencing#interpolateUndetVars`.
*/
@@ -2599,7 +2599,7 @@ object Types {
if ((annot eq this.annot) && (tpe eq this.tpe)) this
else AnnotatedType(annot, tpe)
- override def stripTypeVar(implicit ctx: Context): Type =
+ override def stripTypeVar(implicit ctx: Context): Type =
derivedAnnotatedType(annot, tpe.stripTypeVar)
override def stripAnnots(implicit ctx: Context): Type = tpe.stripAnnots
}
@@ -2871,7 +2871,7 @@ object Types {
protected def applyToAnnot(x: T, annot: Annotation): T = x // don't go into annotations
protected var variance = 1
-
+
protected def applyToPrefix(x: T, tp: NamedType) = {
val saved = variance
variance = 0
@@ -2879,7 +2879,7 @@ object Types {
variance = saved
result
}
-
+
def foldOver(x: T, tp: Type): T = tp match {
case tp: TypeRef =>
if (stopAtStatic && tp.symbol.isStatic) x
@@ -3073,7 +3073,7 @@ object Types {
// ----- Debug ---------------------------------------------------------
var debugTrace = false
-
+
val watchList = List[String](
) map (_.toTypeName)
diff --git a/src/dotty/tools/dotc/core/Uniques.scala b/src/dotty/tools/dotc/core/Uniques.scala
index fcf2df30b..c24b0cabc 100644
--- a/src/dotty/tools/dotc/core/Uniques.scala
+++ b/src/dotty/tools/dotc/core/Uniques.scala
@@ -124,4 +124,4 @@ object Uniques {
}
}
}
-}
\ No newline at end of file
+}
diff --git a/src/dotty/tools/dotc/core/pickling/AbstractFileReader.scala b/src/dotty/tools/dotc/core/pickling/AbstractFileReader.scala
index 5de7ab0cc..dbde8548f 100644
--- a/src/dotty/tools/dotc/core/pickling/AbstractFileReader.scala
+++ b/src/dotty/tools/dotc/core/pickling/AbstractFileReader.scala
@@ -60,13 +60,13 @@ class AbstractFileReader(val file: AbstractFile) {
/** extract a character at position bp from buf
*/
def getChar(mybp: Int): Char =
- (((buf(mybp) & 0xff) << 8) + (buf(mybp+1) & 0xff)).toChar
+ (((buf(mybp) & 0xff) << 8) + (buf(mybp + 1) & 0xff)).toChar
/** extract an integer at position bp from buf
*/
def getInt(mybp: Int): Int =
- ((buf(mybp ) & 0xff) << 24) + ((buf(mybp+1) & 0xff) << 16) +
- ((buf(mybp+2) & 0xff) << 8) + (buf(mybp+3) & 0xff)
+ ((buf(mybp ) & 0xff) << 24) + ((buf(mybp + 1) & 0xff) << 16) +
+ ((buf(mybp + 2) & 0xff) << 8) + (buf(mybp + 3) & 0xff)
/** extract a long integer at position bp from buf
*/
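
The `& 0xff` masking in `getChar`/`getInt` matters because `Byte` is signed in Scala; without it a byte such as 0xFE would sign-extend to a negative Int. A self-contained check of the same big-endian arithmetic (names invented for the sketch):

object BigEndianSketch {
  /** Read two bytes at `i` as a big-endian 16-bit value, like getChar above. */
  def getChar(buf: Array[Byte], i: Int): Char =
    (((buf(i) & 0xff) << 8) + (buf(i + 1) & 0xff)).toChar

  /** Read four bytes at `i` as a big-endian Int, like getInt above. */
  def getInt(buf: Array[Byte], i: Int): Int =
    ((buf(i)     & 0xff) << 24) + ((buf(i + 1) & 0xff) << 16) +
    ((buf(i + 2) & 0xff) << 8)  +  (buf(i + 3) & 0xff)

  def main(args: Array[String]): Unit = {
    val buf = Array[Byte](0xCA.toByte, 0xFE.toByte, 0xBA.toByte, 0xBE.toByte)
    println(getChar(buf, 0).toInt.toHexString)   // cafe
    println(getInt(buf, 0).toHexString)          // cafebabe
  }
}
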
diff --git a/src/dotty/tools/dotc/core/pickling/ByteCodecs.scala b/src/dotty/tools/dotc/core/pickling/ByteCodecs.scala
index 0cffe43bc..8b3e49bd0 100644
--- a/src/dotty/tools/dotc/core/pickling/ByteCodecs.scala
+++ b/src/dotty/tools/dotc/core/pickling/ByteCodecs.scala
@@ -193,7 +193,7 @@ object ByteCodecs {
/**
* Destructively decodes array xs and returns the length of the decoded array.
*
- * Sometimes returns (length+1) of the decoded array. Example:
+ * Sometimes returns (length + 1) of the decoded array. Example:
*
* scala> val enc = reflect.generic.ByteCodecs.encode(Array(1,2,3))
* enc: Array[Byte] = Array(2, 5, 13, 1)
diff --git a/src/dotty/tools/dotc/core/pickling/ClassfileParser.scala b/src/dotty/tools/dotc/core/pickling/ClassfileParser.scala
index 52ea7ba38..3d47678b7 100644
--- a/src/dotty/tools/dotc/core/pickling/ClassfileParser.scala
+++ b/src/dotty/tools/dotc/core/pickling/ClassfileParser.scala
@@ -92,10 +92,6 @@ class ClassfileParser(
if (c != classRoot.symbol) mismatchError(c)
}
- if(classRoot.symbol.id == 4812) {
- println("bar")
- }
-
addEnclosingTParams()
if (unpickleOrParseInnerClasses()) return
@@ -139,7 +135,7 @@ class ClassfileParser(
if (companionClassMethod.exists) companionClassMethod.entered
val companionModuleMethod = ctx.synthesizeCompanionMethod(nme.COMPANION_MODULE_METHOD, moduleRoot, classRoot)
if (companionModuleMethod.exists) companionModuleMethod.entered
-
+
setClassInfo(classRoot, classInfo)
setClassInfo(moduleRoot, staticInfo)
}
@@ -590,7 +586,7 @@ class ClassfileParser(
val targs = tparams.map(_.typeRef)
val paramNames = attrs.map(_.name.asTermName)
val paramTypes = attrs.map(_.info.resultType)
-
+
def addConstr(ptypes: List[Type]) = {
val mtype = MethodType(paramNames, ptypes, classRoot.typeRef.appliedTo(targs))
val constrType = if (tparams.isEmpty) mtype else TempPolyType(tparams, mtype)
@@ -606,15 +602,15 @@ class ClassfileParser(
addDefaultGetter(attr, i)
}
}
-
+
addConstr(paramTypes)
if (paramTypes.nonEmpty)
paramTypes.last match {
- case defn.ArrayType(elemtp) =>
- addConstr(paramTypes.init :+ defn.RepeatedParamType.appliedTo(elemtp))
+ case defn.ArrayType(elemtp) =>
+ addConstr(paramTypes.init :+ defn.RepeatedParamType.appliedTo(elemtp))
case _ =>
}
-
+
}
}
@@ -739,7 +735,7 @@ class ClassfileParser(
def originalName = pool.getName(name)
override def toString =
- originalName + " in " + outerName + "(" + externalName +")"
+ originalName + " in " + outerName + "(" + externalName + ")"
}
object innerClasses extends scala.collection.mutable.HashMap[Name, InnerClassEntry] {
diff --git a/src/dotty/tools/dotc/core/pickling/DottyUnpickler.scala b/src/dotty/tools/dotc/core/pickling/DottyUnpickler.scala
index 84a9a1744..64be68975 100644
--- a/src/dotty/tools/dotc/core/pickling/DottyUnpickler.scala
+++ b/src/dotty/tools/dotc/core/pickling/DottyUnpickler.scala
@@ -23,14 +23,14 @@ class DottyUnpickler(bytes: Array[Byte]) {
private val unpickler = new TastyUnpickler(bytes)
private val treeUnpickler = unpickler.unpickle(new TreeSectionUnpickler).get
-
+
/** Enter all toplevel classes and objects into their scopes
* @param roots a set of SymDenotations that should be overwritten by unpickling
*/
- def enter(roots: Set[SymDenotation])(implicit ctx: Context): Unit =
+ def enter(roots: Set[SymDenotation])(implicit ctx: Context): Unit =
treeUnpickler.enterTopLevel(roots)
-
- /** The unpickled trees
+
+ /** The unpickled trees
* @param readPositions if true, trees get decorated with position information.
*/
def body(readPositions: Boolean = false)(implicit ctx: Context): List[Tree] = {
@@ -44,7 +44,7 @@ class DottyUnpickler(bytes: Array[Byte]) {
def unpickle(reader: TastyReader, tastyName: TastyName.Table) =
new TreeUnpickler(reader, tastyName)
}
-
+
private class PositionsSectionUnpickler extends SectionUnpickler[(Position, AddrToPosition)]("Positions") {
def unpickle(reader: TastyReader, tastyName: TastyName.Table) =
new PositionUnpickler(reader).unpickle()
diff --git a/src/dotty/tools/dotc/core/pickling/NameBuffer.scala b/src/dotty/tools/dotc/core/pickling/NameBuffer.scala
index a60767fe6..2a6239c5a 100644
--- a/src/dotty/tools/dotc/core/pickling/NameBuffer.scala
+++ b/src/dotty/tools/dotc/core/pickling/NameBuffer.scala
@@ -24,14 +24,14 @@ class NameBuffer extends TastyBuffer(100000) {
ref
}
def nameIndex(name: Name): NameRef = {
- val tname =
+ val tname =
if (name.isShadowedName) Shadowed(nameIndex(name.revertShadowed))
else Simple(name.toTermName)
nameIndex(tname)
}
-
+
def nameIndex(str: String): NameRef = nameIndex(str.toTermName)
-
+
def fullNameIndex(name: Name): NameRef = {
val pos = name.lastIndexOf('.')
if (pos > 0)
@@ -39,7 +39,7 @@ class NameBuffer extends TastyBuffer(100000) {
else
nameIndex(name)
}
-
+
private def withLength(op: => Unit): Unit = {
val lengthAddr = currentAddr
writeByte(0)
@@ -48,12 +48,12 @@ class NameBuffer extends TastyBuffer(100000) {
assert(length < 128)
putNat(lengthAddr, length, 1)
}
-
+
def writeNameRef(ref: NameRef) = writeNat(ref.index)
-
+
def pickleName(name: TastyName): Unit = name match {
- case Simple(name) =>
- val bytes =
+ case Simple(name) =>
+ val bytes =
if (name.length == 0) new Array[Byte](0)
else Codec.toUTF8(chrs, name.start, name.length)
writeByte(UTF8)
@@ -62,7 +62,7 @@ class NameBuffer extends TastyBuffer(100000) {
case Qualified(qualified, selector) =>
writeByte(QUALIFIED)
withLength { writeNameRef(qualified); writeNameRef(selector) }
- case Signed(original, params, result) =>
+ case Signed(original, params, result) =>
writeByte(SIGNED)
withLength { writeNameRef(original); writeNameRef(result); params.foreach(writeNameRef) }
case Expanded(prefix, original) =>
@@ -81,7 +81,7 @@ class NameBuffer extends TastyBuffer(100000) {
writeByte(SHADOWED)
withLength { writeNameRef(original) }
}
-
+
override def assemble(): Unit = {
var i = 0
for ((name, ref) <- nameRefs) {
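
`withLength` reserves a single placeholder byte, runs the writer, then patches the real length in afterwards (hence the `length < 128` assertion). A standalone sketch of that pattern; the tag byte below is invented purely for the demo:

object LengthPrefixSketch {
  import scala.collection.mutable.ArrayBuffer

  val buf = ArrayBuffer.empty[Byte]

  /** Reserve one byte for a length, run `op`, then patch the real length in
   *  (compare NameBuffer.withLength; as there, only lengths < 128 fit in one byte).
   */
  def withLength(op: => Unit): Unit = {
    val lengthAddr = buf.length
    buf += 0                                   // placeholder
    op
    val length = buf.length - lengthAddr - 1
    assert(length < 128, s"entry too long: $length")
    buf(lengthAddr) = length.toByte
  }

  def main(args: Array[String]): Unit = {
    buf += 0x01                                // an invented tag byte, just for the demo
    withLength { buf ++= "foo.Bar".getBytes("UTF-8") }
    println(buf.map(b => b & 0xff).mkString(" "))   // 1 7 102 111 111 46 66 97 114
  }
}
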
diff --git a/src/dotty/tools/dotc/core/pickling/PickleBuffer.scala b/src/dotty/tools/dotc/core/pickling/PickleBuffer.scala
index 06d02d888..33ba4439b 100644
--- a/src/dotty/tools/dotc/core/pickling/PickleBuffer.scala
+++ b/src/dotty/tools/dotc/core/pickling/PickleBuffer.scala
@@ -69,7 +69,7 @@ class PickleBuffer(data: Array[Byte], from: Int, to: Int) {
def patchNat(pos: Int, x: Int): Unit = {
def patchNatPrefix(x: Int): Unit = {
writeByte(0)
- Array.copy(bytes, pos, bytes, pos+1, writeIndex - (pos+1))
+ Array.copy(bytes, pos, bytes, pos + 1, writeIndex - (pos + 1))
bytes(pos) = ((x & 0x7f) | 0x80).toByte
val y = x >>> 7
if (y != 0) patchNatPrefix(y)
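
For reference, the Nat layout that `patchNat` is inserting a prefix byte for: big-endian groups of 7 bits, with the high bit set on every byte except the last (as the `| 0x80` above suggests). A hedged, self-contained round-trip sketch -- the real `PickleBuffer` has its own `writeNat`/`readNat`:

object NatCodecSketch {
  import scala.collection.mutable.ArrayBuffer

  /** Write `x` as a Nat: most significant 7-bit group first, continuation bit on all but the last byte. */
  def writeNat(buf: ArrayBuffer[Byte], x: Int): Unit = {
    def emitPrefix(y: Int): Unit =
      if (y != 0) { emitPrefix(y >>> 7); buf += ((y & 0x7f) | 0x80).toByte }
    emitPrefix(x >>> 7)
    buf += (x & 0x7f).toByte
  }

  /** Read a Nat starting at `start`, stopping at the first byte whose high bit is clear. */
  def readNat(bytes: IndexedSeq[Byte], start: Int): Int = {
    var i = start
    var result = 0
    var done = false
    while (!done) {
      val b = bytes(i) & 0xff
      result = (result << 7) | (b & 0x7f)
      done = (b & 0x80) == 0
      i += 1
    }
    result
  }

  def main(args: Array[String]): Unit = {
    val buf = ArrayBuffer.empty[Byte]
    writeNat(buf, 300)                                           // 300 = 0b10_0101100
    println(buf.map(b => f"0x${b & 0xff}%02x").mkString(" "))    // 0x82 0x2c
    println(readNat(buf, 0))                                     // 300
  }
}
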
diff --git a/src/dotty/tools/dotc/core/pickling/PickleFormat.scala b/src/dotty/tools/dotc/core/pickling/PickleFormat.scala
index 872265e2d..d12a879ba 100644
--- a/src/dotty/tools/dotc/core/pickling/PickleFormat.scala
+++ b/src/dotty/tools/dotc/core/pickling/PickleFormat.scala
@@ -29,7 +29,7 @@ Macro-format:
Section = NameRef Length Bytes
Length = Nat // length of rest of entry in bytes
- Name = UTF8 Length UTF8-CodePoint*
+ Name = UTF8 Length UTF8-CodePoint*
QUALIFIED Length qualified_NameRef selector_NameRef
SIGNED Length original_NameRef resultSig_NameRef paramSig_NameRef*
EXPANDED Length original_NameRef
@@ -95,7 +95,7 @@ Standard-Section: "ASTs" TopLevelStat*
TYPEAPPLY Length fn_Term arg_Type*
CaseDef = CASEDEF Length pat_Term rhs_Tree guard_Tree?
ImplicitArg = IMPLICITARG arg_Term
- ASTRef = Nat // byte position in AST payload
+ ASTRef = Nat // byte position in AST payload
Path = Constant
TERMREFdirect sym_ASTRef
@@ -146,10 +146,10 @@ Standard-Section: "ASTs" TopLevelStat*
NameType = paramName_NameRef typeOrBounds_ASTRef
Modifier = PRIVATE
- INTERNAL // package private
+ INTERNAL // package private
PROTECTED
- PRIVATEqualified qualifier_Type // will be dropped
- PROTECTEDqualified qualifier_Type // will be dropped
+ PRIVATEqualified qualifier_Type // will be dropped
+ PROTECTEDqualified qualifier_Type // will be dropped
ABSTRACT
FINAL
SEALED
@@ -158,20 +158,20 @@ Standard-Section: "ASTs" TopLevelStat*
LAZY
OVERRIDE
INLINE // macro
- ABSOVERRIDE // abstract override
- STATIC // mapped to static Java member
- OBJECT // an object or its class
+ ABSOVERRIDE // abstract override
+ STATIC // mapped to static Java member
+ OBJECT // an object or its class
TRAIT // a trait
- LOCAL // private[this] or protected[this]
- SYNTHETIC // generated by Scala compiler
- ARTIFACT // to be tagged Java Synthetic
- MUTABLE // a var
- LABEL // method generated as a label
- FIELDaccessor // getter or setter
- CASEaccessor // getter for case class param
- COVARIANT // type param marked “+”
- CONTRAVARIANT // type param marked “-”
- SCALA2X // Imported from Scala2.x
+ LOCAL // private[this] or protected[this]
+ SYNTHETIC // generated by Scala compiler
+ ARTIFACT // to be tagged Java Synthetic
+ MUTABLE // a var
+ LABEL // method generated as a label
+ FIELDaccessor // getter or setter
+ CASEaccessor // getter for case class param
+ COVARIANT // type param marked “+”
+ CONTRAVARIANT // type param marked “-”
+ SCALA2X // Imported from Scala2.x
DEFAULTparameterized // Method with default params
INSUPERCALL // defined in the argument of a constructor supercall
Annotation
@@ -199,7 +199,7 @@ Standard Section: "Positions" sourceLength_Nat Assoc*
// Difference of end offset of addressed node vs parent node.
// Offsets and addresses are difference encoded.
// Nodes which have the same positions as their parents are omitted.
- Delta = Int // Difference between consecutive offsets / tree addresses,
+ Delta = Int // Difference between consecutive offsets / tree addresses,
**************************************************************************************/
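As an aside on the format described above: the name table stores each name once, and every other occurrence, including the pieces of compound names such as QUALIFIED and SIGNED, is written as a NameRef index into that table. A minimal, self-contained sketch of that indirection, using hypothetical classes rather than the compiler's own:

// Hypothetical model of the name pool: each distinct name is stored once,
// later occurrences are written as its index, and compound names refer back
// to earlier entries by index.
sealed trait PoolName
case class SimpleName(s: String) extends PoolName
case class QualifiedName(qualified: Int, selector: Int) extends PoolName // indices into the pool

class NamePool {
  private val entries = collection.mutable.ArrayBuffer.empty[PoolName]
  private val index   = collection.mutable.HashMap.empty[PoolName, Int]
  /** Index of `n`, adding it to the pool on first use. */
  def ref(n: PoolName): Int = index.getOrElseUpdate(n, { entries += n; entries.length - 1 })
  /** Resolve an index back to a name, as the Table does on the unpickling side. */
  def apply(i: Int): PoolName = entries(i)
}

object NamePoolDemo extends App {
  val pool  = new NamePool
  val scala = pool.ref(SimpleName("scala"))        // 0
  val list  = pool.ref(SimpleName("List"))         // 1
  val full  = pool.ref(QualifiedName(scala, list)) // 2, refers back to entries 0 and 1
  println(pool(full))                              // QualifiedName(0,1)
}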
diff --git a/src/dotty/tools/dotc/core/pickling/PositionPickler.scala b/src/dotty/tools/dotc/core/pickling/PositionPickler.scala
index 8ee70719e..e8a0b3d01 100644
--- a/src/dotty/tools/dotc/core/pickling/PositionPickler.scala
+++ b/src/dotty/tools/dotc/core/pickling/PositionPickler.scala
@@ -13,13 +13,13 @@ import TastyBuffer._
import util.Positions._
object PositionPickler {
-
+
trait DeferredPosition {
var parentPos: Position = NoPosition
}
- def traverse(x: Any, parentPos: Position, op: (Tree, Position) => Unit)(implicit ctx: Context): Unit =
- if (parentPos.exists)
+ def traverse(x: Any, parentPos: Position, op: (Tree, Position) => Unit)(implicit ctx: Context): Unit =
+ if (parentPos.exists)
x match {
case x: Tree @unchecked =>
op(x, parentPos)
@@ -33,7 +33,7 @@ object PositionPickler {
case xs: TraversableOnce[_] =>
xs.foreach(traverse(_, parentPos, op))
case _ =>
- }
+ }
}
import PositionPickler._
@@ -41,18 +41,18 @@ class PositionPickler(pickler: TastyPickler, addrOfTree: Tree => Option[Addr]) {
val buf = new TastyBuffer(100000)
pickler.newSection("Positions", buf)
import buf._
-
+
def picklePositions(roots: List[Tree], totalRange: Position)(implicit ctx: Context) = {
var lastIndex = 0
def record(tree: Tree, parentPos: Position): Unit =
if (tree.pos.exists) {
def msg = s"failure to pickle $tree at ${tree.pos}, parent = $parentPos"
- val endPos = tree.pos.end min parentPos.end
+ val endPos = tree.pos.end min parentPos.end
// end positions can be larger than their parents
// e.g. in the case of synthetic empty ranges, which are placed at the next token after
// the current construct.
val endDelta = endPos - parentPos.end
- val startPos =
+ val startPos =
if (endDelta == 0) tree.pos.start max parentPos.start else tree.pos.start min endPos
// Since end positions are corrected above, start positions have to follow suit.
val startDelta = startPos - parentPos.start
@@ -68,8 +68,8 @@ class PositionPickler(pickler: TastyPickler, addrOfTree: Tree => Option[Addr]) {
assert(startDelta >= 0, msg)
}
}
-
+
buf.writeNat(totalRange.end)
traverse(roots, totalRange, record)
}
-} \ No newline at end of file
+}
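The `record` logic in the hunk above stores each tree's position as a pair of deltas against its parent, clamping end offsets that run past the parent (for example synthetic empty ranges placed at the next token). A standalone sketch of that computation, with a toy Pos type standing in for the compiler's Position:

// Toy reconstruction of the start/end delta computation in `record` above.
case class Pos(start: Int, end: Int)

def deltas(child: Pos, parent: Pos): (Int, Int) = {
  val endPos     = child.end min parent.end        // clamp ends that exceed the parent
  val endDelta   = endPos - parent.end             // always <= 0
  val startPos   =                                 // corrected start follows the corrected end
    if (endDelta == 0) child.start max parent.start
    else child.start min endPos
  val startDelta = startPos - parent.start         // expected to be >= 0
  (startDelta, endDelta)
}

// deltas(Pos(12, 18), Pos(10, 20)) == (2, -2)
// deltas(Pos(25, 25), Pos(10, 20)) == (15, 0)   (an empty range placed after its parent)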
diff --git a/src/dotty/tools/dotc/core/pickling/PositionUnpickler.scala b/src/dotty/tools/dotc/core/pickling/PositionUnpickler.scala
index 4d06cf792..cfcc4a835 100644
--- a/src/dotty/tools/dotc/core/pickling/PositionUnpickler.scala
+++ b/src/dotty/tools/dotc/core/pickling/PositionUnpickler.scala
@@ -24,14 +24,14 @@ class PositionUnpickler(reader: TastyReader) {
while (!isAtEnd) {
val delta1 = readDelta()
val delta2 = readDelta()
- val (startDelta, endDelta, indexDelta) =
+ val (startDelta, endDelta, indexDelta) =
if (delta2 <= 0) (delta1, -delta2, readDelta())
else if (delta1 < 0) (0, -delta1, delta2)
else (delta1, 0, delta2)
- positions(curIndex) = Position(startDelta, endDelta, startDelta)
+ positions(curIndex) = Position(startDelta, endDelta, startDelta)
// make non-synthetic position; will be made synthetic by normalization.
curIndex += indexDelta
}
(Position(0, sourceLength), positions)
}
-} \ No newline at end of file
+}
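On the reading side, each association consists of two or three numbers, and the signs of the first two decide whether a start delta, an end delta, or both were recorded; that is what the three-way branch above reconstructs. The same rule as a standalone sketch (decodeAssoc is a hypothetical name):

// Sketch of the decoding branch above: `next` reads the extra number only in
// the case where both a start and an end delta were written.
def decodeAssoc(delta1: Int, delta2: Int, next: () => Int): (Int, Int, Int) =
  if (delta2 <= 0)     (delta1, -delta2, next())  // start delta, end delta, index delta
  else if (delta1 < 0) (0, -delta1, delta2)       // only an end delta was written
  else                 (delta1, 0, delta2)        // only a start delta was written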
diff --git a/src/dotty/tools/dotc/core/pickling/TastyBuffer.scala b/src/dotty/tools/dotc/core/pickling/TastyBuffer.scala
index 9197a2acc..a67722227 100644
--- a/src/dotty/tools/dotc/core/pickling/TastyBuffer.scala
+++ b/src/dotty/tools/dotc/core/pickling/TastyBuffer.scala
@@ -6,19 +6,19 @@ package pickling
import util.Util.dble
object TastyBuffer {
-
+
/** The number of digits of the natural number `nat`, written in base 128 format. */
- def natSize(nat: Int): Int =
+ def natSize(nat: Int): Int =
if (nat < 128) 1 else natSize(nat >>> 7) + 1
/** An address pointing to an index in a Tasty buffer's byte array */
case class Addr(val index: Int) extends AnyVal {
- def -(delta: Int): Addr = Addr(this.index - delta)
- def +(delta: Int): Addr = Addr(this.index + delta)
-
+ def - (delta: Int): Addr = Addr(this.index - delta)
+ def + (delta: Int): Addr = Addr(this.index + delta)
+
def relativeTo(base: Addr): Addr = this - base.index - AddrWidth
}
-
+
val NoAddr = Addr(-1)
/** The maximal number of address bytes.
@@ -33,13 +33,13 @@ import TastyBuffer._
* and that supports reading and patching addresses represented as natural numbers.
*/
class TastyBuffer(initialSize: Int) {
-
+
/** The current byte array, will be expanded as needed */
var bytes = new Array[Byte](initialSize)
-
+
/** The number of bytes written */
var length = 0
-
+
// -- Output routines --------------------------------------------
/** Write a byte of data. */
@@ -48,7 +48,7 @@ class TastyBuffer(initialSize: Int) {
bytes(length) = b.toByte
length += 1
}
-
+
/** Write the first `n` bytes of `data`. */
def writeBytes(data: Array[Byte], n: Int): Unit = {
while (bytes.length < length + n) bytes = dble(bytes)
@@ -61,13 +61,13 @@ class TastyBuffer(initialSize: Int) {
*/
def writeNat(x: Int): Unit =
writeLongNat(x.toLong & 0x00000000FFFFFFFFL)
-
+
/** Write a natural number in 2's complement big endian format, base 128.
* All but the last digits have bit 0x80 set.
*/
- def writeInt(x: Int): Unit =
+ def writeInt(x: Int): Unit =
writeLongInt(x)
-
+
/**
* Like writeNat, but for longs. Note that the
* binary representation of LongNat is identical to Nat
@@ -84,7 +84,7 @@ class TastyBuffer(initialSize: Int) {
if (y != 0L) writePrefix(y)
writeByte(((x & 0x7f) | 0x80).toInt)
}
-
+
/** Like writeInt, but for longs */
def writeLongInt(x: Long): Unit = {
def writePrefix(x: Long): Unit = {
@@ -94,22 +94,22 @@ class TastyBuffer(initialSize: Int) {
}
val y = x >> 7
if (y != 0L - ((x >> 6) & 1)) writePrefix(y)
- writeByte(((x & 0x7f) | 0x80).toInt)
+ writeByte(((x & 0x7f) | 0x80).toInt)
}
-
+
/** Write an uncompressed Long stored in 8 bytes in big endian format */
def writeUncompressedLong(x: Long): Unit = {
var y = x
val bytes = new Array[Byte](8)
for (i <- 7 to 0 by -1) {
bytes(i) = (y & 0xff).toByte
- y = y >>> 8
+ y = y >>> 8
}
writeBytes(bytes, 8)
}
// -- Address handling --------------------------------------------
-
+
/** Write natural number `x` right-adjusted in a field of `width` bytes
* starting with address `at`.
*/
@@ -125,10 +125,10 @@ class TastyBuffer(initialSize: Int) {
}
assert(y == 0, s"number $x too large to fit in $width bytes")
}
-
+
/** The byte at given address */
def getByte(at: Addr): Int = bytes(at.index)
-
+
/** The natural number at address `at` */
def getNat(at: Addr): Int = getLongNat(at).toInt
@@ -148,8 +148,8 @@ class TastyBuffer(initialSize: Int) {
/** The address (represented as a natural number) at address `at` */
def getAddr(at: Addr) = Addr(getNat(at))
- /** The smallest address equal to or following `at` which points to a non-zero byte */
- final def skipZeroes(at: Addr): Addr =
+ /** The smallest address equal to or following `at` which points to a non-zero byte */
+ final def skipZeroes(at: Addr): Addr =
if (getByte(at) != 0) at else skipZeroes(at + 1)
/** The address after the natural number found at address `at`. */
@@ -160,21 +160,21 @@ class TastyBuffer(initialSize: Int) {
/** The address referring to the end of data written so far */
def currentAddr: Addr = Addr(length)
-
+
/** Reserve `AddrWidth` bytes to write an address into */
def reserveAddr(): Addr = {
val result = currentAddr
length += AddrWidth
result
}
-
+
/** Fill reserved space at address `at` with address `target` */
- def fillAddr(at: Addr, target: Addr) =
+ def fillAddr(at: Addr, target: Addr) =
putNat(at, target.index, AddrWidth)
-
+
/** Write address without leading zeroes */
def writeAddr(addr: Addr): Unit = writeNat(addr.index)
-
+
// -- Finalization --------------------------------------------
/** Hook to be overridden in subclasses.
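natSize, writeNat and the getNat family above all work on one representation: base-128 digits written most significant first, with the last byte carrying bit 0x80 as a terminator (see the final writeByte in writeLongNat). A self-contained encoder/decoder pair for that representation, an illustration rather than the TastyBuffer code itself:

// Illustration of the Nat representation used by writeNat/getNat above:
// base-128 digits, most significant first, stop bit 0x80 on the last digit.
def encodeNat(x: Long): Array[Byte] = {
  require(x >= 0)
  val digits = collection.mutable.ArrayBuffer.empty[Byte]
  var y = x
  do { digits += (y & 0x7f).toByte; y >>>= 7 } while (y != 0)
  val out = digits.reverse.toArray                           // most significant digit first
  out(out.length - 1) = (out(out.length - 1) | 0x80).toByte  // stop bit on the final byte
  out
}

def decodeNat(bytes: Array[Byte]): Long = {
  var x = 0L; var i = 0; var b = 0
  do { b = bytes(i); x = (x << 7) | (b & 0x7f); i += 1 } while ((b & 0x80) == 0)
  x
}

// encodeNat(300) == Array(0x02, 0xac.toByte)   (300 = 2*128 + 44, and 44 | 0x80 = 0xac)
// decodeNat(encodeNat(300)) == 300, and natSize(300) == 2 for the same reason.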
diff --git a/src/dotty/tools/dotc/core/pickling/TastyName.scala b/src/dotty/tools/dotc/core/pickling/TastyName.scala
index e8f30a234..e47ff9fc4 100644
--- a/src/dotty/tools/dotc/core/pickling/TastyName.scala
+++ b/src/dotty/tools/dotc/core/pickling/TastyName.scala
@@ -9,22 +9,22 @@ import collection.mutable
abstract class TastyName
object TastyName {
-
+
case class NameRef(val index: Int) extends AnyVal
-
+
case class Simple(name: TermName) extends TastyName
case class Qualified(qualified: NameRef, selector: NameRef) extends TastyName
- case class Signed(original: NameRef, params: List[NameRef], result: NameRef) extends TastyName
+ case class Signed(original: NameRef, params: List[NameRef], result: NameRef) extends TastyName
case class Expanded(prefix: NameRef, original: NameRef) extends TastyName
case class ModuleClass(module: NameRef) extends TastyName
case class SuperAccessor(accessed: NameRef) extends TastyName
case class DefaultGetter(method: NameRef, num: Int) extends TastyName
case class Shadowed(original: NameRef) extends TastyName
-
+
class Table extends (NameRef => TastyName) {
private val names = new mutable.ArrayBuffer[TastyName]
def add(name: TastyName) = names += name
def apply(ref: NameRef) = names(ref.index)
def contents: Iterable[TastyName] = names
}
-}
+}
diff --git a/src/dotty/tools/dotc/core/pickling/TastyPickler.scala b/src/dotty/tools/dotc/core/pickling/TastyPickler.scala
index f998cf377..6bd6f1c44 100644
--- a/src/dotty/tools/dotc/core/pickling/TastyPickler.scala
+++ b/src/dotty/tools/dotc/core/pickling/TastyPickler.scala
@@ -9,9 +9,9 @@ import TastyBuffer._
import java.util.UUID
class TastyPickler {
-
+
private val sections = new mutable.ArrayBuffer[(TastyName.NameRef, TastyBuffer)]
-
+
private val headerBuffer = {
val buf = new TastyBuffer(24)
for (ch <- header) buf.writeByte(ch.toByte)
@@ -24,17 +24,17 @@ class TastyPickler {
}
val nameBuffer = new NameBuffer
-
- def newSection(name: String, buf: TastyBuffer) =
+
+ def newSection(name: String, buf: TastyBuffer) =
sections += ((nameBuffer.nameIndex(name), buf))
-
+
def assembleParts(): Array[Byte] = {
def lengthWithLength(buf: TastyBuffer) = {
buf.assemble()
buf.length + natSize(buf.length)
}
- val totalSize =
- headerBuffer.length +
+ val totalSize =
+ headerBuffer.length +
lengthWithLength(nameBuffer) + {
for ((nameRef, buf) <- sections) yield
natSize(nameRef.index) + lengthWithLength(buf)
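The size computation in assembleParts above reflects the fact that every buffer is emitted as a length (a Nat) followed by its bytes, so the length field's own footprint has to be counted as well. A quick standalone restatement, reusing the natSize definition shown earlier in TastyBuffer:

// A buffer of n payload bytes occupies n + natSize(n) bytes once its
// base-128 length prefix is included.
def natSize(nat: Int): Int = if (nat < 128) 1 else natSize(nat >>> 7) + 1
def lengthWithLength(payloadLength: Int): Int = payloadLength + natSize(payloadLength)

// lengthWithLength(100) == 101, lengthWithLength(200) == 202, lengthWithLength(20000) == 20003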
diff --git a/src/dotty/tools/dotc/core/pickling/TastyPrinter.scala b/src/dotty/tools/dotc/core/pickling/TastyPrinter.scala
index a3d30b9b2..9d07fc5da 100644
--- a/src/dotty/tools/dotc/core/pickling/TastyPrinter.scala
+++ b/src/dotty/tools/dotc/core/pickling/TastyPrinter.scala
@@ -12,14 +12,14 @@ import util.Positions.{Position, offsetToInt}
import collection.mutable
class TastyPrinter(bytes: Array[Byte])(implicit ctx: Context) {
-
+
val unpickler = new TastyUnpickler(bytes)
import unpickler.{tastyName, unpickle}
-
+
def nameToString(name: TastyName): String = name match {
case Simple(name) => name.toString
case Qualified(qual, name) => nameRefToString(qual) + "." + nameRefToString(name)
- case Signed(original, params, result) =>
+ case Signed(original, params, result) =>
i"${nameRefToString(original)}@${params.map(nameRefToString)}%,%:${nameRefToString(result)}"
case Expanded(prefix, original) => s"$prefix${nme.EXPAND_SEPARATOR}$original"
case ModuleClass(original) => nameRefToString(original) + "/MODULECLASS"
@@ -27,13 +27,13 @@ class TastyPrinter(bytes: Array[Byte])(implicit ctx: Context) {
case DefaultGetter(meth, num) => nameRefToString(meth) + "/DEFAULTGETTER" + num
case Shadowed(original) => nameRefToString(original) + "/SHADOWED"
}
-
+
def nameRefToString(ref: NameRef): String = nameToString(tastyName(ref))
-
- def printNames() =
+
+ def printNames() =
for ((name, idx) <- tastyName.contents.zipWithIndex)
println(f"$idx%4d: " + nameToString(name))
-
+
def printContents(): Unit = {
println("Names:")
printNames()
@@ -41,7 +41,7 @@ class TastyPrinter(bytes: Array[Byte])(implicit ctx: Context) {
unpickle(new TreeSectionUnpickler)
unpickle(new PositionSectionUnpickler)
}
-
+
class TreeSectionUnpickler extends SectionUnpickler[Unit]("ASTs") {
import PickleFormat._
def unpickle(reader: TastyReader, tastyName: TastyName.Table): Unit = {
@@ -66,12 +66,12 @@ class TastyPrinter(bytes: Array[Byte])(implicit ctx: Context) {
tag match {
case RENAMED =>
printName(); printName()
- case VALDEF | DEFDEF | TYPEDEF | TYPEPARAM | PARAM | NAMEDARG | BIND =>
+ case VALDEF | DEFDEF | TYPEDEF | TYPEPARAM | PARAM | NAMEDARG | BIND =>
printName(); printTrees()
case REFINEDtype =>
printTree(); printName(); printTrees()
case RETURN =>
- printNat(); printTrees()
+ printNat(); printTrees()
case METHODtype | POLYtype =>
printTree()
until(end) { printName(); printTree() }
@@ -85,16 +85,16 @@ class TastyPrinter(bytes: Array[Byte])(implicit ctx: Context) {
goto(end)
}
}
- else if (tag >= firstNatASTTreeTag) {
+ else if (tag >= firstNatASTTreeTag) {
tag match {
case IDENT | SELECT | TERMREF | TYPEREF | SELFDEF => printName()
- case _ => printNat()
+ case _ => printNat()
}
printTree()
}
- else if (tag >= firstASTTreeTag)
+ else if (tag >= firstASTTreeTag)
printTree()
- else if (tag >= firstNatTreeTag)
+ else if (tag >= firstNatTreeTag)
tag match {
case TERMREFpkg | TYPEREFpkg | STRINGconst | IMPORTED => printName()
case _ => printNat()
@@ -119,4 +119,4 @@ class TastyPrinter(bytes: Array[Byte])(implicit ctx: Context) {
for ((addr, pos) <- sorted) println(s"${addr.index}: ${offsetToInt(pos.start)} .. ${pos.end}")
}
}
-} \ No newline at end of file
+}
diff --git a/src/dotty/tools/dotc/core/pickling/TastyReader.scala b/src/dotty/tools/dotc/core/pickling/TastyReader.scala
index 0385e9adb..35724e557 100644
--- a/src/dotty/tools/dotc/core/pickling/TastyReader.scala
+++ b/src/dotty/tools/dotc/core/pickling/TastyReader.scala
@@ -10,49 +10,49 @@ import collection.mutable
/** A byte array buffer that can be filled with bytes or natural numbers in TASTY format,
* and that supports reading and patching addresses represented as natural numbers.
- *
+ *
* @param bytes The array containing data
* @param from The position from which to read
* @param end The position one greater than the last byte to be read
- * @param base The index referenced by the logical zero address Addr(0)
+ * @param base The index referenced by the logical zero address Addr(0)
*/
class TastyReader(val bytes: Array[Byte], start: Int, end: Int, val base: Int = 0) {
-
+
def this(bytes: Array[Byte]) = this(bytes, 0, bytes.length)
-
+
private var bp: Int = start
-
+
def addr(idx: Int) = Addr(idx - base)
def index(addr: Addr) = addr.index + base
-
+
/** The address of the first byte to read, respectively byte that was read */
def startAddr: Addr = addr(start)
-
+
/** The address of the next byte to read */
def currentAddr: Addr = addr(bp)
-
+
/** The address one greater than the last byte to read */
def endAddr: Addr = addr(end)
-
+
/** Have all bytes been read? */
def isAtEnd: Boolean = bp == end
-
+
/** A new reader over the same array with the same address base, but with
* specified start and end positions
*/
- def subReader(start: Addr, end: Addr): TastyReader =
+ def subReader(start: Addr, end: Addr): TastyReader =
new TastyReader(bytes, index(start), index(end), base)
-
+
/** Read a byte of data. */
def readByte(): Int = {
val result = bytes(bp) & 0xff
bp += 1
result
}
-
- /** Returns the next byte of data as a natural number without advancing the read position */
+
+ /** Returns the next byte of data as a natural number without advancing the read position */
def nextByte: Int = bytes(bp) & 0xff
-
+
/** Read the next `n` bytes of `data`. */
def readBytes(n: Int): Array[Byte] = {
val result = new Array[Byte](n)
@@ -65,12 +65,12 @@ class TastyReader(val bytes: Array[Byte], start: Int, end: Int, val base: Int =
* All but the last digits have bit 0x80 set.
*/
def readNat(): Int = readLongNat.toInt
-
+
/** Read an integer number in 2's complement big endian format, base 128.
* All but the last digits have bit 0x80 set.
*/
def readInt(): Int = readLongInt.toInt
-
+
/** Read a natural number fitting in a Long in big endian format, base 128.
* All but the last digits have bit 0x80 set.
*/
@@ -84,7 +84,7 @@ class TastyReader(val bytes: Array[Byte], start: Int, end: Int, val base: Int =
} while ((b & 0x80) == 0)
x
}
-
+
/** Read a long integer number in 2's complement big endian format, base 128. */
def readLongInt(): Long = {
var b = bytes(bp)
@@ -95,9 +95,9 @@ class TastyReader(val bytes: Array[Byte], start: Int, end: Int, val base: Int =
x = (x << 7) | (b & 0x7f)
bp += 1
}
- x
+ x
}
-
+
/** Read an uncompressed Long stored in 8 bytes in big endian format */
def readUncompressedLong(): Long = {
var x = 0
@@ -105,22 +105,22 @@ class TastyReader(val bytes: Array[Byte], start: Int, end: Int, val base: Int =
x = (x << 8) | (readByte() & 0xff)
x
}
-
+
/** Read a natural number and return as a NameRef */
def readNameRef() = NameRef(readNat())
-
- /** Read a natural number and return as an address */
+
+ /** Read a natural number and return as an address */
def readAddr() = Addr(readNat())
-
+
/** Read a length number and return the absolute end address implied by it,
* given as <address following length field> + <length-value-read>.
*/
def readEnd(): Addr = addr(readNat() + bp)
-
+
/** Set read position to the one pointed to by `addr` */
- def goto(addr: Addr): Unit =
+ def goto(addr: Addr): Unit =
bp = index(addr)
-
+
/** Perform `op` until `end` address is reached and collect results in a list. */
def until[T](end: Addr)(op: => T): List[T] = {
val buf = new mutable.ListBuffer[T]
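The reader above is used throughout the unpicklers in the same pattern: a length field is turned into an end address via readEnd, and entries are consumed until that address is reached. A hypothetical helper showing that pattern (it would sit alongside TastyReader and uses only the methods shown above):

// Hypothetical usage sketch: read a length-prefixed run of Nats.
def readNatList(reader: TastyReader): List[Int] = {
  val end = reader.readEnd()              // <length> consumed; end = address after the run
  reader.until(end) { reader.readNat() }  // read entries until the end address is hit
}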
diff --git a/src/dotty/tools/dotc/core/pickling/TastyUnpickler.scala b/src/dotty/tools/dotc/core/pickling/TastyUnpickler.scala
index b5e978afa..5fbb85768 100644
--- a/src/dotty/tools/dotc/core/pickling/TastyUnpickler.scala
+++ b/src/dotty/tools/dotc/core/pickling/TastyUnpickler.scala
@@ -7,9 +7,9 @@ import PickleFormat._
import Names.{Name, termName}
import java.util.UUID
-object TastyUnpickler {
+object TastyUnpickler {
class UnpickleException(msg: String) extends Exception(msg)
-
+
abstract class SectionUnpickler[R](val name: String) {
def unpickle(reader: TastyReader, tastyName: TastyName.Table): R
}
@@ -19,28 +19,28 @@ import TastyUnpickler._
class TastyUnpickler(reader: TastyReader) {
import reader._
-
+
def this(bytes: Array[Byte]) = this(new TastyReader(bytes))
-
+
private val sectionReader = new mutable.HashMap[String, TastyReader]
val tastyName = new TastyName.Table
-
- def check(cond: Boolean, msg: => String) =
+
+ def check(cond: Boolean, msg: => String) =
if (!cond) throw new UnpickleException(msg)
-
+
def readString(): String = {
val TastyName.Simple(name) = tastyName(readNameRef())
name.toString
}
-
+
def readName(): TastyName = {
import TastyName._
- val tag = readByte()
+ val tag = readByte()
val length = readNat()
val start = currentAddr
val end = start + length
val result = tag match {
- case UTF8 =>
+ case UTF8 =>
goto(end)
Simple(termName(bytes, start.index, length))
case QUALIFIED =>
@@ -64,21 +64,21 @@ class TastyUnpickler(reader: TastyReader) {
assert(currentAddr == end, s"bad name $result $start $currentAddr $end")
result
}
-
+
private def readHeader(): UUID = {
for (i <- 0 until header.length)
check(readByte() == header(i), "not a TASTy file")
val major = readNat()
val minor = readNat()
- check(major == MajorVersion && minor <= MinorVersion,
+ check(major == MajorVersion && minor <= MinorVersion,
s"""TASTy signature has wrong version.
| expected: $MajorVersion.$MinorVersion
| found : $major.$minor""".stripMargin)
new UUID(readUncompressedLong(), readUncompressedLong())
}
-
+
val uuid = readHeader()
-
+
locally {
until(readEnd()) { tastyName.add(readName()) }
while (!isAtEnd) {
@@ -88,8 +88,8 @@ class TastyUnpickler(reader: TastyReader) {
goto(secEnd)
}
}
-
- def unpickle[R](sec: SectionUnpickler[R]): Option[R] =
+
+ def unpickle[R](sec: SectionUnpickler[R]): Option[R] =
for (reader <- sectionReader.get(sec.name)) yield
sec.unpickle(reader, tastyName)
}
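readHeader above accepts a file when the magic bytes match, the major versions are equal, and the file's minor version is no newer than the reader's. The version rule on its own, as a small standalone sketch:

// Restatement of the version check in readHeader: same major version, and the
// file's minor version must not exceed the reader's.
def versionCompatible(fileMajor: Int, fileMinor: Int,
                      readerMajor: Int, readerMinor: Int): Boolean =
  fileMajor == readerMajor && fileMinor <= readerMinor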
diff --git a/src/dotty/tools/dotc/core/pickling/TreeBuffer.scala b/src/dotty/tools/dotc/core/pickling/TreeBuffer.scala
index c1eae5014..c224fc30b 100644
--- a/src/dotty/tools/dotc/core/pickling/TreeBuffer.scala
+++ b/src/dotty/tools/dotc/core/pickling/TreeBuffer.scala
@@ -11,20 +11,20 @@ import ast.tpd.Tree
class TreeBuffer extends TastyBuffer(1000000) {
private final val ItemsOverOffsets = 2
-
+
private val initialOffsetSize = bytes.length / (AddrWidth * ItemsOverOffsets)
private var offsets = new Array[Int](initialOffsetSize)
private var isRelative = new Array[Boolean](initialOffsetSize)
private var delta: Array[Int] = _
private var numOffsets = 0
- private[pickling] val pickledTrees = new java.util.IdentityHashMap[Tree, Any] // Value type is really Addr, but that's not compatible with null
-
+ private[pickling] val pickledTrees = new java.util.IdentityHashMap[Tree, Any] // Value type is really Addr, but that's not compatible with null
+
def addrOfTree(tree: Tree): Option[Addr] = pickledTrees.get(tree) match {
case null => None
case n => Some(n.asInstanceOf[Addr])
}
-
+
private def offset(i: Int): Addr = Addr(offsets(i))
private def keepOffset(relative: Boolean): Unit = {
@@ -36,7 +36,7 @@ class TreeBuffer extends TastyBuffer(1000000) {
isRelative(numOffsets) = relative
numOffsets += 1
}
-
+
/** Reserve space for a reference, to be adjusted later */
def reserveRef(relative: Boolean): Addr = {
val addr = currentAddr
@@ -50,19 +50,19 @@ class TreeBuffer extends TastyBuffer(1000000) {
keepOffset(relative = false)
fillAddr(reserveAddr(), target)
}
-
+
/** Fill previously reserved field with a reference */
def fillRef(at: Addr, target: Addr, relative: Boolean) = {
val addr = if (relative) target.relativeTo(at) else target
fillAddr(at, addr)
}
-
+
/** The amount by which the bytes at the given address are shifted under compression */
def deltaAt(at: Addr): Int = {
val idx = bestFit(offsets, numOffsets, at.index - 1)
if (idx < 0) 0 else delta(idx)
}
-
+
/** The address to which `x` is translated under compression */
def adjusted(x: Addr): Addr = x - deltaAt(x)
@@ -77,11 +77,11 @@ class TreeBuffer extends TastyBuffer(1000000) {
val skippedCount = skippedOff.index - off.index
assert(skippedCount < AddrWidth, s"unset field at position $off")
lastDelta += skippedCount
- delta(i) = lastDelta
+ delta(i) = lastDelta
i += 1
}
}
-
+
/** The absolute or relative adjusted address at index `i` of `offsets` array*/
private def adjustedOffset(i: Int): Addr = {
val at = offset(i)
@@ -90,12 +90,12 @@ class TreeBuffer extends TastyBuffer(1000000) {
val start = skipNat(at)
val len1 = original + delta(i) - deltaAt(original + start.index)
val len2 = adjusted(original + start.index) - adjusted(start).index
- assert(len1 == len2,
+ assert(len1 == len2,
s"adjusting offset #$i: $at, original = $original, len1 = $len1, len2 = $len2")
len1
} else adjusted(original)
}
-
+
/** Adjust all offsets according to previously computed deltas */
private def adjustOffsets(): Unit = {
for (i <- 0 until numOffsets) {
@@ -103,7 +103,7 @@ class TreeBuffer extends TastyBuffer(1000000) {
fillAddr(offset(i), corrected)
}
}
-
+
/** Adjust deltas to also take into account references that will shrink (and thereby
* generate additional zeroes that can be skipped) due to previously
* computed adjustments.
@@ -118,13 +118,13 @@ class TreeBuffer extends TastyBuffer(1000000) {
delta1(i) = lastDelta
i += 1
}
- val saved =
+ val saved =
if (numOffsets == 0) 0
else delta1(numOffsets - 1) - delta(numOffsets - 1)
delta = delta1
saved
}
-
+
/** Compress pickle buffer, shifting bytes to close all skipped zeroes. */
private def compress(): Int = {
var lastDelta = 0
@@ -147,7 +147,7 @@ class TreeBuffer extends TastyBuffer(1000000) {
length -= lastDelta
wasted
}
-
+
def adjustPickledTrees(): Unit = {
val it = pickledTrees.keySet.iterator
while (it.hasNext) {
@@ -155,7 +155,7 @@ class TreeBuffer extends TastyBuffer(1000000) {
pickledTrees.put(tree, adjusted(pickledTrees.get(tree).asInstanceOf[Addr]))
}
}
-
+
/** Final assembly, involving the following steps:
* - compute deltas
* - adjust deltas until additional savings are < 1% of total
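The compression machinery in TreeBuffer above reserves full-width address fields and later squeezes out the leading zero bytes they did not need; the delta array records, for each reserved offset, how many bytes have been dropped up to that point, and adjusted maps an old address to its compressed one. A simplified illustration, assuming bestFit finds the last reserved offset at or before the queried address and using a linear scan instead of the real binary search:

// Illustration only: old address -> compressed address, given the reserved
// offsets (ascending) and the cumulative byte count dropped at or before each.
def adjustedAddr(addr: Int, offsets: Array[Int], delta: Array[Int]): Int = {
  val idx = offsets.lastIndexWhere(_ <= addr - 1)   // last reserved offset before addr
  addr - (if (idx < 0) 0 else delta(idx))           // shift left by the bytes saved so far
}

// With offsets = Array(4, 20) and delta = Array(3, 5):
// adjustedAddr(2, ...)  == 2    (before any compression)
// adjustedAddr(10, ...) == 7    (3 bytes dropped at offset 4)
// adjustedAddr(40, ...) == 35   (5 bytes dropped in total by offset 20)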
diff --git a/src/dotty/tools/dotc/core/pickling/TreePickler.scala b/src/dotty/tools/dotc/core/pickling/TreePickler.scala
index 85addc563..53dd34094 100644
--- a/src/dotty/tools/dotc/core/pickling/TreePickler.scala
+++ b/src/dotty/tools/dotc/core/pickling/TreePickler.scala
@@ -33,7 +33,7 @@ class TreePickler(pickler: TastyPickler) {
}
def preRegister(tree: Tree)(implicit ctx: Context): Unit = tree match {
- case tree: MemberDef =>
+ case tree: MemberDef =>
if (!symRefs.contains(tree.symbol)) symRefs(tree.symbol) = NoAddr
case _ =>
}
@@ -54,13 +54,13 @@ class TreePickler(pickler: TastyPickler) {
val Signature(params, result) = sig
pickleName(TastyName.Signed(nameIndex(name), params.map(fullNameIndex), fullNameIndex(result)))
}
-
+
private def pickleName(sym: Symbol)(implicit ctx: Context): Unit =
- if (sym is Flags.ExpandedName)
+ if (sym is Flags.ExpandedName)
pickleName(TastyName.Expanded(
nameIndex(sym.name.expandedPrefix), nameIndex(sym.name.unexpandedName)))
else pickleName(sym.name)
-
+
private def pickleSymRef(sym: Symbol)(implicit ctx: Context) = symRefs.get(sym) match {
case Some(label) =>
if (label != NoAddr) writeRef(label) else pickleForwardSymRef(sym)
@@ -68,20 +68,20 @@ class TreePickler(pickler: TastyPickler) {
ctx.log(i"pickling reference to as yet undefined $sym in ${sym.owner}", sym.pos)
pickleForwardSymRef(sym)
}
-
+
private def pickleForwardSymRef(sym: Symbol)(implicit ctx: Context) = {
val ref = reserveRef(relative = false)
assert(!sym.is(Flags.Package), sym)
- forwardSymRefs(sym) = ref :: forwardSymRefs.getOrElse(sym, Nil)
+ forwardSymRefs(sym) = ref :: forwardSymRefs.getOrElse(sym, Nil)
}
-
+
private def isLocallyDefined(sym: Symbol)(implicit ctx: Context) = symRefs.get(sym) match {
case Some(label) => assert(sym.exists); label != NoAddr
case None => false
}
def pickle(trees: List[Tree])(implicit ctx: Context) = {
-
+
def qualifiedName(sym: Symbol): TastyName =
if (sym.isRoot || sym.owner.isRoot) TastyName.Simple(sym.name.toTermName)
else TastyName.Qualified(nameIndex(qualifiedName(sym.owner)), nameIndex(sym.name))
@@ -141,9 +141,9 @@ class TreePickler(pickler: TastyPickler) {
println(i"error when pickling type $tpe0")
throw ex
}
-
+
def pickleNewType(tpe: Type, richTypes: Boolean): Unit = try { tpe match {
- case ConstantType(value) =>
+ case ConstantType(value) =>
pickleConstant(value)
case tpe: TypeRef if tpe.info.isAlias && tpe.symbol.is(Flags.AliasPreferred) =>
pickleType(tpe.info.bounds.hi)
@@ -152,12 +152,12 @@ class TreePickler(pickler: TastyPickler) {
if (sym.is(Flags.Package)) {
writeByte(if (tpe.isType) TYPEREFpkg else TERMREFpkg)
pickleName(qualifiedName(sym))
- }
+ }
else {
assert(tpe.prefix == NoPrefix)
def pickleRef() = {
writeByte(if (tpe.isType) TYPEREFdirect else TERMREFdirect)
- pickleSymRef(sym)
+ pickleSymRef(sym)
}
if (sym is Flags.BindDefinedType) {
registerDef(sym)
@@ -175,14 +175,14 @@ class TreePickler(pickler: TastyPickler) {
pickleNameAndSig(tpe.name, tpe.signature); pickleType(tpe.prefix)
case tpe: NamedType =>
if (tpe.name == tpnme.Apply && tpe.prefix.argInfos.nonEmpty && tpe.prefix.isInstantiatedLambda)
- // instantiated lambdas are pickled as APPLIEDTYPE; #Apply will
+ // instantiated lambdas are pickled as APPLIEDTYPE; #Apply will
// be reconstituted when unpickling.
pickleType(tpe.prefix)
else if (isLocallyDefined(tpe.symbol)) {
writeByte(if (tpe.isType) TYPEREFsymbol else TERMREFsymbol)
pickleSymRef(tpe.symbol); pickleType(tpe.prefix)
}
- else {
+ else {
writeByte(if (tpe.isType) TYPEREF else TERMREF)
pickleName(tpe.name); pickleType(tpe.prefix)
}
@@ -199,10 +199,10 @@ class TreePickler(pickler: TastyPickler) {
val args = tpe.argInfos(interpolate = false)
if (args.isEmpty) {
writeByte(REFINEDtype)
- withLength {
+ withLength {
pickleType(tpe.parent)
pickleName(tpe.refinedName)
- pickleType(tpe.refinedInfo, richTypes = true)
+ pickleType(tpe.refinedInfo, richTypes = true)
}
}
else {
@@ -211,8 +211,8 @@ class TreePickler(pickler: TastyPickler) {
}
case tpe: TypeAlias =>
writeByte(TYPEALIAS)
- withLength {
- pickleType(tpe.alias, richTypes)
+ withLength {
+ pickleType(tpe.alias, richTypes)
tpe.variance match {
case 1 => writeByte(COVARIANT)
case -1 => writeByte(CONTRAVARIANT)
@@ -237,7 +237,7 @@ class TreePickler(pickler: TastyPickler) {
case tpe: PolyType if richTypes =>
writeByte(POLYtype)
pickleMethodic(tpe.resultType, tpe.paramNames, tpe.paramBounds)
- case tpe: PolyParam =>
+ case tpe: PolyParam =>
if (!pickleParamType(tpe))
// TODO figure out why this case arises in e.g. pickling AbstractFileReader.
ctx.typerState.constraint.entry(tpe) match {
@@ -249,19 +249,19 @@ class TreePickler(pickler: TastyPickler) {
case tpe: LazyRef =>
pickleType(tpe.ref)
}} catch {
- case ex: AssertionError =>
+ case ex: AssertionError =>
println(i"error while pickling type $tpe")
throw ex
}
-
- def pickleMethodic(result: Type, names: List[Name], types: List[Type]) =
+
+ def pickleMethodic(result: Type, names: List[Name], types: List[Type]) =
withLength {
pickleType(result, richTypes = true)
(names, types).zipped.foreach { (name, tpe) =>
- pickleName(name); pickleType(tpe)
+ pickleName(name); pickleType(tpe)
}
}
-
+
def pickleParamType(tpe: ParamType): Boolean = {
val binder = pickledTypes.get(tpe.binder)
val pickled = binder != null
@@ -271,10 +271,10 @@ class TreePickler(pickler: TastyPickler) {
}
pickled
}
-
+
def pickleTpt(tpt: Tree): Unit = pickleType(tpt.tpe) // TODO correlate with original when generating positions
-
- def pickleTreeUnlessEmpty(tree: Tree): Unit =
+
+ def pickleTreeUnlessEmpty(tree: Tree): Unit =
if (!tree.isEmpty) pickleTree(tree)
def pickleTree(tree: Tree): Unit = try {
@@ -283,14 +283,14 @@ class TreePickler(pickler: TastyPickler) {
case Ident(name) =>
tree.tpe match {
case tp: TermRef => pickleType(tp)
- case _ =>
+ case _ =>
writeByte(IDENT)
pickleName(name)
pickleType(tree.tpe)
}
- case This(_) =>
+ case This(_) =>
pickleType(tree.tpe)
- case Select(qual, name) =>
+ case Select(qual, name) =>
writeByte(SELECT)
val realName = tree.tpe match {
case tp: NamedType if tp.name.isShadowedName => tp.name
@@ -321,7 +321,7 @@ class TreePickler(pickler: TastyPickler) {
}
case Super(qual, mix) =>
writeByte(SUPER)
- withLength {
+ withLength {
pickleTree(qual);
if (!mix.isEmpty) {
val SuperType(_, mixinType) = tree.tpe
@@ -350,12 +350,12 @@ class TreePickler(pickler: TastyPickler) {
case If(cond, thenp, elsep) =>
writeByte(IF)
withLength{ pickleTree(cond); pickleTree(thenp); pickleTree(elsep) }
- case Closure(env, meth, tpt) =>
+ case Closure(env, meth, tpt) =>
writeByte(LAMBDA)
assert(env.isEmpty)
- withLength{
+ withLength{
pickleTree(meth)
- if (tpt.tpe.exists) pickleTpt(tpt)
+ if (tpt.tpe.exists) pickleTpt(tpt)
}
case Match(selector, cases) =>
writeByte(MATCH)
@@ -383,14 +383,14 @@ class TreePickler(pickler: TastyPickler) {
withLength { alts.foreach(pickleTree) }
case UnApply(fun, implicits, patterns) =>
writeByte(UNAPPLY)
- withLength {
+ withLength {
pickleTree(fun)
for (implicitArg <- implicits) {
writeByte(IMPLICITarg)
pickleTree(implicitArg)
}
pickleType(tree.tpe)
- patterns.foreach(pickleTree)
+ patterns.foreach(pickleTree)
}
case tree: ValDef =>
pickleDef(VALDEF, tree.symbol, tree.tpt, tree.rhs)
@@ -400,7 +400,7 @@ class TreePickler(pickler: TastyPickler) {
for (vparams <- tree.vparamss) {
writeByte(PARAMS)
withLength { pickleParams(vparams) }
- }
+ }
}
pickleDef(DEFDEF, tree.symbol, tree.tpt, tree.rhs, pickleAllParams)
case tree: TypeDef =>
@@ -410,7 +410,7 @@ class TreePickler(pickler: TastyPickler) {
writeByte(TEMPLATE)
val (params, rest) = tree.body partition {
case stat: TypeDef => stat.symbol is Flags.Param
- case stat: ValOrDefDef =>
+ case stat: ValOrDefDef =>
stat.symbol.is(Flags.ParamAccessor) && !stat.symbol.isSetter
case _ => false
}
@@ -435,7 +435,7 @@ class TreePickler(pickler: TastyPickler) {
withLength {
pickleTree(expr)
selectors foreach {
- case Pair(Ident(from), Ident(to)) =>
+ case Pair(Ident(from), Ident(to)) =>
writeByte(RENAMED)
withLength { pickleName(from); pickleName(to) }
case Ident(name) =>
@@ -468,13 +468,13 @@ class TreePickler(pickler: TastyPickler) {
pickleModifiers(sym)
}
}
-
+
def pickleParam(tree: Tree): Unit = tree match {
case tree: ValDef => pickleDef(PARAM, tree.symbol, tree.tpt)
case tree: DefDef => pickleDef(PARAM, tree.symbol, tree.tpt, tree.rhs)
- case tree: TypeDef => pickleDef(TYPEPARAM, tree.symbol, tree.rhs)
+ case tree: TypeDef => pickleDef(TYPEPARAM, tree.symbol, tree.rhs)
}
-
+
def pickleParams(trees: List[Tree]): Unit = {
trees.foreach(preRegister)
trees.foreach(pickleParam)
@@ -504,7 +504,7 @@ class TreePickler(pickler: TastyPickler) {
if (flags is Local) writeByte(LOCAL)
if (flags is Synthetic) writeByte(SYNTHETIC)
if (flags is Artifact) writeByte(ARTIFACT)
- if (flags is Scala2x) writeByte(SCALA2X)
+ if (flags is Scala2x) writeByte(SCALA2X)
if (flags is InSuperCall) writeByte(INSUPERCALL)
if (sym.isTerm) {
if (flags is Implicit) writeByte(IMPLICIT)
@@ -512,18 +512,18 @@ class TreePickler(pickler: TastyPickler) {
if (flags is AbsOverride) writeByte(ABSOVERRIDE)
if (flags is Mutable) writeByte(MUTABLE)
if (flags is Accessor) writeByte(FIELDaccessor)
- if (flags is CaseAccessor) writeByte(CASEaccessor)
+ if (flags is CaseAccessor) writeByte(CASEaccessor)
if (flags is DefaultParameterized) writeByte(DEFAULTparameterized)
} else {
if (flags is Sealed) writeByte(SEALED)
- if (flags is Abstract) writeByte(ABSTRACT)
+ if (flags is Abstract) writeByte(ABSTRACT)
if (flags is Trait) writeByte(TRAIT)
if (flags is Covariant) writeByte(COVARIANT)
if (flags is Contravariant) writeByte(CONTRAVARIANT)
}
sym.annotations.foreach(pickleAnnotation)
}
-
+
def pickleAnnotation(ann: Annotation) = {
writeByte(ANNOTATION)
withLength { pickleType(ann.symbol.typeRef); pickleTree(ann.tree) }
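pickleSymRef above writes a direct reference when the symbol's definition address is already known, and otherwise reserves a slot and remembers it in forwardSymRefs so it can be patched once the definition has been pickled (the patching itself happens outside the lines shown). A toy version of the same bookkeeping, with strings for symbols and Ints for addresses, not the real pickler:

// Toy forward-reference bookkeeping: backward references are written
// immediately, forward references reserve a slot and are patched later.
class RefBuffer {
  private val out         = collection.mutable.ArrayBuffer.empty[Int]
  private val defAddr     = collection.mutable.HashMap.empty[String, Int]
  private val forwardRefs = collection.mutable.HashMap.empty[String, List[Int]]

  def define(sym: String): Unit = { defAddr(sym) = out.length; out += -1 /* definition body */ }

  def ref(sym: String): Unit = defAddr.get(sym) match {
    case Some(addr) => out += addr                    // definition already pickled
    case None =>
      forwardRefs(sym) = out.length :: forwardRefs.getOrElse(sym, Nil)
      out += -1                                       // reserve, to be patched below
  }

  /** Patch every reserved slot once all definitions have been emitted. */
  def patchAll(): Seq[Int] = {
    for ((sym, slots) <- forwardRefs; slot <- slots) out(slot) = defAddr(sym)
    out.toSeq
  }
}

// val b = new RefBuffer
// b.ref("C"); b.define("C"); b.ref("C")
// b.patchAll() == Seq(1, -1, 1)   // slot 0 patched to point at C's definition at index 1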
diff --git a/src/dotty/tools/dotc/core/pickling/TreeUnpickler.scala b/src/dotty/tools/dotc/core/pickling/TreeUnpickler.scala
index c46ff8d6f..a58fc9071 100644
--- a/src/dotty/tools/dotc/core/pickling/TreeUnpickler.scala
+++ b/src/dotty/tools/dotc/core/pickling/TreeUnpickler.scala
@@ -24,7 +24,7 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
import dotty.tools.dotc.core.pickling.PickleFormat._
import TastyName._
import tpd._
-
+
private var readPositions = false
private var totalRange = NoPosition
private var positions: collection.Map[Addr, Position] = _
@@ -40,12 +40,12 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
this.totalRange = totalRange
this.positions = positions
}
-
+
private val symAtAddr = new mutable.HashMap[Addr, Symbol]
private val treeAtAddr = new mutable.HashMap[Addr, Tree]
private val typeAtAddr = new mutable.HashMap[Addr, Type] // currently populated only for types that are known to be SHAREd.
- private var stubs: Set[Symbol] = Set()
-
+ private var stubs: Set[Symbol] = Set()
+
private var roots: Set[SymDenotation] = null
/** Enter all toplevel classes and objects into their scopes
@@ -64,13 +64,13 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
normalizePos(stats, totalRange)
stats
}
-
+
def toTermName(tname: TastyName): TermName = tname match {
case Simple(name) => name
case Qualified(qual, name) => toTermName(qual) ++ "." ++ toTermName(name)
case Signed(original, params, result) => toTermName(original)
case Shadowed(original) => toTermName(original).shadowedName
- case Expanded(prefix, original) => toTermName(original).expandedName(toTermName(prefix))
+ case Expanded(prefix, original) => toTermName(original).expandedName(toTermName(prefix))
case ModuleClass(original) => toTermName(original).moduleClassName.toTermName
case SuperAccessor(accessed) => ???
case DefaultGetter(meth, num) => ???
@@ -88,17 +88,17 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
class TreeReader(val reader: TastyReader) {
import reader._
-
+
def forkAt(start: Addr) = new TreeReader(subReader(start, endAddr))
def fork = forkAt(currentAddr)
-
+
def skipTree(tag: Int): Unit =
if (tag >= firstLengthTreeTag) goto(readEnd())
else if (tag >= firstNatASTTreeTag) { readNat(); skipTree() }
else if (tag >= firstASTTreeTag) skipTree()
else if (tag >= firstNatTreeTag) readNat()
def skipTree(): Unit = skipTree(readByte())
-
+
def skipParams(): Unit =
while (nextByte == PARAMS || nextByte == TYPEPARAM) skipTree()
@@ -112,7 +112,7 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
}
else tag
}
-
+
def readName(): TermName = toTermName(readNameRef())
def readNameSplitSig()(implicit ctx: Context): Any /* TermName | (TermName, Signature) */ =
@@ -124,19 +124,19 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
case name =>
toTermName(name)
}
-
+
// ------ Reading types -----------------------------------------------------
-
+
/** Read names in an interleaved sequence of (parameter) names and types/bounds */
- def readParamNames[N <: Name](end: Addr): List[N] =
- until(end) {
+ def readParamNames[N <: Name](end: Addr): List[N] =
+ until(end) {
val name = readName().asInstanceOf[N]
- skipTree()
+ skipTree()
name
}
/** Read types or bounds in an interleaved sequence of (parameter) names and types/bounds */
- def readParamTypes[T <: Type](end: Addr)(implicit ctx: Context): List[T] =
+ def readParamTypes[T <: Type](end: Addr)(implicit ctx: Context): List[T] =
until(end) { readNat(); readType().asInstanceOf[T] }
/** Read reference to definition and return symbol created at that definition */
@@ -165,17 +165,17 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
typeAtAddr(start) = tp
op
}
-
+
def readLengthType(): Type = {
val end = readEnd()
-
+
def readNamesSkipParams[N <: Name]: (List[N], TreeReader) = {
val nameReader = fork
nameReader.skipTree() // skip result
val paramReader = nameReader.fork
(nameReader.readParamNames[N](end), paramReader)
}
-
+
val result =
(tag: @switch) match {
case SUPERtype =>
@@ -194,7 +194,7 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
TypeBounds(readType(), readType())
case TYPEALIAS =>
val alias = readType()
- val variance =
+ val variance =
if (nextByte == COVARIANT) { readByte(); 1 }
else if (nextByte == CONTRAVARIANT) { readByte(); -1 }
else 0
@@ -235,9 +235,9 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
assert(currentAddr == end, s"$start $currentAddr $end ${astTagToString(tag)}")
result
}
-
+
def readSimpleType(): Type = (tag: @switch) match {
- case TYPEREFdirect | TERMREFdirect =>
+ case TYPEREFdirect | TERMREFdirect =>
NamedType.withFixedSym(NoPrefix, readSymRef())
case TYPEREFsymbol | TERMREFsymbol =>
readSymNameRef()
@@ -287,10 +287,10 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
case BYNAMEtype =>
ExprType(readType())
}
-
+
if (tag < firstLengthTreeTag) readSimpleType() else readLengthType()
}
-
+
private def readSymNameRef()(implicit ctx: Context): Type = {
val sym = readSymRef()
val prefix = readType()
@@ -300,17 +300,17 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
// without this precaution we get an infinite cycle when unpickling pos/extmethods.scala
// the problem arises when a self type of a trait is a type parameter of the same trait.
case _ => res
- }
+ }
}
-
+
private def readPackageRef()(implicit ctx: Context): TermSymbol = {
val name = readName()
if (name == nme.ROOT) defn.RootPackage
else if (name == nme.EMPTY_PACKAGE) defn.EmptyPackageVal
else ctx.requiredPackage(name)
}
-
- def readTypeRef(): Type =
+
+ def readTypeRef(): Type =
typeAtAddr(readAddr())
def readPath()(implicit ctx: Context): Type = {
@@ -318,23 +318,23 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
assert(tp.isInstanceOf[SingletonType])
tp
}
-
- def readTermRef()(implicit ctx: Context): TermRef =
+
+ def readTermRef()(implicit ctx: Context): TermRef =
readType().asInstanceOf[TermRef]
// ------ Reading definitions -----------------------------------------------------
-
- private def noRhs(end: Addr): Boolean =
+
+ private def noRhs(end: Addr): Boolean =
currentAddr == end || isModifierTag(nextByte)
-
+
private def localContext(owner: Symbol)(implicit ctx: Context) = {
val lctx = ctx.fresh.setOwner(owner)
if (owner.isClass) lctx.setScope(owner.unforcedDecls) else lctx.setNewScope
}
-
+
private def normalizeFlags(tag: Int, givenFlags: FlagSet, name: Name, isAbstractType: Boolean, rhsIsEmpty: Boolean)(implicit ctx: Context): FlagSet = {
val lacksDefinition =
- rhsIsEmpty &&
+ rhsIsEmpty &&
name.isTermName && !name.isConstructorName && !givenFlags.is(ParamOrAccessor) ||
isAbstractType
var flags = givenFlags
@@ -350,7 +350,7 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
flags
}
- /** Create symbol of definition node and enter in symAtAddr map
+ /** Create symbol of definition node and enter in symAtAddr map
* @return true iff the definition does not contain initialization code
*/
def createSymbol()(implicit ctx: Context): Boolean = {
@@ -372,7 +372,7 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
val expandedFlag = if (rawName.isInstanceOf[TastyName.Expanded]) ExpandedName else EmptyFlags
pickling.println(i"creating symbol $name at $start with flags $givenFlags")
val flags = normalizeFlags(tag, givenFlags | expandedFlag, name, isAbstractType, rhsIsEmpty)
- def adjustIfModule(completer: LazyType) =
+ def adjustIfModule(completer: LazyType) =
if (flags is Module) ctx.adjustModuleCompleter(completer, name) else completer
val sym =
roots.find(root => (root.owner eq ctx.owner) && root.name == name) match {
@@ -386,7 +386,7 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
case _ =>
val completer = adjustIfModule(new Completer(subReader(start, end)))
if (isClass)
- ctx.newClassSymbol(ctx.owner, name.asTypeName, flags, completer,
+ ctx.newClassSymbol(ctx.owner, name.asTypeName, flags, completer,
privateWithin, coord = start.index)
else {
val sym = symAtAddr.get(start) match {
@@ -412,7 +412,7 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
tag != VALDEF || rhsIsEmpty
}
- /** Read modifier list into triplet of flags, annotations and a privateWithin
+ /** Read modifier list into triplet of flags, annotations and a privateWithin
* boundary symbol.
*/
def readModifiers(end: Addr)(implicit ctx: Context): (FlagSet, List[Annotation], Symbol) = {
@@ -452,7 +452,7 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
case SCALA2X => addFlag(Scala2x)
case DEFAULTparameterized => addFlag(DefaultParameterized)
case INSUPERCALL => addFlag(InSuperCall)
- case PRIVATEqualified =>
+ case PRIVATEqualified =>
readByte()
privateWithin = readType().typeSymbol
case PROTECTEDqualified =>
@@ -470,23 +470,23 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
}
(flags, annots.toList, privateWithin)
}
-
+
/** Create symbols for all definitions in the statement sequence between
* current address and `end`.
* @return true iff none of the statements contains initialization code
*/
def indexStats(end: Addr)(implicit ctx: Context): Boolean = {
- val noInitss =
- until(end) {
+ val noInitss =
+ until(end) {
nextByte match {
- case VALDEF | DEFDEF | TYPEDEF | TYPEPARAM | PARAM =>
- createSymbol()
- case IMPORT =>
+ case VALDEF | DEFDEF | TYPEDEF | TYPEPARAM | PARAM =>
+ createSymbol()
+ case IMPORT =>
skipTree()
true
- case PACKAGE =>
+ case PACKAGE =>
processPackage { (pid, end) => implicit ctx => indexStats(end) }
- case _ =>
+ case _ =>
skipTree()
false
}
@@ -505,13 +505,13 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
val pid = ref(readTermRef()).asInstanceOf[RefTree]
op(pid, end)(localContext(pid.symbol.moduleClass))
}
-
+
/** Create symbols for the longest consecutive sequence of parameters with the given
`tag` starting at the current address.
*/
def indexParams(tag: Int)(implicit ctx: Context) =
while (nextByte == tag) createSymbol()
-
+
/** Create symbols for all type and value parameters of template starting
* at current address.
*/
@@ -523,13 +523,13 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
}
/** If definition was already read by a completer, return the previously read tree
- * or else read definition.
+ * or else read definition.
*/
def readIndexedDef()(implicit ctx: Context): Tree = treeAtAddr.remove(currentAddr) match {
case Some(tree) => skipTree(); tree
case none => readNewDef()
}
-
+
private def readNewDef()(implicit ctx: Context): Tree = {
val start = currentAddr
val sym = symAtAddr(start)
@@ -540,7 +540,7 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
fork.indexParams(tag)
readIndexedParams(tag)
}
-
+
def readParamss(implicit ctx: Context): List[List[ValDef]] = {
collectWhile(nextByte == PARAMS) {
readByte()
@@ -548,19 +548,19 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
readParams[ValDef](PARAM)
}
}
-
- def readRhs(implicit ctx: Context) =
+
+ def readRhs(implicit ctx: Context) =
if (noRhs(end)) EmptyTree
else readLater(end, rdr => ctx => rdr.readTerm()(ctx))
def localCtx = localContext(sym)
-
- def DefDef(tparams: List[TypeDef], vparamss: List[List[ValDef]], tpt: Tree) =
+
+ def DefDef(tparams: List[TypeDef], vparamss: List[List[ValDef]], tpt: Tree) =
ta.assignType(
untpd.DefDef(
sym.name.asTermName, tparams, vparamss, tpt, readRhs(localCtx)),
sym)
-
+
def ta = ctx.typeAssigner
val name = readName()
@@ -599,18 +599,18 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
DefDef(Nil, Nil, TypeTree(info))
}
}
- val mods =
+ val mods =
if (sym.annotations.isEmpty) EmptyModifiers
else Modifiers(annotations = sym.annotations.map(_.tree))
tree.withMods(mods) // record annotations in tree so that tree positions can be filled in.
goto(end)
setPos(start, tree)
}
-
+
private def readTemplate(implicit ctx: Context): Template = {
val start = currentAddr
val cls = ctx.owner.asClass
- def setClsInfo(parents: List[TypeRef], selfType: Type) =
+ def setClsInfo(parents: List[TypeRef], selfType: Type) =
cls.info = ClassInfo(cls.owner.thisType, cls, parents, cls.unforcedDecls, selfType)
setClsInfo(Nil, NoType)
val localDummy = ctx.newLocalDummy(cls)
@@ -625,7 +625,7 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
}
}
val parentRefs = ctx.normalizeToClassRefs(parents.map(_.tpe), cls, cls.unforcedDecls)
- val self =
+ val self =
if (nextByte == SELFDEF) {
readByte()
untpd.ValDef(readName(), readTpt(), EmptyTree).withType(NoType)
@@ -636,16 +636,16 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
if (noInits) cls.setFlag(NoInits)
val constr = readIndexedDef().asInstanceOf[DefDef]
- def mergeTypeParamsAndAliases(tparams: List[TypeDef], stats: List[Tree]): (List[Tree], List[Tree]) =
+ def mergeTypeParamsAndAliases(tparams: List[TypeDef], stats: List[Tree]): (List[Tree], List[Tree]) =
(tparams, stats) match {
- case (tparam :: tparams1, (alias: TypeDef) :: stats1)
+ case (tparam :: tparams1, (alias: TypeDef) :: stats1)
if tparam.name == alias.name.expandedName(cls) =>
val (tas, stats2) = mergeTypeParamsAndAliases(tparams1, stats1)
(tparam :: alias :: tas, stats2)
case _ =>
(tparams, stats)
- }
-
+ }
+
val lazyStats = readLater(end, rdr => implicit ctx => {
val stats0 = rdr.readIndexedStats(localDummy, end)
val (tparamsAndAliases, stats) = mergeTypeParamsAndAliases(tparams, stats0)
@@ -655,29 +655,29 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
untpd.Template(constr, parents, self, lazyStats)
.withType(localDummy.nonMemberTermRef))
}
-
+
def readIndexedStat(exprOwner: Symbol)(implicit ctx: Context): Tree = nextByte match {
- case TYPEDEF | VALDEF | DEFDEF | IMPORT =>
+ case TYPEDEF | VALDEF | DEFDEF | IMPORT =>
readIndexedDef()
- case IMPORT =>
+ case IMPORT =>
???
case PACKAGE =>
val start = currentAddr
processPackage { (pid, end) => implicit ctx =>
setPos(start, PackageDef(pid, readIndexedStats(exprOwner, end)(ctx)))
}
- case _ =>
+ case _ =>
readTerm()(ctx.withOwner(exprOwner))
}
-
+
def readIndexedStats(exprOwner: Symbol, end: Addr)(implicit ctx: Context): List[Tree] =
until(end)(readIndexedStat(exprOwner))
-
+
def readStats(exprOwner: Symbol, end: Addr)(implicit ctx: Context): List[Tree] = {
fork.indexStats(end)
readIndexedStats(exprOwner, end)
- }
-
+ }
+
def readIndexedParams[T <: MemberDef](tag: Int)(implicit ctx: Context): List[T] =
collectWhile(nextByte == tag) { readIndexedDef().asInstanceOf[T] }
@@ -702,7 +702,7 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
untpd.Ident(readName()).withType(readType())
case SELECT =>
def readQual(name: Name) = {
- val localCtx =
+ val localCtx =
if (name == nme.CONSTRUCTOR) ctx.fresh.addMode(Mode.InSuperCall) else ctx
readTerm()(localCtx)
}
@@ -716,7 +716,7 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
case name: Name => readRest(name, Signature.NotAMethod)
case (name: Name, sig: Signature) => readRest(name, sig)
}
-
+
case NEW =>
New(readTpt())
case _ =>
@@ -728,12 +728,12 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
val result =
(tag: @switch) match {
- case SUPER =>
+ case SUPER =>
val qual = readTerm()
val mixClass = ifBefore(end)(readType().typeSymbol, NoSymbol)
val mixName = if (mixClass.exists) mixClass.name.asTypeName else tpnme.EMPTY
tpd.Super(qual, mixName, ctx.mode.is(Mode.InSuperCall), mixClass)
- case APPLY =>
+ case APPLY =>
val fn = readTerm()
val isJava = fn.tpe.isInstanceOf[JavaMethodType]
def readArg() = readTerm() match {
@@ -784,7 +784,7 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
Alternative(until(end)(readTerm()))
case UNAPPLY =>
val fn = readTerm()
- val implicitArgs =
+ val implicitArgs =
collectWhile(nextByte == IMPLICITarg) {
readByte()
readTerm()
@@ -798,19 +798,19 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
assert(currentAddr == end, s"$start $currentAddr $end ${astTagToString(tag)}")
result
}
-
+
val tree = if (tag < firstLengthTreeTag) readSimpleTerm() else readLengthTerm()
tree.overwriteType(tree.tpe.simplified)
setPos(start, tree)
}
-
+
def readTpt()(implicit ctx: Context) = {
val start = currentAddr
val tp = readType()
if (tp.exists) setPos(start, TypeTree(tp)) else EmptyTree
}
- def readCases(end: Addr)(implicit ctx: Context): List[CaseDef] =
+ def readCases(end: Addr)(implicit ctx: Context): List[CaseDef] =
collectWhile(nextByte == CASEDEF && currentAddr != end) { readCase()(ctx.fresh.setNewScope) }
def readCase()(implicit ctx: Context): CaseDef = {
@@ -822,18 +822,18 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
val guard = ifBefore(end)(readTerm(), EmptyTree)
setPos(start, CaseDef(pat, guard, rhs))
}
-
+
def readLater[T <: AnyRef](end: Addr, op: TreeReader => Context => T): Trees.Lazy[T] = {
val localReader = fork
goto(end)
new LazyReader(localReader, op)
}
-
+
// ------ Hooks for positions ------------------------------------------------
-
- /** Record address from which tree was created as a temporary position in the tree.
+
+ /** Record address from which tree was created as a temporary position in the tree.
* The temporary position contains deltas relative to the position of the (as yet unknown)
- * parent node. It is marked as a non-synthetic source position.
+ * parent node. It is marked as a non-synthetic source position.
*/
def setPos[T <: Tree](addr: Addr, tree: T): T = {
if (readPositions)
@@ -841,13 +841,13 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
tree
}
}
-
+
private def setNormalized(tree: Tree, parentPos: Position): Unit = {
assert(tree.pos.exists)
val absPos = Position(parentPos.start + offsetToInt(tree.pos.start), parentPos.end - tree.pos.end)
tree.setPosUnchecked(absPos)
}
-
+
def normalizePos(x: Any, parentPos: Position)(implicit ctx: Context): Unit =
traverse(x, parentPos, setNormalized)
@@ -857,10 +857,10 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
val res = op(reader)(ctx.addMode(Mode.AllowDependentFunctions))
normalizePos(res, parentPos)
res
- }
+ }
}
-
- class LazyAnnotationReader(sym: Symbol, reader: TreeReader)
+
+ class LazyAnnotationReader(sym: Symbol, reader: TreeReader)
extends LazyAnnotation(sym) with DeferredPosition {
def complete(implicit ctx: Context) = {
val res = reader.readTerm()
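readLater above forks the reader at the current address, moves the main reader past the subtree, and wraps the fork in a lazy holder (LazyReader, LazyAnnotationReader) that decodes the subtree only when it is first demanded. The shape of that pattern in isolation, with a toy reader rather than the compiler's TreeReader:

// Toy version of the readLater pattern: freeze a private cursor at the start of
// a subtree, skip it in the main pass, and decode it on first demand.
class ToyReader(val data: Vector[String], var pos: Int) {
  def fork: ToyReader = new ToyReader(data, pos)
}

def readLaterToy[T](r: ToyReader, end: Int)(op: ToyReader => T): () => T = {
  val local = r.fork      // private reader positioned at the subtree's start
  r.pos = end             // the main reader jumps straight past the subtree
  () => op(local)         // actual decoding is deferred until the thunk is forced
}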
diff --git a/src/dotty/tools/dotc/core/pickling/UnPickler.scala b/src/dotty/tools/dotc/core/pickling/UnPickler.scala
index a47b8bda2..3c3ec4a70 100644
--- a/src/dotty/tools/dotc/core/pickling/UnPickler.scala
+++ b/src/dotty/tools/dotc/core/pickling/UnPickler.scala
@@ -357,9 +357,9 @@ class UnPickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClassRoot:
val denot1 = denot.disambiguate(d => p(d.symbol))
val sym = denot1.symbol
if (denot.exists && !denot1.exists) { // !!!DEBUG
- val alts = denot.alternatives map (d => d+":"+d.info+"/"+d.signature)
+ val alts = denot.alternatives map (d => d + ":" + d.info + "/" + d.signature)
System.err.println(s"!!! disambiguation failure: $alts")
- val members = denot.alternatives.head.symbol.owner.info.decls.toList map (d => d+":"+d.info+"/"+d.signature)
+ val members = denot.alternatives.head.symbol.owner.info.decls.toList map (d => d + ":" + d.info + "/" + d.signature)
System.err.println(s"!!! all members: $members")
}
if (tag == EXTref) sym else sym.moduleClass
@@ -368,7 +368,7 @@ class UnPickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClassRoot:
def fromName(name: Name): Symbol = name.toTermName match {
case nme.ROOT => loadingMirror.RootClass
case nme.ROOTPKG => loadingMirror.RootPackage
- case _ =>
+ case _ =>
def declIn(owner: Symbol) = adjust(owner.info.decl(name))
val sym = declIn(owner)
if (sym.exists || owner.ne(defn.ObjectClass)) sym else declIn(defn.AnyClass)
@@ -532,7 +532,7 @@ class UnPickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClassRoot:
inforef = readNat()
pw
}
- // println("reading type for "+denot) // !!! DEBUG
+ // println("reading type for " + denot) // !!! DEBUG
val tp = at(inforef, readType)
denot match {
case denot: ClassDenotation =>
@@ -687,7 +687,7 @@ class UnPickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClassRoot:
if (decls.isEmpty) parent
else {
def addRefinement(tp: Type, sym: Symbol) = {
- def subst(info: Type, rt: RefinedType) =
+ def subst(info: Type, rt: RefinedType) =
if (clazz.isClass) info.substThis(clazz.asClass, SkolemType(rt))
else info // turns out some symbols read into `clazz` are not classes, not sure why this is the case.
RefinedType(tp, sym.name, subst(sym.info, _))
diff --git a/src/dotty/tools/dotc/parsing/JavaParsers.scala b/src/dotty/tools/dotc/parsing/JavaParsers.scala
index ab805f261..297f3c0f9 100644
--- a/src/dotty/tools/dotc/parsing/JavaParsers.scala
+++ b/src/dotty/tools/dotc/parsing/JavaParsers.scala
@@ -121,11 +121,11 @@ object JavaParsers {
case nil => (EmptyTree, nil)
}
var (constr1, stats1) = pullOutFirstConstr(stats)
- if(constr1 == EmptyTree) constr1 = makeConstructor(List(), tparams)
+ if (constr1 == EmptyTree) constr1 = makeConstructor(List(), tparams)
// A dummy first constructor is needed for Java classes so that the real constructors see the
// import of the companion object. The constructor has parameter of type Unit so no Java code
// can call it.
- if(needsDummyConstr) {
+ if (needsDummyConstr) {
stats1 = constr1 :: stats1
constr1 = makeConstructor(List(scalaDot(tpnme.Unit)), tparams, Flags.JavaDefined | Flags.PrivateLocal)
}
@@ -314,7 +314,7 @@ object JavaParsers {
/*
TypeDef(
Modifiers(Flags.JavaDefined | Flags.Deferred),
- typeName("_$"+(wildnum += 1)),
+ typeName("_$" +(wildnum += 1)),
List(),
TypeBoundsTree(lo, hi))
*/
@@ -579,9 +579,9 @@ object JavaParsers {
def varDecl(pos: Position, mods: Modifiers, tpt: Tree, name: TermName): ValDef = {
val tpt1 = optArrayBrackets(tpt)
if (in.token == EQUALS && !(mods is Flags.Param)) skipTo(COMMA, SEMI)
- val mods1 = if(mods is Flags.Final) mods else mods | Flags.Mutable
+ val mods1 = if (mods is Flags.Final) mods else mods | Flags.Mutable
atPos(pos) {
- ValDef(name, tpt1, if(mods is Flags.Param) EmptyTree else unimplementedExpr).withMods(mods1)
+ ValDef(name, tpt1, if (mods is Flags.Param) EmptyTree else unimplementedExpr).withMods(mods1)
}
}
diff --git a/src/dotty/tools/dotc/parsing/MarkupParsers.scala b/src/dotty/tools/dotc/parsing/MarkupParsers.scala
index 3afaf171c..f648b9e2c 100644
--- a/src/dotty/tools/dotc/parsing/MarkupParsers.scala
+++ b/src/dotty/tools/dotc/parsing/MarkupParsers.scala
@@ -394,7 +394,7 @@ object MarkupParsers {
op
}
if (parser.in.token != RBRACE)
- reportSyntaxError(" expected end of Scala "+kind)
+ reportSyntaxError(" expected end of Scala " + kind)
res
}
@@ -421,7 +421,9 @@ object MarkupParsers {
xSpaceOpt
val ts = new ArrayBuffer[Tree]
- val isEmptyTag = (ch == '/') && { nextch ; true }
+
+ val isEmptyTag = ch == '/'
+ if (isEmptyTag) nextch()
xToken('>')
if (!isEmptyTag) {
@@ -448,7 +450,7 @@ object MarkupParsers {
case _ => // text
appendText(Position(start1, curOffset, start1), ts, xText)
// here xEmbeddedBlock might be true:
- // if (xEmbeddedBlock) throw new ApplicationError("after:"+text); // assert
+ // if (xEmbeddedBlock) throw new ApplicationError("after:" + text); // assert
}
true
}
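
Note on the MarkupParsers change above (the same rewrite is applied to Utility.scala further down): a short-circuit `&&` whose right operand exists only for its side effect is replaced by an explicit flag plus an `if`. A minimal, self-contained sketch of the same rewrite, using a hypothetical character reader in place of the parser's input:

    object SideEffectRefactor {
      def demo(input: Iterator[Char]): Boolean = {
        var ch: Char = input.next()
        def nextch(): Unit = ch = if (input.hasNext) input.next() else '\u0000'
        // fused form replaced by the patch:
        //   val isEmptyTag = (ch == '/') && { nextch(); true }
        // explicit form introduced by the patch:
        val isEmptyTag = ch == '/'
        if (isEmptyTag) nextch()
        isEmptyTag
      }
      def main(args: Array[String]): Unit =
        println(demo("/>".iterator)) // true: the '/' is consumed before '>' is read
    }
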
diff --git a/src/dotty/tools/dotc/parsing/Parsers.scala b/src/dotty/tools/dotc/parsing/Parsers.scala
index 2bb6b974b..cbefb81fe 100644
--- a/src/dotty/tools/dotc/parsing/Parsers.scala
+++ b/src/dotty/tools/dotc/parsing/Parsers.scala
@@ -184,11 +184,11 @@ object Parsers {
return
skippedParens.change(LBRACKET, -1)
case LBRACE =>
- skippedParens.change(LBRACE, +1)
+ skippedParens.change(LBRACE, + 1)
case LPAREN =>
- skippedParens.change(LPAREN, +1)
+ skippedParens.change(LPAREN, + 1)
case LBRACKET=>
- skippedParens.change(LBRACKET, +1)
+ skippedParens.change(LBRACKET, + 1)
case _ =>
if (mustStartStat &&
in.isAfterLineEnd() &&
@@ -1984,7 +1984,7 @@ object Parsers {
stats += defOrDcl(in.offset, Modifiers())
} else if (!isStatSep) {
syntaxErrorOrIncomplete(
- "illegal start of declaration"+
+ "illegal start of declaration" +
(if (inFunReturnType) " (possible cause: missing `=' in front of current method body)"
else ""))
}
diff --git a/src/dotty/tools/dotc/parsing/Scanners.scala b/src/dotty/tools/dotc/parsing/Scanners.scala
index 5eb8357a4..46274bcc9 100644
--- a/src/dotty/tools/dotc/parsing/Scanners.scala
+++ b/src/dotty/tools/dotc/parsing/Scanners.scala
@@ -290,7 +290,7 @@ object Scanners {
}
postProcessToken()
- // print("["+this+"]")
+ // print("[" + this +"]")
}
def postProcessToken() = {
@@ -375,7 +375,7 @@ object Scanners {
case ' ' | '\t' | '\n' | '{' | '(' | '>' if isNameStart(ch) || ch == '!' || ch == '?' =>
token = XMLSTART
case _ =>
- // Console.println("found '<', but last is '"+in.last+"'"); // DEBUG
+ // Console.println("found '<', but last is '" + in.last +"'"); // DEBUG
putChar('<')
getOperatorRest()
}
diff --git a/src/dotty/tools/dotc/parsing/ScriptParsers.scala b/src/dotty/tools/dotc/parsing/ScriptParsers.scala
index 8b5c51c34..afa7fefab 100644
--- a/src/dotty/tools/dotc/parsing/ScriptParsers.scala
+++ b/src/dotty/tools/dotc/parsing/ScriptParsers.scala
@@ -142,4 +142,4 @@ object ScriptParsers {
makePackaging(0, emptyPkg, List(moduleDef))
}*/
}
-}
\ No newline at end of file
+}
diff --git a/src/dotty/tools/dotc/parsing/Utility.scala b/src/dotty/tools/dotc/parsing/Utility.scala
index 44ed268e1..f522492f8 100644
--- a/src/dotty/tools/dotc/parsing/Utility.scala
+++ b/src/dotty/tools/dotc/parsing/Utility.scala
@@ -69,7 +69,7 @@ object Utility {
else sb append c
}
- if(!sb.isEmpty) // flush buffer
+ if (!sb.isEmpty) // flush buffer
nb += text(sb.toString())
nb.toList
@@ -83,7 +83,8 @@ object Utility {
* See [66]
*/
def parseCharRef(ch: () => Char, nextch: () => Unit, reportSyntaxError: String => Unit, reportTruncatedError: String => Unit): String = {
- val hex = (ch() == 'x') && { nextch(); true }
+ val hex = ch() == 'x'
+ if (hex) nextch()
val base = if (hex) 16 else 10
var i = 0
while (ch() != ';') {
diff --git a/src/dotty/tools/dotc/parsing/package.scala b/src/dotty/tools/dotc/parsing/package.scala
index 0f64f9e1f..8b113ed96 100644
--- a/src/dotty/tools/dotc/parsing/package.scala
+++ b/src/dotty/tools/dotc/parsing/package.scala
@@ -30,4 +30,4 @@ package object parsing {
def minInfixPrec = 1
def maxPrec = 11
-}
\ No newline at end of file
+}
diff --git a/src/dotty/tools/dotc/printing/Disambiguation.scala b/src/dotty/tools/dotc/printing/Disambiguation.scala
index baacee42f..29b290f03 100644
--- a/src/dotty/tools/dotc/printing/Disambiguation.scala
+++ b/src/dotty/tools/dotc/printing/Disambiguation.scala
@@ -83,4 +83,4 @@ object Disambiguation {
case _ => res
}
}
-}
\ No newline at end of file
+}
diff --git a/src/dotty/tools/dotc/printing/PlainPrinter.scala b/src/dotty/tools/dotc/printing/PlainPrinter.scala
index ce33132ab..2762d9b51 100644
--- a/src/dotty/tools/dotc/printing/PlainPrinter.scala
+++ b/src/dotty/tools/dotc/printing/PlainPrinter.scala
@@ -33,17 +33,17 @@ class PlainPrinter(_ctx: Context) extends Printer {
ctx.warning("Exceeded recursion depth attempting to print.")
(new Throwable).printStackTrace
}
-
+
/** If true, tweak output so it is the same before and after pickling */
protected def homogenizedView: Boolean = ctx.settings.YtestPickler.value
-
- def homogenize(tp: Type): Type =
+
+ def homogenize(tp: Type): Type =
if (homogenizedView)
tp match {
case tp: TypeVar if tp.isInstantiated => homogenize(tp.instanceOpt)
case AndType(tp1, tp2) => homogenize(tp1) & homogenize(tp2)
case OrType(tp1, tp2) => homogenize(tp1) | homogenize(tp2)
- case _ =>
+ case _ =>
val tp1 = tp.simplifyApply
if (tp1 eq tp) tp else homogenize(tp1)
}
@@ -159,9 +159,9 @@ class PlainPrinter(_ctx: Context) extends Printer {
toTextLocal(tp.instanceOpt) ~ "'" // debug for now, so that we can see where the TypeVars are.
else {
val constr = ctx.typerState.constraint
- val bounds =
- if (constr.contains(tp)) constr.fullBounds(tp.origin)
- else TypeBounds.empty
+ val bounds =
+ if (constr.contains(tp)) constr.fullBounds(tp.origin)
+ else TypeBounds.empty
"(" ~ toText(tp.origin) ~ "?" ~ toText(bounds) ~ ")"
}
case _ =>
diff --git a/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/src/dotty/tools/dotc/printing/RefinedPrinter.scala
index d341ce00f..423c62044 100644
--- a/src/dotty/tools/dotc/printing/RefinedPrinter.scala
+++ b/src/dotty/tools/dotc/printing/RefinedPrinter.scala
@@ -22,7 +22,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
def withEnclosingDef(enclDef: Tree[_ >: Untyped])(op: => Text): Text = {
val savedCtx = myCtx
- if (enclDef.hasType && enclDef.symbol.exists)
+ if (enclDef.hasType && enclDef.symbol.exists)
myCtx = ctx.withOwner(enclDef.symbol)
val savedDef = enclosingDef
enclosingDef = enclDef
@@ -158,7 +158,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
import untpd.{modsDeco => _, _}
/** Print modifiers form symbols if tree has type, overriding the untpd behavior. */
- implicit def modsDeco(mdef: untpd.MemberDef)(implicit ctx: Context): untpd.ModsDeco =
+ implicit def modsDeco(mdef: untpd.MemberDef)(implicit ctx: Context): untpd.ModsDeco =
tpd.modsDeco(mdef.asInstanceOf[tpd.MemberDef]).asInstanceOf[untpd.ModsDeco]
def isLocalThis(tree: Tree) = tree.typeOpt match {
@@ -202,12 +202,12 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
def useSymbol =
tree.hasType && tree.symbol.exists && ctx.settings.YprintSyms.value
-
+
def modText(mods: untpd.Modifiers, kw: String): Text = { // DD
val suppressKw = if (enclDefIsClass) mods is ParamAndLocal else mods is Param
- var flagMask =
+ var flagMask =
if (ctx.settings.debugFlags.value) AllFlags
- else if (suppressKw) PrintableFlags &~ Private
+ else if (suppressKw) PrintableFlags &~ Private
else PrintableFlags
if (homogenizedView && mods.flags.isTypeFlags) flagMask &~= Implicit // drop implicit from classes
val flagsText = (mods.flags & flagMask).toString
@@ -257,7 +257,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
prefix ~ (" extends" provided !ofNew) ~~ parentsText ~~ bodyText
}
- def toTextPackageId(pid: Tree): Text =
+ def toTextPackageId(pid: Tree): Text =
if (homogenizedView) toTextLocal(pid.tpe)
else toTextLocal(pid)
@@ -364,7 +364,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
("(" ~ toTextGlobal(implicits, ", ") ~ ")" provided implicits.nonEmpty)
case tree @ ValDef(name, tpt, _) =>
dclTextOr {
- modText(tree.mods, if (tree.mods is Mutable) "var" else "val") ~~
+ modText(tree.mods, if (tree.mods is Mutable) "var" else "val") ~~
nameIdText(tree) ~ optAscription(tpt) ~
withEnclosingDef(tree) { optText(tree.rhs)(" = " ~ _) }
}
@@ -372,14 +372,14 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
dclTextOr {
val prefix = modText(tree.mods, "def") ~~ nameIdText(tree)
withEnclosingDef(tree) {
- addVparamssText(prefix ~ tparamsText(tparams), vparamss) ~ optAscription(tpt) ~
+ addVparamssText(prefix ~ tparamsText(tparams), vparamss) ~ optAscription(tpt) ~
optText(tree.rhs)(" = " ~ _)
}
}
case tree @ TypeDef(name, rhs) =>
def typeDefText(rhsText: Text) =
dclTextOr {
- modText(tree.mods, "type") ~~ nameIdText(tree) ~
+ modText(tree.mods, "type") ~~ nameIdText(tree) ~
withEnclosingDef(tree) {
val rhsText1 = if (tree.hasType) toText(tree.symbol.info) else rhsText
tparamsText(tree.tparams) ~ rhsText1
@@ -387,7 +387,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
}
rhs match {
case impl: Template =>
- modText(tree.mods, if ((tree).mods is Trait) "trait" else "class") ~~
+ modText(tree.mods, if ((tree).mods is Trait) "trait" else "class") ~~
nameIdText(tree) ~ withEnclosingDef(tree) { toTextTemplate(impl) } ~
(if (tree.hasType && ctx.settings.verbose.value) s"[decls = ${tree.symbol.info.decls}]" else "")
case rhs: TypeBoundsTree =>
@@ -413,7 +413,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
val bodyText =
if (currentPrecedence == TopLevelPrec) "\n" ~ statsText else " {" ~ statsText ~ "}"
"package " ~ toTextPackageId(pid) ~ bodyText
- case tree: Template =>
+ case tree: Template =>
toTextTemplate(tree)
case Annotated(annot, arg) =>
toTextLocal(arg) ~~ annotText(annot)
@@ -517,7 +517,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
def optText[T >: Untyped](tree: List[Tree[T]])(encl: Text => Text): Text =
if (tree.exists(!_.isEmpty)) encl(blockText(tree)) else ""
-
+
override protected def polyParamName(name: TypeName): TypeName =
name.unexpandedName
diff --git a/src/dotty/tools/dotc/printing/Texts.scala b/src/dotty/tools/dotc/printing/Texts.scala
index a45e7af1f..db81cab7a 100644
--- a/src/dotty/tools/dotc/printing/Texts.scala
+++ b/src/dotty/tools/dotc/printing/Texts.scala
@@ -165,4 +165,4 @@ object Texts {
class Closed(relems: List[Text]) extends Fluid(relems)
implicit def stringToText(s: String): Text = Str(s)
-}
\ No newline at end of file
+}
diff --git a/src/dotty/tools/dotc/reporting/Reporter.scala b/src/dotty/tools/dotc/reporting/Reporter.scala
index 223fc33d5..71a908397 100644
--- a/src/dotty/tools/dotc/reporting/Reporter.scala
+++ b/src/dotty/tools/dotc/reporting/Reporter.scala
@@ -89,7 +89,7 @@ trait Reporting { this: Context =>
def warning(msg: => String, pos: SourcePosition = NoSourcePosition): Unit =
reporter.report(new Warning(msg, pos))
-
+
def strictWarning(msg: => String, pos: SourcePosition = NoSourcePosition): Unit =
if (this.settings.strict.value) error(msg, pos)
else warning(msg + "\n(This would be an error under strict mode)", pos)
diff --git a/src/dotty/tools/dotc/transform/CapturedVars.scala b/src/dotty/tools/dotc/transform/CapturedVars.scala
index 77b912f08..86cf80073 100644
--- a/src/dotty/tools/dotc/transform/CapturedVars.scala
+++ b/src/dotty/tools/dotc/transform/CapturedVars.scala
@@ -102,4 +102,4 @@ class CapturedVars extends MiniPhase with IdentityDenotTransformer { thisTransfo
cpy.Assign(tree)(lhs1, tree.rhs)
}
}
-}
\ No newline at end of file
+}
diff --git a/src/dotty/tools/dotc/transform/CollectEntryPoints.scala b/src/dotty/tools/dotc/transform/CollectEntryPoints.scala
index 1109d1f90..2ccadddc3 100644
--- a/src/dotty/tools/dotc/transform/CollectEntryPoints.scala
+++ b/src/dotty/tools/dotc/transform/CollectEntryPoints.scala
@@ -113,4 +113,4 @@ class SymbolOrdering(implicit ctx: Context) extends Ordering[Symbol] {
override def compare(x: Symbol, y: Symbol): Int = {
x.fullName.toString.compareTo(y.fullName.toString)
}
-}
\ No newline at end of file
+}
diff --git a/src/dotty/tools/dotc/transform/Constructors.scala b/src/dotty/tools/dotc/transform/Constructors.scala
index 165210cfb..cd64497e9 100644
--- a/src/dotty/tools/dotc/transform/Constructors.scala
+++ b/src/dotty/tools/dotc/transform/Constructors.scala
@@ -185,7 +185,7 @@ class Constructors extends MiniPhaseTransform with SymTransformer { thisTransfor
if (acc.name != nme.OUTER) assigns
else {
// insert test: if ($outer eq null) throw new NullPointerException
- val nullTest =
+ val nullTest =
If(ref(param).select(defn.Object_eq).appliedTo(Literal(Constant(null))),
Throw(New(defn.NullPointerExceptionClass.typeRef, Nil)),
unitLiteral)
@@ -214,4 +214,4 @@ class Constructors extends MiniPhaseTransform with SymTransformer { thisTransfor
rhs = Block(superCalls ::: copyParams ::: followConstrStats, unitLiteral)),
body = clsStats.toList)
}
-}
\ No newline at end of file
+}
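
The Constructors hunk above synthesizes a null check on the outer-pointer constructor parameter. Written out by hand (names are illustrative, this is not the phase's generated tree), the inserted guard amounts to:

    object OuterNullCheck {
      // The nested class takes its outer pointer as an explicit parameter and
      // rejects null, mirroring the `if ($outer eq null) throw ...` test above.
      class Inner(outer: AnyRef) {
        if (outer eq null) throw new NullPointerException
      }
      def main(args: Array[String]): Unit = {
        new Inner(new Object)                       // fine
        try new Inner(null) catch {
          case _: NullPointerException => println("outer was null")
        }
      }
    }
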
diff --git a/src/dotty/tools/dotc/transform/ElimByName.scala b/src/dotty/tools/dotc/transform/ElimByName.scala
index 5bd9c045a..2d0ecaf99 100644
--- a/src/dotty/tools/dotc/transform/ElimByName.scala
+++ b/src/dotty/tools/dotc/transform/ElimByName.scala
@@ -73,7 +73,7 @@ class ElimByName extends MiniPhaseTransform with InfoTransformer { thisTransform
case formalExpr: ExprType =>
val argType = arg.tpe.widen
val argFun = arg match {
- case Apply(Select(qual, nme.apply), Nil)
+ case Apply(Select(qual, nme.apply), Nil)
if qual.tpe.derivesFrom(defn.FunctionClass(0)) && isPureExpr(qual) =>
qual
case _ =>
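
The ElimByName hunk above elaborates by-name arguments into `Function0` values and, when the argument is already `qual.apply()` for a pure `Function0` value `qual`, reuses `qual` instead of allocating a fresh closure. A small hand-written analogue of the two cases (names are hypothetical):

    object ByNameElim {
      def byName(x: => Int): Int = x            // source form: by-name parameter
      def elaborated(x: () => Int): Int = x()   // after the phase: a Function0
      def main(args: Array[String]): Unit = {
        val f: () => Int = () => 41 + 1
        println(byName(f()))                    // 42, source-level call
        println(elaborated(() => f()))          // 42, general case: fresh closure
        println(elaborated(f))                  // 42, optimized case: reuse f itself
      }
    }
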
diff --git a/src/dotty/tools/dotc/transform/ElimRepeated.scala b/src/dotty/tools/dotc/transform/ElimRepeated.scala
index 28131e1e9..414d09541 100644
--- a/src/dotty/tools/dotc/transform/ElimRepeated.scala
+++ b/src/dotty/tools/dotc/transform/ElimRepeated.scala
@@ -59,9 +59,9 @@ class ElimRepeated extends MiniPhaseTransform with InfoTransformer with Annotati
transformTypeOfTree(tree)
override def transformApply(tree: Apply)(implicit ctx: Context, info: TransformerInfo): Tree = {
- val args1 = tree.args.map {
+ val args1 = tree.args.map {
case arg: Typed if isWildcardStarArg(arg) =>
- if (tree.fun.symbol.is(JavaDefined) && arg.expr.tpe.derivesFrom(defn.SeqClass))
+ if (tree.fun.symbol.is(JavaDefined) && arg.expr.tpe.derivesFrom(defn.SeqClass))
seqToArray(arg.expr)
else arg.expr
case arg => arg
@@ -71,7 +71,7 @@ class ElimRepeated extends MiniPhaseTransform with InfoTransformer with Annotati
/** Convert sequence argument to Java array */
private def seqToArray(tree: Tree)(implicit ctx: Context): Tree = tree match {
- case SeqLiteral(elems) =>
+ case SeqLiteral(elems) =>
JavaSeqLiteral(elems)
case _ =>
val elemType = tree.tpe.firstBaseArgInfo(defn.SeqClass)
@@ -84,7 +84,7 @@ class ElimRepeated extends MiniPhaseTransform with InfoTransformer with Annotati
.ensureConforms(defn.ArrayType(elemType))
// Because of phantomclasses, the Java array's type might not conform to the resturn type
}
-
+
override def transformTypeApply(tree: TypeApply)(implicit ctx: Context, info: TransformerInfo): Tree =
transformTypeOfTree(tree)
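
The ElimRepeated hunks above convert a Seq vararg argument into a Java array when the callee is Java-defined. A small illustration of why (not the phase's own code): Scala varargs arrive as a `Seq`, while Java varargs receive an array, so a `Seq` passed with `: _*` to a Java method has to be wrapped first:

    object VarargsBridge {
      def scalaJoin(parts: String*): String = parts.mkString("-")  // parts is a Seq
      def main(args: Array[String]): Unit = {
        val parts = Seq("a", "b", "c")
        println(scalaJoin(parts: _*))                        // a-b-c
        println(java.util.Arrays.asList(parts.toArray: _*))  // [a, b, c]
      }
    }
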
diff --git a/src/dotty/tools/dotc/transform/Erasure.scala b/src/dotty/tools/dotc/transform/Erasure.scala
index 3674f7375..640d5f13e 100644
--- a/src/dotty/tools/dotc/transform/Erasure.scala
+++ b/src/dotty/tools/dotc/transform/Erasure.scala
@@ -258,7 +258,7 @@ object Erasure extends TypeTestsCasts{
override def typedLiteral(tree: untpd.Literal)(implicit ctc: Context): Literal =
if (tree.typeOpt.isRef(defn.UnitClass)) tree.withType(tree.typeOpt)
else super.typedLiteral(tree)
-
+
/** Type check select nodes, applying the following rewritings exhaustively
* on selections `e.m`, where `OT` is the type of the owner of `m` and `ET`
* is the erased type of the selection's original qualifier expression.
@@ -395,24 +395,24 @@ object Erasure extends TypeTestsCasts{
}
// The following four methods take as the proto-type the erasure of the pre-existing type,
- // if the original proto-type is not a value type.
+ // if the original proto-type is not a value type.
// This makes all branches be adapted to the correct type.
override def typedSeqLiteral(tree: untpd.SeqLiteral, pt: Type)(implicit ctx: Context) =
super.typedSeqLiteral(tree, erasure(tree.typeOpt))
- // proto type of typed seq literal is original type;
+ // proto type of typed seq literal is original type;
override def typedIf(tree: untpd.If, pt: Type)(implicit ctx: Context) =
super.typedIf(tree, adaptProto(tree, pt))
-
+
override def typedMatch(tree: untpd.Match, pt: Type)(implicit ctx: Context) =
super.typedMatch(tree, adaptProto(tree, pt))
-
- override def typedTry(tree: untpd.Try, pt: Type)(implicit ctx: Context) =
+
+ override def typedTry(tree: untpd.Try, pt: Type)(implicit ctx: Context) =
super.typedTry(tree, adaptProto(tree, pt))
private def adaptProto(tree: untpd.Tree, pt: Type)(implicit ctx: Context) = {
if (pt.isValueType) pt else {
- if(tree.typeOpt.derivesFrom(ctx.definitions.UnitClass))
+ if (tree.typeOpt.derivesFrom(ctx.definitions.UnitClass))
tree.typeOpt
else erasure(tree.typeOpt)
}
diff --git a/src/dotty/tools/dotc/transform/ExplicitOuter.scala b/src/dotty/tools/dotc/transform/ExplicitOuter.scala
index 13462693b..8baaa3a11 100644
--- a/src/dotty/tools/dotc/transform/ExplicitOuter.scala
+++ b/src/dotty/tools/dotc/transform/ExplicitOuter.scala
@@ -297,9 +297,9 @@ object ExplicitOuter {
case ex: ClassCastException =>
throw new ClassCastException(i"no path exists from ${ctx.owner.enclosingClass} to $toCls")
}
-
- /** The outer parameter definition of a constructor if it needs one */
- def paramDefs(constr: Symbol): List[ValDef] =
+
+ /** The outer parameter definition of a constructor if it needs one */
+ def paramDefs(constr: Symbol): List[ValDef] =
if (constr.isConstructor && hasOuterParam(constr.owner.asClass)) {
val MethodType(outerName :: _, outerType :: _) = constr.info
val outerSym = ctx.newSymbol(constr, outerName, Param, outerType)
@@ -307,4 +307,4 @@ object ExplicitOuter {
}
else Nil
}
-}
\ No newline at end of file
+}
diff --git a/src/dotty/tools/dotc/transform/ExtensionMethods.scala b/src/dotty/tools/dotc/transform/ExtensionMethods.scala
index a006f04a7..26f26fc2f 100644
--- a/src/dotty/tools/dotc/transform/ExtensionMethods.scala
+++ b/src/dotty/tools/dotc/transform/ExtensionMethods.scala
@@ -88,12 +88,12 @@ class ExtensionMethods extends MiniPhaseTransform with DenotTransformer with Ful
case decl: MultiDenotation =>
val alts = decl.alternatives
val index = alts indexOf imeth.denot
- assert(index >= 0, alts+" does not contain "+imeth)
- def altName(index: Int) = (imeth.name+"$extension"+index).toTermName
+ assert(index >= 0, alts + " does not contain " + imeth)
+ def altName(index: Int) = (imeth.name + "$extension" + index).toTermName
altName(index) #:: ((0 until alts.length).toStream filter (index != _) map altName)
case decl =>
- assert(decl.exists, imeth.name+" not found in "+imeth.owner+"'s decls: "+imeth.owner.info.decls)
- Stream((imeth.name+"$extension").toTermName)
+ assert(decl.exists, imeth.name + " not found in " + imeth.owner + "'s decls: " + imeth.owner.info.decls)
+ Stream((imeth.name + "$extension").toTermName)
}
}
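
The ExtensionMethods hunk above builds the mangled names of extension methods: a method `m` becomes `m$extension`, and when `m` is overloaded the index of the alternative is appended so each overload gets a distinct name. A sketch of that naming scheme in isolation:

    object ExtensionNames {
      def extensionName(method: String, overloadIndex: Option[Int]): String =
        overloadIndex match {
          case Some(i) => s"$method$$extension$i"
          case None    => s"$method$$extension"
        }
      def main(args: Array[String]): Unit = {
        println(extensionName("distance", None))     // distance$extension
        println(extensionName("distance", Some(2)))  // distance$extension2
      }
    }
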
diff --git a/src/dotty/tools/dotc/transform/FirstTransform.scala b/src/dotty/tools/dotc/transform/FirstTransform.scala
index b8d2b44f3..cfe650b99 100644
--- a/src/dotty/tools/dotc/transform/FirstTransform.scala
+++ b/src/dotty/tools/dotc/transform/FirstTransform.scala
@@ -87,7 +87,7 @@ class FirstTransform extends MiniPhaseTransform with IdentityDenotTransformer wi
case stat: TypeDef if singleClassDefs contains stat.name =>
val objName = stat.name.toTermName
val nameClash = stats.exists {
- case other: MemberDef =>
+ case other: MemberDef =>
other.name == objName && other.symbol.info.isParameterless
case _ =>
false
@@ -99,7 +99,7 @@ class FirstTransform extends MiniPhaseTransform with IdentityDenotTransformer wi
def skipJava(stats: List[Tree]): List[Tree] = // packages get a JavaDefined flag. Dont skip them
stats.filter(t => !(t.symbol is(Flags.JavaDefined, Flags.Package)))
-
+
addMissingCompanions(reorder(skipJava(stats)))
}
diff --git a/src/dotty/tools/dotc/transform/FullParameterization.scala b/src/dotty/tools/dotc/transform/FullParameterization.scala
index 2e3015275..acfeda48e 100644
--- a/src/dotty/tools/dotc/transform/FullParameterization.scala
+++ b/src/dotty/tools/dotc/transform/FullParameterization.scala
@@ -91,7 +91,7 @@ trait FullParameterization {
case info: ExprType => (0, info.resultType)
case _ => (0, info)
}
- val ctparams = if(abstractOverClass) clazz.typeParams else Nil
+ val ctparams = if (abstractOverClass) clazz.typeParams else Nil
val ctnames = ctparams.map(_.name.unexpandedName)
/** The method result type */
@@ -234,4 +234,4 @@ trait FullParameterization {
.appliedTo(This(originalDef.symbol.enclosingClass.asClass))
.appliedToArgss(originalDef.vparamss.nestedMap(vparam => ref(vparam.symbol)))
.withPos(originalDef.rhs.pos)
-}
\ No newline at end of file
+}
diff --git a/src/dotty/tools/dotc/transform/LazyVals.scala b/src/dotty/tools/dotc/transform/LazyVals.scala
index 87644d2f9..a28102d7b 100644
--- a/src/dotty/tools/dotc/transform/LazyVals.scala
+++ b/src/dotty/tools/dotc/transform/LazyVals.scala
@@ -27,7 +27,7 @@ class LazyVals extends MiniPhaseTransform with SymTransformer {
def transformSym(d: SymDenotation)(implicit ctx: Context): SymDenotation = {
- if(d is(Flags.Lazy, butNot = Flags.ModuleVal | Flags.Method)) {
+ if (d is(Flags.Lazy, butNot = Flags.ModuleVal | Flags.Method)) {
// Method flag is set on lazy vals coming from Unpickler. They are already methods and shouldn't be transformed twice
d.copySymDenotation(
initFlags = d.flags | Flags.Method,
@@ -301,7 +301,7 @@ class LazyVals extends MiniPhaseTransform with SymTransformer {
info.ord += 1
ord = info.ord % flagsPerLong
val id = info.ord / flagsPerLong
- if(ord != 0) { // there are unused bits in already existing flag
+ if (ord != 0) { // there are unused bits in already existing flag
offsetSymbol = companion.moduleClass.info.decl((StdNames.nme.LAZY_FIELD_OFFSET + id.toString).toTermName)
.suchThat(sym => (sym is Flags.Synthetic) && sym.isTerm)
.symbol.asTerm
@@ -335,7 +335,7 @@ class LazyVals extends MiniPhaseTransform with SymTransformer {
val cas = Select(ref(helperModule), RLazyVals.Names.cas.toTermName)
val accessor = mkThreadSafeDef(x.symbol.asTerm, claz, ord, containerSymbol, x.rhs, tpe, offset, getFlag, state, cas, setFlag, wait)
- if(flag eq EmptyTree)
+ if (flag eq EmptyTree)
Thicket(List(containerTree, accessor))
else Thicket(List(containerTree, flag, accessor))
}
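
The LazyVals hunk above locates a lazy val's initialization state: the running counter `info.ord` is split with `/ flagsPerLong` (which bitmap field holds the state) and `% flagsPerLong` (which slot inside that field). A tiny sketch of that arithmetic; the value 32 used for `flagsPerLong` here is an assumption for illustration only:

    object LazyFlagPacking {
      val flagsPerLong = 32   // assumed capacity of one bitmap field, for illustration
      def fieldIdAndSlot(ord: Int): (Int, Int) =
        (ord / flagsPerLong, ord % flagsPerLong)
      def main(args: Array[String]): Unit = {
        println(fieldIdAndSlot(0))    // (0,0): first flag, first offset field
        println(fieldIdAndSlot(33))   // (1,1): 34th flag spills into the next field
      }
    }
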
diff --git a/src/dotty/tools/dotc/transform/Literalize.scala b/src/dotty/tools/dotc/transform/Literalize.scala
index c5b6f1c21..4a223e912 100644
--- a/src/dotty/tools/dotc/transform/Literalize.scala
+++ b/src/dotty/tools/dotc/transform/Literalize.scala
@@ -88,4 +88,4 @@ class Literalize extends MiniPhaseTransform { thisTransform =>
case _ =>
}
}
-}
\ No newline at end of file
+}
diff --git a/src/dotty/tools/dotc/transform/Memoize.scala b/src/dotty/tools/dotc/transform/Memoize.scala
index 721857516..75a195032 100644
--- a/src/dotty/tools/dotc/transform/Memoize.scala
+++ b/src/dotty/tools/dotc/transform/Memoize.scala
@@ -88,4 +88,4 @@ import Decorators._
else tree
}
private val NoFieldNeeded = Lazy | Deferred | JavaDefined
-}
\ No newline at end of file
+}
diff --git a/src/dotty/tools/dotc/transform/NormalizeFlags.scala b/src/dotty/tools/dotc/transform/NormalizeFlags.scala
index bdec800c7..2f5907b75 100644
--- a/src/dotty/tools/dotc/transform/NormalizeFlags.scala
+++ b/src/dotty/tools/dotc/transform/NormalizeFlags.scala
@@ -9,7 +9,7 @@ import SymDenotations.SymDenotation
import TreeTransforms.MiniPhaseTransform
import Flags._, Symbols._
-/** 1. Widens all private[this] and protected[this] qualifiers to just private/protected
+/** 1. Widens all private[this] and protected[this] qualifiers to just private/protected
* 2. Sets PureInterface flag for traits that only have pure interface members and that
* do not have initialization code. A pure interface member is either an abstract
* or alias type definition or a deferred val or def.
@@ -19,12 +19,12 @@ class NormalizeFlags extends MiniPhaseTransform with SymTransformer { thisTransf
def transformSym(ref: SymDenotation)(implicit ctx: Context) = {
var newFlags = ref.flags &~ Local
- if (ref.is(NoInitsTrait) && ref.info.decls.forall(isPureInterfaceMember))
+ if (ref.is(NoInitsTrait) && ref.info.decls.forall(isPureInterfaceMember))
newFlags |= PureInterface
if (newFlags != ref.flags) ref.copySymDenotation(initFlags = newFlags)
else ref
}
-
- private def isPureInterfaceMember(sym: Symbol)(implicit ctx: Context) =
+
+ private def isPureInterfaceMember(sym: Symbol)(implicit ctx: Context) =
if (sym.isTerm) sym.is(Deferred) else !sym.isClass
}
diff --git a/src/dotty/tools/dotc/transform/PatternMatcher.scala b/src/dotty/tools/dotc/transform/PatternMatcher.scala
index 1336d39e4..0a019e1c5 100644
--- a/src/dotty/tools/dotc/transform/PatternMatcher.scala
+++ b/src/dotty/tools/dotc/transform/PatternMatcher.scala
@@ -179,7 +179,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
override def codegen: AbsCodegen = optimizedCodegen
// when we know we're targetting Option, do some inlining the optimizer won't do
- // for example, `o.flatMap(f)` becomes `if(o == None) None else f(o.get)`, similarly for orElse and guard
+ // for example, `o.flatMap(f)` becomes `if (o == None) None else f(o.get)`, similarly for orElse and guard
// this is a special instance of the advanced inlining optimization that takes a method call on
// an object of a type that only has two concrete subclasses, and inlines both bodies, guarded by an if to distinguish the two cases
object optimizedCodegen extends CommonCodegen {
@@ -325,7 +325,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
private[TreeMakers] def incorporateOuterRebinding(outerSubst: Rebindings): Unit = {
if (currSub ne null) {
- ctx.debuglog("BUG: incorporateOuterRebinding called more than once for "+ ((this, currSub, outerSubst)))
+ ctx.debuglog("BUG: incorporateOuterRebinding called more than once for " + ((this, currSub, outerSubst)))
Thread.dumpStack()
}
else currSub = outerSubst >> rebindings
@@ -364,7 +364,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
def chainBefore(next: Tree)(casegen: Casegen): Tree = // assert(next eq EmptyTree)
/*atPos(body.pos)*/(casegen.one(body)) // since SubstOnly treemakers are dropped, need to do it here
- override def toString = "B"+((body, matchPt))
+ override def toString = "B" + ((body, matchPt))
}
/**
@@ -407,11 +407,11 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
val nextBinder: Symbol
lazy val introducedRebindings = /*
- if(nextBinder ne prevBinder) Rebindings(prevBinder, nextBinder)
+ if (nextBinder ne prevBinder) Rebindings(prevBinder, nextBinder)
else */ NoRebindings
def chainBefore(next: Tree)(casegen: Casegen): Tree =
- if(prevBinder ne nextBinder) // happens when typeTest is known to succeed
+ if (prevBinder ne nextBinder) // happens when typeTest is known to succeed
/*atPos(pos)(*/casegen.flatMapCond(cond, res, nextBinder, next)//)
else casegen.flatMapGuard(cond, next)
}
@@ -477,7 +477,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
def refTpeWiden = ref.tpe.widen
def bindInfoWiden = bind.info.widen
def loc = bind.showFullName
- if(!(ref.tpe <:< bind.info.widen)) {
+ if (!(ref.tpe <:< bind.info.widen)) {
ctx.debuglog(s"here ${bind.showFullName} expected: ${bindInfoWiden.show} got: ${refTpeWiden.show}")
}
val refCasted = ref.ensureConforms(bind.info)
@@ -532,7 +532,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
}
- override def toString = "X"+((extractor, nextBinder.name))
+ override def toString = "X" + ((extractor, nextBinder.name))
}
/**
@@ -584,7 +584,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
}
}
- override def toString = "P"+((prevBinder.name, extraCond getOrElse "", introducedRebindings))
+ override def toString = "P" + ((prevBinder.name, extraCond getOrElse "", introducedRebindings))
}
object IrrefutableExtractorTreeMaker {
@@ -706,7 +706,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
case class TypeTestTreeMaker(afterTest: Symbol, testedBinder: Symbol, expectedTp: Type, nextBinderTp: Type)(override val pos: Position, extractorArgTypeTest: Boolean = false) extends CondTreeMaker {
import TypeTestTreeMaker._
- ctx.debuglog("TTTM"+((prevBinder, extractorArgTypeTest, testedBinder, expectedTp, nextBinderTp)))
+ ctx.debuglog("TTTM" + ((prevBinder, extractorArgTypeTest, testedBinder, expectedTp, nextBinderTp)))
val prevBinder = testedBinder
@@ -802,7 +802,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
// dotty deviation
renderCondition(nonNullImpliedByTestChecker(binder)).asInstanceOf[Boolean]
- override def toString = "TT"+((expectedTp, testedBinder.name, nextBinderTp))
+ override def toString = "TT" + ((expectedTp, testedBinder.name, nextBinderTp))
}
// need to substitute to deal with existential types -- TODO: deal with existentials better, don't substitute (see RichClass during quick.comp)
@@ -814,7 +814,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
// equals need not be well-behaved, so don't intersect with pattern's (stabilized) type (unlike MaybeBoundTyped's accumType, where it's required)
val cond = codegen._equals(patTree, prevBinder)
val res = ref(prevBinder).ensureConforms(nextBinderTp)
- override def toString = "ET"+((prevBinder.name, patTree))
+ override def toString = "ET" + ((prevBinder.name, patTree))
}
case class AlternativesTreeMaker(prevBinder: Symbol, var altss: List[List[TreeMaker]], pos: Position) extends TreeMaker with NoNewBinders {
@@ -843,7 +843,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
val pos = guardTree.pos
def chainBefore(next: Tree)(casegen: Casegen): Tree = casegen.flatMapGuard(guardTree, next)
- override def toString = "G("+ guardTree +")"
+ override def toString = "G(" + guardTree + ")"
}
// combineExtractors changes the current substitution's of the tree makers in `treeMakers`
@@ -873,7 +873,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
def matchFailGen = matchFailGenOverride orElse Some((arg: Symbol) => Throw(New(defn.MatchErrorType, List(ref(arg)))))
- ctx.debuglog("combining cases: "+ (casesRebindingPropagated.map(_.mkString(" >> ")).mkString("{", "\n", "}")))
+ ctx.debuglog("combining cases: " + (casesRebindingPropagated.map(_.mkString(" >> ")).mkString("{", "\n", "}")))
val (suppression, requireSwitch): (Suppression, Boolean) =
/*if (settings.XnoPatmatAnalysis)*/ (Suppression.NoSuppression, false)
@@ -954,7 +954,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
object WildcardPattern {
def unapply(pat: Tree): Boolean = pat match {
case Bind(nme.WILDCARD, WildcardPattern()) => true // don't skip when binding an interesting symbol!
- case t if(tpd.isWildcardArg(t)) => true
+ case t if (tpd.isWildcardArg(t)) => true
case x: Ident => isVarPattern(x)
case Alternative(ps) => ps forall unapply
case EmptyTree => true
@@ -1173,7 +1173,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
/*if (phase.id >= currentRun.uncurryPhase.id)
devWarning(s"running translateMatch past uncurry (at $phase) on $selector match $cases")*/
- ctx.debuglog("translating "+ cases.mkString("{", "\n", "}"))
+ ctx.debuglog("translating " + cases.mkString("{", "\n", "}"))
//val start = if (Statistics.canEnable) Statistics.startTimer(patmatNanos) else null
@@ -1240,7 +1240,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
* 2) is easy -- it looks like: `translatePattern_1.flatMap(translatePattern_2....flatMap(translatePattern_N.flatMap(translateGuard.flatMap((x_i) => success(Xbody(x_i)))))...)`
* this must be right-leaning tree, as can be seen intuitively by considering the scope of bound variables:
* variables bound by pat_1 must be visible from the function inside the left-most flatMap right up to Xbody all the way on the right
- * 1) is tricky because translatePattern_i determines the shape of translatePattern_i+1:
+ * 1) is tricky because translatePattern_i determines the shape of translatePattern_i + 1:
* zoom in on `translatePattern_1.flatMap(translatePattern_2)` for example -- it actually looks more like:
* `translatePattern_1(x_scrut).flatMap((x_1) => {y_i -> x_1._i}translatePattern_2)`
*
@@ -1424,7 +1424,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
else genDrop(binder, expectedLength)
)
// this error-condition has already been checked by checkStarPatOK:
- // if(isSeq) assert(firstIndexingBinder + nbIndexingIndices + (if(lastIsStar) 1 else 0) == totalArity, "(resultInMonad, ts, subPatTypes, subPats)= "+(resultInMonad, ts, subPatTypes, subPats))
+ // if (isSeq) assert(firstIndexingBinder + nbIndexingIndices + (if (lastIsStar) 1 else 0) == totalArity, "(resultInMonad, ts, subPatTypes, subPats)= " +(resultInMonad, ts, subPatTypes, subPats))
// [1] there are `firstIndexingBinder` non-seq tuple elements preceding the Seq
// [2] then we have to index the binder that represents the sequence for the remaining subpatterns, except for...
@@ -1440,7 +1440,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
// require (nbSubPats > 0 && (!lastIsStar || isSeq))
protected def subPatRefs(binder: Symbol): List[Tree] = {
val refs = if (totalArity > 0 && isSeq) subPatRefsSeq(binder)
- else if(defn.isProductSubType(binder.info)) productElemsToN(binder, totalArity)
+ else if (defn.isProductSubType(binder.info)) productElemsToN(binder, totalArity)
else ref(binder):: Nil
val refsSymbols = refs.map(_.symbol) // just for debugging
refs
@@ -1551,7 +1551,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
// the trees that select the subpatterns on the extractor's result, referenced by `binder`
// require (totalArity > 0 && (!lastIsStar || isSeq))
protected def subPatRefs(binder: Symbol, subpatBinders: List[Symbol], binderTypeTested: Type): List[Tree] = {
- if(aligner.isSingle && aligner.extractor.prodArity == 1 && defn.isTupleType(binder.info)) {
+ if (aligner.isSingle && aligner.extractor.prodArity == 1 && defn.isTupleType(binder.info)) {
// special case for extractor
// comparing with scalac additional assertions added
val subpw = subpatBinders.head.info.widen
@@ -1815,8 +1815,8 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
//println(s"${_id}unapplyArgs(${result.widen}")
val expanded:List[Type] = /*(
if (result =:= defn.BooleanType) Nil
- else if(defn.isProductSubType(result)) productSelectorTypes(result)
- else if(result.classSymbol is Flags.CaseClass) result.decls.filter(x => x.is(Flags.CaseAccessor) && x.is(Flags.Method)).map(_.info).toList
+ else if (defn.isProductSubType(result)) productSelectorTypes(result)
+ else if (result.classSymbol is Flags.CaseClass) result.decls.filter(x => x.is(Flags.CaseAccessor) && x.is(Flags.Method)).map(_.info).toList
else result.select(nme.get) :: Nil
)*/
if ((extractorMemberType(resultType, nme.isDefined) isRef defn.BooleanClass) && resultOfGet.exists)
@@ -1915,4 +1915,4 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
}
}
}
-}
\ No newline at end of file
+}
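
The long comment in the PatternMatcher hunks above describes a translated match as a right-leaning chain of `flatMap`s, so that variables bound by earlier patterns remain in scope for later steps and for the body. An Option-based analogue of that shape for a single case `case x: Int if x > 0 => x * 2` (hand-written, not the generated code):

    object MatchAsFlatMaps {
      def typeTest(scrut: Any): Option[Int] = scrut match {
        case i: Int => Some(i)
        case _      => None
      }
      def guard(x: Int): Option[Int] = if (x > 0) Some(x) else None
      // right-leaning chain: x stays in scope for the guard and the body
      def translated(scrut: Any): Option[Int] =
        typeTest(scrut).flatMap(x => guard(x).flatMap(x1 => Some(x1 * 2)))
      def main(args: Array[String]): Unit = {
        println(translated(21))     // Some(42)
        println(translated(-3))     // None: guard fails
        println(translated("no"))   // None: type test fails
      }
    }
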
diff --git a/src/dotty/tools/dotc/transform/Pickler.scala b/src/dotty/tools/dotc/transform/Pickler.scala
index 626a75139..445fed2c4 100644
--- a/src/dotty/tools/dotc/transform/Pickler.scala
+++ b/src/dotty/tools/dotc/transform/Pickler.scala
@@ -16,15 +16,15 @@ class Pickler extends Phase {
import ast.tpd._
override def phaseName: String = "pickler"
-
+
private def output(name: String, msg: String) = {
val s = new PrintStream(name)
s.print(msg)
s.close
}
-
+
private val beforePickling = new mutable.HashMap[CompilationUnit, String]
-
+
override def run(implicit ctx: Context): Unit = {
val unit = ctx.compilationUnit
if (!unit.isJava) {
@@ -46,25 +46,25 @@ class Pickler extends Phase {
}
// println(i"rawBytes = \n$rawBytes%\n%") // DEBUG
if (pickling ne noPrinter) new TastyPrinter(pickler.assembleParts()).printContents()
- }
+ }
}
-
+
override def runOn(units: List[CompilationUnit])(implicit ctx: Context): List[CompilationUnit] = {
val result = super.runOn(units)
- if (ctx.settings.YtestPickler.value)
+ if (ctx.settings.YtestPickler.value)
testUnpickler(units)(ctx.fresh.setPeriod(Period(ctx.runId + 1, FirstPhaseId)))
result
}
-
+
private def testUnpickler(units: List[CompilationUnit])(implicit ctx: Context): Unit = {
pickling.println(i"testing unpickler at run ${ctx.runId}")
ctx.definitions.init
- val unpicklers =
+ val unpicklers =
for (unit <- units) yield {
val unpickler = new DottyUnpickler(unit.pickler.assembleParts())
unpickler.enter(roots = Set())
unpickler
- }
+ }
pickling.println("************* entered toplevel ***********")
for ((unpickler, unit) <- unpicklers zip units) {
val unpickled = unpickler.body(readPositions = false)
@@ -72,7 +72,7 @@ class Pickler extends Phase {
}
}
- private def testSame(unpickled: String, previous: String, unit: CompilationUnit)(implicit ctx: Context) =
+ private def testSame(unpickled: String, previous: String, unit: CompilationUnit)(implicit ctx: Context) =
if (previous != unpickled) {
output("before-pickling.txt", previous)
output("after-pickling.txt", unpickled)
@@ -80,4 +80,4 @@ class Pickler extends Phase {
|
| diff before-pickling.txt after-pickling.txt""".stripMargin)
}
-}
\ No newline at end of file
+}
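
The Pickler hunks above implement the `-Ytest-pickler` check: pretty-print each unit before pickling, unpickle it again in a fresh run, pretty-print the result, and report a failure if the two renderings differ. The generic shape of such a round-trip check, with plain strings standing in for trees and TASTY bytes (purely illustrative, not the compiler's API):

    object RoundTripCheck {
      def pickle(tree: String): Array[Byte] = tree.getBytes("UTF-8")
      def unpickle(bytes: Array[Byte]): String = new String(bytes, "UTF-8")
      def testSame(unpickled: String, previous: String, unit: String): Unit =
        if (previous != unpickled) sys.error(s"pickling difference for $unit")
      def main(args: Array[String]): Unit = {
        val before = "class C { def f = 1 }"
        testSame(unpickle(pickle(before)), before, "C.scala")
        println("round trip ok")
      }
    }
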
diff --git a/src/dotty/tools/dotc/transform/ResolveSuper.scala b/src/dotty/tools/dotc/transform/ResolveSuper.scala
index 921a5f1fa..9c0e34256 100644
--- a/src/dotty/tools/dotc/transform/ResolveSuper.scala
+++ b/src/dotty/tools/dotc/transform/ResolveSuper.scala
@@ -48,7 +48,7 @@ class ResolveSuper extends MiniPhaseTransform with IdentityDenotTransformer { th
override def phaseName: String = "resolveSuper"
- override def runsAfter = Set(classOf[ElimByName]) // verified empirically, need to figure out what the reason is.
+ override def runsAfter = Set(classOf[ElimByName]) // verified empirically, need to figure out what the reason is.
/** Returns the symbol that is accessed by a super-accessor in a mixin composition.
*
diff --git a/src/dotty/tools/dotc/transform/RestoreScopes.scala b/src/dotty/tools/dotc/transform/RestoreScopes.scala
index 0eb1935a6..3a168b1fe 100644
--- a/src/dotty/tools/dotc/transform/RestoreScopes.scala
+++ b/src/dotty/tools/dotc/transform/RestoreScopes.scala
@@ -24,7 +24,7 @@ class RestoreScopes extends MiniPhaseTransform with IdentityDenotTransformer { t
override def transformTypeDef(tree: TypeDef)(implicit ctx: Context, info: TransformerInfo) = {
val TypeDef(_, impl: Template) = tree
- //
+ //
val restoredDecls = newScope
for (stat <- impl.constr :: impl.body)
if (stat.isInstanceOf[MemberDef] && stat.symbol.exists)
@@ -33,7 +33,7 @@ class RestoreScopes extends MiniPhaseTransform with IdentityDenotTransformer { t
// For top-level classes this does nothing.
val cls = tree.symbol.asClass
val pkg = cls.owner.asClass
- pkg.enter(cls)
+ pkg.enter(cls)
val cinfo = cls.classInfo
tree.symbol.copySymDenotation(
info = cinfo.derivedClassInfo( // Dotty deviation: Cannot expand cinfo inline without a type error
diff --git a/src/dotty/tools/dotc/transform/Splitter.scala b/src/dotty/tools/dotc/transform/Splitter.scala
index 0a1e1b238..62a080f37 100644
--- a/src/dotty/tools/dotc/transform/Splitter.scala
+++ b/src/dotty/tools/dotc/transform/Splitter.scala
@@ -124,4 +124,4 @@ class Splitter extends MiniPhaseTransform { thisTransform =>
private val typeApply = (fn: Tree, args: List[Tree]) => (ctx: Context) => TypeApply(fn, args)(ctx)
private val apply = (fn: Tree, args: List[Tree]) => (ctx: Context) => Apply(fn, args)(ctx)
-}
\ No newline at end of file
+}
diff --git a/src/dotty/tools/dotc/transform/SuperAccessors.scala b/src/dotty/tools/dotc/transform/SuperAccessors.scala
index b55628868..8857b6921 100644
--- a/src/dotty/tools/dotc/transform/SuperAccessors.scala
+++ b/src/dotty/tools/dotc/transform/SuperAccessors.scala
@@ -72,7 +72,7 @@ class SuperAccessors extends MacroTransform with IdentityDenotTransformer { this
private val accDefs = mutable.Map[Symbol, ListBuffer[Tree]]()
private def storeAccessorDefinition(clazz: Symbol, tree: Tree) = {
- val buf = accDefs.getOrElse(clazz, sys.error("no acc def buf for "+clazz))
+ val buf = accDefs.getOrElse(clazz, sys.error("no acc def buf for " + clazz))
buf += tree
}
@@ -554,7 +554,7 @@ class SuperAccessors extends MacroTransform with IdentityDenotTransformer { this
assert(referencingClass.isClass, referencingClass)
referencingClass
}
- else if(referencingClass.owner.enclosingClass.exists)
+ else if (referencingClass.owner.enclosingClass.exists)
hostForAccessorOf(sym, referencingClass.owner.enclosingClass.asClass)
else
referencingClass
diff --git a/src/dotty/tools/dotc/transform/TailRec.scala b/src/dotty/tools/dotc/transform/TailRec.scala
index 2fd0c439c..09b4c735b 100644
--- a/src/dotty/tools/dotc/transform/TailRec.scala
+++ b/src/dotty/tools/dotc/transform/TailRec.scala
@@ -210,7 +210,7 @@ class TailRec extends MiniPhaseTransform with DenotTransformer with FullParamete
val reciever = noTailTransform(recv)
val callTargs: List[tpd.Tree] =
- if(abstractOverClass) {
+ if (abstractOverClass) {
val classTypeArgs = recv.tpe.baseTypeWithArgs(enclosingClass).argInfos
targs ::: classTypeArgs.map(x => ref(x.typeSymbol))
} else targs
diff --git a/src/dotty/tools/dotc/transform/TraitConstructors.scala b/src/dotty/tools/dotc/transform/TraitConstructors.scala
index 8c92f1f7b..a98f52ca4 100644
--- a/src/dotty/tools/dotc/transform/TraitConstructors.scala
+++ b/src/dotty/tools/dotc/transform/TraitConstructors.scala
@@ -22,7 +22,7 @@ class TraitConstructors extends MiniPhaseTransform with SymTransformer {
override def treeTransformPhase: Phase = this.phase
def transformSym(sym: SymDenotation)(implicit ctx: Context): SymDenotation = {
- if(sym.isPrimaryConstructor && (sym.owner is Flags.Trait))
+ if (sym.isPrimaryConstructor && (sym.owner is Flags.Trait))
sym.copySymDenotation(name = nme.INITIALIZER_PREFIX ++ sym.owner.fullName)
else sym
}
diff --git a/src/dotty/tools/dotc/transform/TreeChecker.scala b/src/dotty/tools/dotc/transform/TreeChecker.scala
index a282130f1..43e1ce8a6 100644
--- a/src/dotty/tools/dotc/transform/TreeChecker.scala
+++ b/src/dotty/tools/dotc/transform/TreeChecker.scala
@@ -116,7 +116,7 @@ class TreeChecker extends Phase with SymTransformer {
val sym = tree.symbol
everDefinedSyms.get(sym) match {
case Some(t) =>
- if(t ne tree)
+ if (t ne tree)
ctx.warning(i"symbol ${sym.fullName} is defined at least twice in different parts of AST")
// should become an error
case None =>
@@ -124,7 +124,7 @@ class TreeChecker extends Phase with SymTransformer {
}
assert(!nowDefinedSyms.contains(sym), i"doubly defined symbol: ${sym.fullName} in $tree")
- if(ctx.settings.YcheckMods.value) {
+ if (ctx.settings.YcheckMods.value) {
tree match {
case t: MemberDef =>
if (t.name ne sym.name) ctx.warning(s"symbol ${sym.fullName} name doesn't correspond to AST: ${t}")
@@ -183,13 +183,13 @@ class TreeChecker extends Phase with SymTransformer {
phasesToCheck.foreach(_.checkPostCondition(res))
res
}
-
+
/** Check that PolyParams and MethodParams refer to an enclosing type */
def checkNoOrphans(tp: Type)(implicit ctx: Context) = new TypeMap() {
val definedBinders = mutable.Set[Type]()
def apply(tp: Type): Type = {
tp match {
- case tp: BindingType =>
+ case tp: BindingType =>
definedBinders += tp
mapOver(tp)
definedBinders -= tp
@@ -293,4 +293,4 @@ class TreeChecker extends Phase with SymTransformer {
}
}
-object TreeChecker extends TreeChecker
\ No newline at end of file
+object TreeChecker extends TreeChecker
diff --git a/src/dotty/tools/dotc/transform/TreeGen.scala b/src/dotty/tools/dotc/transform/TreeGen.scala
index 7997aa308..7e507d905 100644
--- a/src/dotty/tools/dotc/transform/TreeGen.scala
+++ b/src/dotty/tools/dotc/transform/TreeGen.scala
@@ -23,4 +23,4 @@ object TreeGen {
.select(wrapArrayMethodName(elemtp))
.appliedToTypes(if (elemtp.isPrimitiveValueType) Nil else elemtp :: Nil)
.appliedTo(tree)
-}
\ No newline at end of file
+}
diff --git a/src/dotty/tools/dotc/transform/TreeTransform.scala b/src/dotty/tools/dotc/transform/TreeTransform.scala
index ce3e3ce63..3bd005991 100644
--- a/src/dotty/tools/dotc/transform/TreeTransform.scala
+++ b/src/dotty/tools/dotc/transform/TreeTransform.scala
@@ -186,7 +186,7 @@ object TreeTransforms {
case ref: SymDenotation =>
val annotTrees = ref.annotations.map(_.tree)
val annotTrees1 = annotTrees.mapConserve(annotationTransformer.macroTransform)
- val annots1 = if(annotTrees eq annotTrees1) ref.annotations else annotTrees1.map(new ConcreteAnnotation(_))
+ val annots1 = if (annotTrees eq annotTrees1) ref.annotations else annotTrees1.map(new ConcreteAnnotation(_))
if ((info1 eq ref.info) && (annots1 eq ref.annotations)) ref
else ref.copySymDenotation(info = info1, annotations = annots1)
case _ => if (info1 eq ref.info) ref else ref.derivedSingleDenotation(ref.symbol, info1)
diff --git a/src/dotty/tools/dotc/transform/TypeTestsCasts.scala b/src/dotty/tools/dotc/transform/TypeTestsCasts.scala
index c25e81af9..9d827d3e0 100644
--- a/src/dotty/tools/dotc/transform/TypeTestsCasts.scala
+++ b/src/dotty/tools/dotc/transform/TypeTestsCasts.scala
@@ -104,4 +104,4 @@ trait TypeTestsCasts {
tree
}
}
-}
\ No newline at end of file
+}
diff --git a/src/dotty/tools/dotc/typer/Applications.scala b/src/dotty/tools/dotc/typer/Applications.scala
index 582642325..c5bd70c1e 100644
--- a/src/dotty/tools/dotc/typer/Applications.scala
+++ b/src/dotty/tools/dotc/typer/Applications.scala
@@ -1023,4 +1023,4 @@ trait Applications extends Compatibility { self: Typer =>
def typedApply(fun: Tree, methRef: TermRef, args: List[Tree], resultType: Type)(implicit ctx: Context): Tree =
typedApply(untpd.Apply(untpd.TypedSplice(fun), args), fun, methRef, args, resultType)
-*/
\ No newline at end of file
+*/
diff --git a/src/dotty/tools/dotc/typer/Checking.scala b/src/dotty/tools/dotc/typer/Checking.scala
index b8b4c9d2c..9303572d2 100644
--- a/src/dotty/tools/dotc/typer/Checking.scala
+++ b/src/dotty/tools/dotc/typer/Checking.scala
@@ -333,4 +333,4 @@ trait NoChecking extends Checking {
override def checkImplicitParamsNotSingletons(vparamss: List[List[ValDef]])(implicit ctx: Context): Unit = ()
override def checkFeasible(tp: Type, pos: Position, where: => String = "")(implicit ctx: Context): Type = tp
override def checkNoDoubleDefs(cls: Symbol)(implicit ctx: Context): Unit = ()
-}
\ No newline at end of file
+}
diff --git a/src/dotty/tools/dotc/typer/ConstFold.scala b/src/dotty/tools/dotc/typer/ConstFold.scala
index 7930b5d4a..ac1c7260b 100644
--- a/src/dotty/tools/dotc/typer/ConstFold.scala
+++ b/src/dotty/tools/dotc/typer/ConstFold.scala
@@ -65,10 +65,10 @@ object ConstFold {
case (nme.UNARY_~ , IntTag ) => Constant(~x.intValue)
case (nme.UNARY_~ , LongTag ) => Constant(~x.longValue)
- case (nme.UNARY_+ , IntTag ) => Constant(+x.intValue)
- case (nme.UNARY_+ , LongTag ) => Constant(+x.longValue)
- case (nme.UNARY_+ , FloatTag ) => Constant(+x.floatValue)
- case (nme.UNARY_+ , DoubleTag ) => Constant(+x.doubleValue)
+ case (nme.UNARY_+ , IntTag ) => Constant(x.intValue)
+ case (nme.UNARY_+ , LongTag ) => Constant(x.longValue)
+ case (nme.UNARY_+ , FloatTag ) => Constant(x.floatValue)
+ case (nme.UNARY_+ , DoubleTag ) => Constant(x.doubleValue)
case (nme.UNARY_- , IntTag ) => Constant(-x.intValue)
case (nme.UNARY_- , LongTag ) => Constant(-x.longValue)
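
The ConstFold hunk above drops the unary `+` from the `UNARY_+` folding cases; since `+x` is the identity on every numeric type, the folded constants are unchanged. A one-line check of that claim:

    object UnaryPlusIsIdentity {
      def main(args: Array[String]): Unit = {
        val i = -42; val l = 7L; val f = 1.5f; val d = -0.25
        assert(+i == i && +l == l && +f == f && +d == d)
        println("unary + is the identity on Int, Long, Float and Double")
      }
    }
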
diff --git a/src/dotty/tools/dotc/typer/ErrorReporting.scala b/src/dotty/tools/dotc/typer/ErrorReporting.scala
index 2ed720f83..b3089c99c 100644
--- a/src/dotty/tools/dotc/typer/ErrorReporting.scala
+++ b/src/dotty/tools/dotc/typer/ErrorReporting.scala
@@ -148,4 +148,4 @@ object ErrorReporting {
final val nonSensicalEndTag = "</nonsensical>"
}
-}
\ No newline at end of file
+}
diff --git a/src/dotty/tools/dotc/typer/EtaExpansion.scala b/src/dotty/tools/dotc/typer/EtaExpansion.scala
index b59748247..1c0e6a11f 100644
--- a/src/dotty/tools/dotc/typer/EtaExpansion.scala
+++ b/src/dotty/tools/dotc/typer/EtaExpansion.scala
@@ -37,7 +37,7 @@ object EtaExpansion {
case Apply(MaybePoly(fn @ Select(pre, name), targs), args) =>
cpy.Apply(tree)(
cpy.Select(fn)(
- lift(defs, pre), name).appliedToTypeTrees(targs),
+ lift(defs, pre), name).appliedToTypeTrees(targs),
liftArgs(defs, fn.tpe, args))
case Select(pre, name) =>
cpy.Select(tree)(lift(defs, pre), name)
diff --git a/src/dotty/tools/dotc/typer/FrontEnd.scala b/src/dotty/tools/dotc/typer/FrontEnd.scala
index f6f68d736..bb313501d 100644
--- a/src/dotty/tools/dotc/typer/FrontEnd.scala
+++ b/src/dotty/tools/dotc/typer/FrontEnd.scala
@@ -25,21 +25,21 @@ class FrontEnd extends Phase {
def parse(implicit ctx: Context) = monitor("parsing") {
val unit = ctx.compilationUnit
unit.untpdTree =
- if(unit.isJava) new JavaParser(unit.source).parse()
+ if (unit.isJava) new JavaParser(unit.source).parse()
else new Parser(unit.source).parse()
- typr.println("parsed:\n"+unit.untpdTree.show)
+ typr.println("parsed:\n" + unit.untpdTree.show)
}
def enterSyms(implicit ctx: Context) = monitor("indexing") {
val unit = ctx.compilationUnit
ctx.typer.index(unit.untpdTree)
- typr.println("entered: "+unit.source)
+ typr.println("entered: " + unit.source)
}
def typeCheck(implicit ctx: Context) = monitor("typechecking") {
val unit = ctx.compilationUnit
unit.tpdTree = ctx.typer.typedExpr(unit.untpdTree)
- typr.println("typed: "+unit.source)
+ typr.println("typed: " + unit.source)
record("retainedUntypedTrees", unit.untpdTree.treeSize)
record("retainedTypedTrees", unit.tpdTree.treeSize)
}
@@ -59,4 +59,4 @@ class FrontEnd extends Phase {
enterSyms
typeCheck
}
-}
\ No newline at end of file
+}
diff --git a/src/dotty/tools/dotc/typer/Implicits.scala b/src/dotty/tools/dotc/typer/Implicits.scala
index 1f5fd3899..cd3b633ac 100644
--- a/src/dotty/tools/dotc/typer/Implicits.scala
+++ b/src/dotty/tools/dotc/typer/Implicits.scala
@@ -347,7 +347,7 @@ trait ImplicitRunInfo { self: RunInfo =>
if (liftedTp ne tp) iscope(liftedTp, isLifted = true)
else ofTypeImplicits(collectCompanions(tp))
if (ctx.typerState.ephemeral) record("ephemeral cache miss: implicitScope")
- else if(cacheResult) implicitScopeCache(tp) = result
+ else if (cacheResult) implicitScopeCache(tp) = result
result
}
finally ctx.typerState.ephemeral |= savedEphemeral
@@ -455,7 +455,7 @@ trait Implicits { self: Typer =>
private def nestedContext = ctx.fresh.setMode(ctx.mode &~ Mode.ImplicitsEnabled)
private def implicitProto(resultType: Type, f: Type => Type) =
- if (argument.isEmpty) f(resultType) else ViewProto(f(argument.tpe.widen), f(resultType))
+ if (argument.isEmpty) f(resultType) else ViewProto(f(argument.tpe.widen), f(resultType))
// Not clear whether we need to drop the `.widen` here. All tests pass with it in place, though.
assert(argument.isEmpty || argument.tpe.isValueType || argument.tpe.isInstanceOf[ExprType],
diff --git a/src/dotty/tools/dotc/typer/ImportInfo.scala b/src/dotty/tools/dotc/typer/ImportInfo.scala
index 9152a8d54..30ab19fd8 100644
--- a/src/dotty/tools/dotc/typer/ImportInfo.scala
+++ b/src/dotty/tools/dotc/typer/ImportInfo.scala
@@ -111,4 +111,4 @@ class ImportInfo(val sym: Symbol, val selectors: List[untpd.Tree], val isRootImp
}
i"import $exprStr.$selectorStr"
}
-}
\ No newline at end of file
+}
diff --git a/src/dotty/tools/dotc/typer/Inferencing.scala b/src/dotty/tools/dotc/typer/Inferencing.scala
index 424575f8c..0223260a0 100644
--- a/src/dotty/tools/dotc/typer/Inferencing.scala
+++ b/src/dotty/tools/dotc/typer/Inferencing.scala
@@ -163,7 +163,7 @@ trait Inferencing { this: Checking =>
* If such a variable appears covariantly in type `tp` or does not appear at all,
* approximate it by its lower bound. Otherwise, if it appears contravariantly
* in type `tp` approximate it by its upper bound.
- * @param ownedBy if it is different from NoSymbol, all type variables owned by
+ * @param ownedBy if it is different from NoSymbol, all type variables owned by
* `ownedBy` qualify, independent of position.
* Without that second condition, it can be that certain variables escape
* interpolation, for instance when their tree was eta-lifted, so
@@ -173,7 +173,7 @@ trait Inferencing { this: Checking =>
*/
def interpolateUndetVars(tree: Tree, ownedBy: Symbol)(implicit ctx: Context): Unit = {
val constraint = ctx.typerState.constraint
- val qualifies = (tvar: TypeVar) =>
+ val qualifies = (tvar: TypeVar) =>
(tree contains tvar.owningTree) || ownedBy.exists && tvar.owner == ownedBy
def interpolate() = Stats.track("interpolateUndetVars") {
val tp = tree.tpe.widen
diff --git a/src/dotty/tools/dotc/typer/Mode.scala b/src/dotty/tools/dotc/typer/Mode.scala
index d7f3420cc..997741819 100644
--- a/src/dotty/tools/dotc/typer/Mode.scala
+++ b/src/dotty/tools/dotc/typer/Mode.scala
@@ -31,8 +31,8 @@ object Mode {
val ImplicitsEnabled = newMode(2, "ImplicitsEnabled")
val InferringReturnType = newMode(3, "InferringReturnType")
- /** This mode bit is set if we collect information without reference to a valid
- * context with typerstate and constraint. This is typically done when we
+ /** This mode bit is set if we collect information without reference to a valid
+ * context with typerstate and constraint. This is typically done when we
* cache the eligibility of implicits. Caching needs to be done across different constraints.
* Therefore, if TypevarsMissContext is set, subtyping becomes looser, and assumes
* that PolyParams can be sub- and supertypes of anything. See TypeComparer.
@@ -56,7 +56,7 @@ object Mode {
/** Allow GADTFlexType labelled types to have their bounds adjusted */
val GADTflexible = newMode(8, "GADTflexible")
-
+
/** Allow dependent functions. This is currently necessary for unpickling, because
* some dependent functions are passed through from the front end(s?), even though they
* are technically speaking illegal.
@@ -64,4 +64,4 @@ object Mode {
val AllowDependentFunctions = newMode(9, "AllowDependentFunctions")
val PatternOrType = Pattern | Type
-}
\ No newline at end of file
+}
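
The Mode.scala hunks above touch the typer's mode bits, where each mode occupies one bit and mode sets are bit unions (`val PatternOrType = Pattern | Type`). A rough, self-contained sketch of that bit-set pattern; the representation here is illustrative and not the compiler's actual Mode class:

    object ModeBits {
      final case class Mode(bits: Int) {
        def |(that: Mode): Mode = Mode(bits | that.bits)
        def is(that: Mode): Boolean = (bits & that.bits) == that.bits
      }
      def newMode(bit: Int): Mode = Mode(1 << bit)
      val Pattern       = newMode(0)
      val Type          = newMode(1)
      val PatternOrType = Pattern | Type
      def main(args: Array[String]): Unit = {
        println(PatternOrType.is(Pattern))  // true: the union contains the Pattern bit
        println(Pattern.is(Type))           // false: distinct bits
      }
    }
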
diff --git a/src/dotty/tools/dotc/typer/Namer.scala b/src/dotty/tools/dotc/typer/Namer.scala
index e9e4d4c87..10667f884 100644
--- a/src/dotty/tools/dotc/typer/Namer.scala
+++ b/src/dotty/tools/dotc/typer/Namer.scala
@@ -18,7 +18,7 @@ import config.Printers._
import language.implicitConversions
trait NamerContextOps { this: Context =>
-
+
/** Enter symbol into current class, if current class is owner of current context,
* or into current scope, if not. Should always be called instead of scope.enter
* in order to make sure that updates to class members are reflected in
@@ -83,7 +83,7 @@ trait NamerContextOps { this: Context =>
/** The given type, unless `sym` is a constructor, in which case the
* type of the constructed instance is returned
*/
- def effectiveResultType(sym: Symbol, typeParams: List[Symbol], given: Type) =
+ def effectiveResultType(sym: Symbol, typeParams: List[Symbol], given: Type) =
if (sym.name == nme.CONSTRUCTOR) sym.owner.typeRef.appliedTo(typeParams map (_.typeRef))
else given
@@ -112,19 +112,19 @@ trait NamerContextOps { this: Context =>
else if (valueParamss.isEmpty) ExprType(monotpe)
else monotpe
}
-
+
/** Find moduleClass/sourceModule in effective scope */
private def findModuleBuddy(name: Name)(implicit ctx: Context) = {
val scope = effectiveScope
val it = scope.lookupAll(name).filter(_ is Module)
assert(it.hasNext, s"no companion $name in $scope")
it.next
- }
+ }
/** Add moduleClass or sourceModule functionality to completer
* for a module or module class
*/
- def adjustModuleCompleter(completer: LazyType, name: Name) =
+ def adjustModuleCompleter(completer: LazyType, name: Name) =
if (name.isTermName)
completer withModuleClass (_ => findModuleBuddy(name.moduleClassName))
else
@@ -233,9 +233,9 @@ class Namer { typer: Typer =>
/** Add moduleClass/sourceModule to completer if it is for a module val or class */
def adjustIfModule(completer: LazyType, tree: MemberDef) =
- if (tree.mods is Module) ctx.adjustModuleCompleter(completer, tree.name.encode)
+ if (tree.mods is Module) ctx.adjustModuleCompleter(completer, tree.name.encode)
else completer
-
+
typr.println(i"creating symbol for $tree in ${ctx.mode}")
def checkNoConflict(name: Name): Unit = {
@@ -689,11 +689,11 @@ class Namer { typer: Typer =>
lhsType orElse WildcardType
}
}
-
+
val tptProto = mdef.tpt match {
- case _: untpd.DerivedTypeTree =>
+ case _: untpd.DerivedTypeTree =>
WildcardType
- case TypeTree(untpd.EmptyTree) =>
+ case TypeTree(untpd.EmptyTree) =>
inferredType
case TypedSplice(tpt: TypeTree) if !isFullyDefined(tpt.tpe, ForceDegree.none) =>
val rhsType = typedAheadExpr(mdef.rhs, tpt.tpe).tpe
@@ -708,7 +708,7 @@ class Namer { typer: Typer =>
case _ =>
}
WildcardType
- case _ =>
+ case _ =>
WildcardType
}
paramFn(typedAheadType(mdef.tpt, tptProto).tpe)
@@ -742,7 +742,7 @@ class Namer { typer: Typer =>
val isDerived = tdef.rhs.isInstanceOf[untpd.DerivedTypeTree]
val toParameterize = tparamSyms.nonEmpty && !isDerived
val needsLambda = sym.allOverriddenSymbols.exists(_ is HigherKinded) && !isDerived
- def abstracted(tp: Type): Type =
+ def abstracted(tp: Type): Type =
if (needsLambda) tp.LambdaAbstract(tparamSyms)
else if (toParameterize) tp.parameterizeWith(tparamSyms)
else tp
@@ -765,4 +765,4 @@ class Namer { typer: Typer =>
sym.info = NoCompleter
checkNonCyclic(sym, unsafeInfo, reportErrors = true)
}
-} \ No newline at end of file
+}
diff --git a/src/dotty/tools/dotc/typer/ProtoTypes.scala b/src/dotty/tools/dotc/typer/ProtoTypes.scala
index f646f7ecd..eb5758662 100644
--- a/src/dotty/tools/dotc/typer/ProtoTypes.scala
+++ b/src/dotty/tools/dotc/typer/ProtoTypes.scala
@@ -165,7 +165,7 @@ object ProtoTypes {
case class FunProto(args: List[untpd.Tree], resType: Type, typer: Typer)(implicit ctx: Context)
extends UncachedGroundType with ApplyingProto {
private var myTypedArgs: List[Tree] = Nil
-
+
override def resultType(implicit ctx: Context) = resType
/** A map in which typed arguments can be stored to be later integrated in `typedArgs`. */
@@ -247,9 +247,9 @@ object ProtoTypes {
extends CachedGroundType with ApplyingProto {
override def resultType(implicit ctx: Context) = resType
-
+
def isMatchedBy(tp: Type)(implicit ctx: Context): Boolean =
- ctx.typer.isApplicable(tp, argType :: Nil, resultType)
+ ctx.typer.isApplicable(tp, argType :: Nil, resultType)
def derivedViewProto(argType: Type, resultType: Type)(implicit ctx: Context) =
if ((argType eq this.argType) && (resultType eq this.resultType)) this
@@ -383,7 +383,7 @@ object ProtoTypes {
tp.derivedRefinedType(wildApprox(tp.parent, theMap), tp.refinedName, wildApprox(tp.refinedInfo, theMap))
case tp: TypeAlias => // default case, inlined for speed
tp.derivedTypeAlias(wildApprox(tp.alias, theMap))
- case tp @ PolyParam(poly, pnum) =>
+ case tp @ PolyParam(poly, pnum) =>
ctx.typerState.constraint.entry(tp) match {
case bounds: TypeBounds => wildApprox(WildcardType(bounds))
case NoType => WildcardType(wildApprox(poly.paramBounds(pnum)).bounds)
@@ -433,4 +433,4 @@ object ProtoTypes {
case _ => None
}
}
-} \ No newline at end of file
+}
diff --git a/src/dotty/tools/dotc/typer/ReTyper.scala b/src/dotty/tools/dotc/typer/ReTyper.scala
index 901542f21..d3a5fd05f 100644
--- a/src/dotty/tools/dotc/typer/ReTyper.scala
+++ b/src/dotty/tools/dotc/typer/ReTyper.scala
@@ -98,4 +98,4 @@ class ReTyper extends Typer {
}
override def checkVariance(tree: Tree)(implicit ctx: Context) = ()
-} \ No newline at end of file
+}
diff --git a/src/dotty/tools/dotc/typer/RefChecks.scala b/src/dotty/tools/dotc/typer/RefChecks.scala
index 8a778a38d..9b14fffc0 100644
--- a/src/dotty/tools/dotc/typer/RefChecks.scala
+++ b/src/dotty/tools/dotc/typer/RefChecks.scala
@@ -875,7 +875,7 @@ class RefChecks extends MiniPhase with SymTransformer { thisTransformer =>
def onSyms[T](f: List[Symbol] => T) = f(List(receiver, actual))
// @MAT normalize for consistency in error message, otherwise only part is normalized due to use of `typeSymbol`
- def typesString = normalizeAll(qual.tpe.widen)+" and "+normalizeAll(other.tpe.widen)
+    def typesString = normalizeAll(qual.tpe.widen) + " and " + normalizeAll(other.tpe.widen)
/* Symbols which limit the warnings we can issue since they may be value types */
val isMaybeValue = Set[Symbol](AnyClass, AnyRefClass, AnyValClass, ObjectClass, ComparableClass, JavaSerializableClass)
@@ -1057,7 +1057,7 @@ class RefChecks extends MiniPhase with SymTransformer { thisTransformer =>
// FIXME: reconcile this check with one in resetAttrs
case _ => checkUndesiredProperties(sym, tree.pos)
}
- if(sym.isJavaDefined)
+ if (sym.isJavaDefined)
sym.typeParams foreach (_.cookJavaRawInfo())
if (!tp.isHigherKinded && !skipBounds)
checkBounds(tree, pre, sym.owner, sym.typeParams, args)
@@ -1101,7 +1101,7 @@ class RefChecks extends MiniPhase with SymTransformer { thisTransformer =>
}
case tpt@TypeTree() =>
- if(tpt.original != null) {
+ if (tpt.original != null) {
tpt.original foreach {
case dc@TypeTreeWithDeferredRefCheck() =>
applyRefchecksToAnnotations(dc.check()) // #2416
@@ -1384,7 +1384,7 @@ class RefChecks extends MiniPhase with SymTransformer { thisTransformer =>
tree
case treeInfo.WildcardStarArg(_) if !isRepeatedParamArg(tree) =>
- unit.error(tree.pos, "no `: _*' annotation allowed here\n"+
+ unit.error(tree.pos, "no `: _*' annotation allowed here\n" +
"(such annotations are only allowed in arguments to *-parameters)")
tree
diff --git a/src/dotty/tools/dotc/typer/TypeAssigner.scala b/src/dotty/tools/dotc/typer/TypeAssigner.scala
index 5ef3a768c..2ec510a3d 100644
--- a/src/dotty/tools/dotc/typer/TypeAssigner.scala
+++ b/src/dotty/tools/dotc/typer/TypeAssigner.scala
@@ -30,10 +30,10 @@ trait TypeAssigner {
/** An upper approximation of the given type `tp` that does not refer to any symbol in `symsToAvoid`.
* Approximation steps are:
- *
+ *
* - follow aliases if the original refers to a forbidden symbol
* - widen termrefs that refer to a forbidden symbol
- * - replace ClassInfos of forbidden classes by the intersection of their parents, refined by all
+ * - replace ClassInfos of forbidden classes by the intersection of their parents, refined by all
* non-private fields, methods, and type members.
* - drop refinements referring to a forbidden symbol.
*/
diff --git a/src/dotty/tools/dotc/typer/Typer.scala b/src/dotty/tools/dotc/typer/Typer.scala
index 58d866ac1..a2b280c6e 100644
--- a/src/dotty/tools/dotc/typer/Typer.scala
+++ b/src/dotty/tools/dotc/typer/Typer.scala
@@ -309,7 +309,7 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
}
}
- if(ctx.compilationUnit.isJava && tree.name.isTypeName) {
+ if (ctx.compilationUnit.isJava && tree.name.isTypeName) {
// SI-3120 Java uses the same syntax, A.B, to express selection from the
// value A and from the type A. We have to try both.
tryEither(tryCtx => asSelect(tryCtx))((_,_) => asJavaSelectFromTypeTree(ctx))
@@ -347,8 +347,8 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
val clsDef = TypeDef(x, templ).withFlags(Final)
typed(cpy.Block(tree)(clsDef :: Nil, New(Ident(x), Nil)), pt)
case _ =>
- val tpt1 = typedType(tree.tpt)
- checkClassTypeWithStablePrefix(tpt1.tpe, tpt1.pos, traitReq = false)
+ val tpt1 = typedType(tree.tpt)
+ checkClassTypeWithStablePrefix(tpt1.tpe, tpt1.pos, traitReq = false)
assignType(cpy.New(tree)(tpt1), tpt1)
// todo in a later phase: checkInstantiatable(cls, tpt1.pos)
}
@@ -402,8 +402,8 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
typed(cpy.Apply(lhs)(untpd.Select(fn, nme.update), args :+ tree.rhs), pt)
case untpd.TypedSplice(Apply(MaybePoly(Select(fn, app), targs), args)) if app == nme.apply =>
val rawUpdate: untpd.Tree = untpd.Select(untpd.TypedSplice(fn), nme.update)
- val wrappedUpdate =
- if (targs.isEmpty) rawUpdate
+ val wrappedUpdate =
+ if (targs.isEmpty) rawUpdate
else untpd.TypeApply(rawUpdate, targs map untpd.TypedSplice)
val appliedUpdate = cpy.Apply(fn)(wrappedUpdate, (args map untpd.TypedSplice) :+ tree.rhs)
typed(appliedUpdate, pt)
@@ -471,7 +471,7 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
*/
protected def ensureNoLocalRefs(tree: Tree, pt: Type, localSyms: => List[Symbol], forcedDefined: Boolean = false)(implicit ctx: Context): Tree = {
def ascribeType(tree: Tree, pt: Type): Tree = tree match {
- case block @ Block(stats, expr) =>
+ case block @ Block(stats, expr) =>
val expr1 = ascribeType(expr, pt)
cpy.Block(block)(stats, expr1) withType expr1.tpe // no assignType here because avoid is redundant
case _ =>
@@ -722,8 +722,8 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
if (tree.from.isEmpty) enclMethInfo(ctx)
else {
val from = tree.from.asInstanceOf[tpd.Tree]
- val proto =
- if (ctx.erasedTypes) from.symbol.info.finalResultType
+ val proto =
+ if (ctx.erasedTypes) from.symbol.info.finalResultType
else WildcardType // We cannot reliably detect the internal type view of polymorphic or dependent methods
// because we do not know the internal type params and method params.
// Hence no adaptation is possible, and we assume WildcardType as prototype.
@@ -1400,4 +1400,4 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
}
}
}
-} \ No newline at end of file
+}
diff --git a/src/dotty/tools/dotc/typer/Variances.scala b/src/dotty/tools/dotc/typer/Variances.scala
index 0fec1e5a7..0cc9e74cc 100644
--- a/src/dotty/tools/dotc/typer/Variances.scala
+++ b/src/dotty/tools/dotc/typer/Variances.scala
@@ -50,8 +50,8 @@ object Variances {
for ((tp, tparam1) <- tps zip tparams1) {
val v1 = varianceInType(tp)(tparam)
v = v & (if (tparam1.is(Covariant)) v1
- else if (tparam1.is(Contravariant)) flip(v1)
- else cut(v1))
+ else if (tparam1.is(Contravariant)) flip(v1)
+ else cut(v1))
}
v
}
diff --git a/src/dotty/tools/dotc/util/Attachment.scala b/src/dotty/tools/dotc/util/Attachment.scala
index ec3019bab..e29bf13fe 100644
--- a/src/dotty/tools/dotc/util/Attachment.scala
+++ b/src/dotty/tools/dotc/util/Attachment.scala
@@ -95,4 +95,4 @@ object Attachment {
next = new Link(key, value, next)
}
}
-} \ No newline at end of file
+}
diff --git a/src/dotty/tools/dotc/util/DotClass.scala b/src/dotty/tools/dotc/util/DotClass.scala
index 7839fc37d..cdb697a45 100644
--- a/src/dotty/tools/dotc/util/DotClass.scala
+++ b/src/dotty/tools/dotc/util/DotClass.scala
@@ -9,4 +9,4 @@ class DotClass {
def unsupported(methodName: String): Nothing =
throw new UnsupportedOperationException(s"$getClass.$methodName")
-} \ No newline at end of file
+}
diff --git a/src/dotty/tools/dotc/util/NameTransformer.scala b/src/dotty/tools/dotc/util/NameTransformer.scala
index 2c3520236..330d513fe 100644
--- a/src/dotty/tools/dotc/util/NameTransformer.scala
+++ b/src/dotty/tools/dotc/util/NameTransformer.scala
@@ -108,9 +108,9 @@ object NameTransformer {
var unicode = false
val c = name charAt i
if (c == '$' && i + 2 < len) {
- val ch1 = name.charAt(i+1)
+ val ch1 = name.charAt(i + 1)
if ('a' <= ch1 && ch1 <= 'z') {
- val ch2 = name.charAt(i+2)
+ val ch2 = name.charAt(i + 2)
if ('a' <= ch2 && ch2 <= 'z') {
ops = code2op((ch1 - 'a') * 26 + ch2 - 'a')
while ((ops ne null) && !name.startsWith(ops.code, i)) ops = ops.next
@@ -129,7 +129,7 @@ object NameTransformer {
((Character.isDigit(ch2)) ||
('A' <= ch2 && ch2 <= 'F'))) {
/* Skip past "$u", next four should be hexadecimal */
- val hex = name.substring(i+2, i+6)
+ val hex = name.substring(i + 2, i + 6)
try {
val str = Integer.parseInt(hex, 16).toChar
if (buf eq null) {
diff --git a/src/dotty/tools/dotc/util/Positions.scala b/src/dotty/tools/dotc/util/Positions.scala
index 1f9e24897..60274c71f 100644
--- a/src/dotty/tools/dotc/util/Positions.scala
+++ b/src/dotty/tools/dotc/util/Positions.scala
@@ -21,7 +21,7 @@ object Positions {
/** Convert offset `x` to an integer by sign extending the original
* field of `StartEndBits` width.
*/
- def offsetToInt(x: Int) =
+ def offsetToInt(x: Int) =
x << (32 - StartEndBits) >> (32 - StartEndBits)
/** A position indicates a range between a start offset and an end offset.
@@ -124,7 +124,7 @@ object Positions {
((end & StartEndMask).toLong << StartEndBits) |
(pointDelta.toLong << (StartEndBits * 2)))
}
-
+
/** A synthetic position with given start and end */
def Position(start: Int, end: Int): Position = {
val pos = fromOffsets(start, end, SyntheticPointDelta)
@@ -170,4 +170,4 @@ object Positions {
/** A sentinel for a missing coordinate */
val NoCoord = new Coord(0)
-} \ No newline at end of file
+}
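For the offsetToInt change above: the shift pair sign-extends a StartEndBits-wide field stored in an Int. A stand-alone sketch (assuming StartEndBits = 26, matching its definition elsewhere in Positions.scala; not part of the patch):

object SignExtendDemo {
  final val StartEndBits = 26
  // Same expression as offsetToInt: push the field into the top bits, then
  // arithmetic-shift back so the field's sign bit is propagated.
  def offsetToInt(x: Int): Int = x << (32 - StartEndBits) >> (32 - StartEndBits)
  def main(args: Array[String]): Unit = {
    println(offsetToInt(5))                       // 5: small offsets pass through unchanged
    println(offsetToInt((1 << StartEndBits) - 1)) // -1: an all-ones field reads back as -1
  }
}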
diff --git a/src/dotty/tools/dotc/util/SixteenNibbles.scala b/src/dotty/tools/dotc/util/SixteenNibbles.scala
index 59d1b0301..93817604e 100644
--- a/src/dotty/tools/dotc/util/SixteenNibbles.scala
+++ b/src/dotty/tools/dotc/util/SixteenNibbles.scala
@@ -25,4 +25,4 @@ object SixteenNibbles {
final val Width = 4
final val Mask = (1 << Width) - 1
final val LongMask = Mask.toLong
-} \ No newline at end of file
+}
diff --git a/src/dotty/tools/dotc/util/SourcePosition.scala b/src/dotty/tools/dotc/util/SourcePosition.scala
index 3b16c3685..3d6352a1a 100644
--- a/src/dotty/tools/dotc/util/SourcePosition.scala
+++ b/src/dotty/tools/dotc/util/SourcePosition.scala
@@ -14,7 +14,7 @@ case class SourcePosition(source: SourceFile, pos: Position) {
def column: Int = source.column(point)
override def toString =
- if (source.exists) s"${source.file}:${line+1}"
+ if (source.exists) s"${source.file}:${line + 1}"
else s"(no source file, offset = ${pos.point})"
}
diff --git a/src/dotty/tools/dotc/util/Stats.scala b/src/dotty/tools/dotc/util/Stats.scala
index 09dfd894d..d899e9546 100644
--- a/src/dotty/tools/dotc/util/Stats.scala
+++ b/src/dotty/tools/dotc/util/Stats.scala
@@ -67,4 +67,4 @@ object Stats {
}
} else op
}
-} \ No newline at end of file
+}
diff --git a/src/dotty/tools/dotc/util/Util.scala b/src/dotty/tools/dotc/util/Util.scala
index 98f0b62db..0d37f687b 100644
--- a/src/dotty/tools/dotc/util/Util.scala
+++ b/src/dotty/tools/dotc/util/Util.scala
@@ -2,31 +2,31 @@ package dotty.tools.dotc.util
import reflect.ClassTag
object Util {
-
+
/** The index `i` in `candidates.indices` such that `candidates(i) <= x` and
* `candidates(i)` is closest to `x`, determined by binary search, or -1
* if `x < candidates(0)`.
* @param hint If between 0 and `candidates.length` use this
- * as the first search point, otherwise use
+ * as the first search point, otherwise use
* `candidates.length/2`.
* @pre candidates is sorted
*/
def bestFit(candidates: Array[Int], length: Int, x: Int, hint: Int = -1): Int = {
def recur(lo: Int, hi: Int, mid: Int): Int =
- if (x < candidates(mid))
+ if (x < candidates(mid))
recur(lo, mid - 1, (lo + mid - 1) / 2)
- else if (mid + 1 < length && x >= candidates(mid + 1))
+ else if (mid + 1 < length && x >= candidates(mid + 1))
recur(mid + 1, hi, (mid + 1 + hi) / 2)
else mid
val initMid = if (0 <= hint && hint < length) hint else length / 2
if (length == 0 || x < candidates(0)) -1
else recur(0, length, initMid)
}
-
+
/** An array twice the size of given array, with existing elements copied over */
def dble[T: ClassTag](arr: Array[T]) = {
val arr1 = new Array[T](arr.length * 2)
Array.copy(arr, 0, arr1, 0, arr.length)
arr1
}
-} \ No newline at end of file
+}
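The bestFit doc comment above describes a hinted binary search: it returns the index of the greatest candidates(i) <= x, or -1 when x precedes candidates(0), with the hint seeding the first probe. A hedged usage sketch against that signature (the sample array and values are made up; running it requires the dotc classes on the classpath):

import dotty.tools.dotc.util.Util

object BestFitDemo {
  def main(args: Array[String]): Unit = {
    val lineStarts = Array(0, 10, 25, 40)  // sorted, as required by the @pre
    println(Util.bestFit(lineStarts, lineStarts.length, 27))           // 2: offset 25 is the closest start <= 27
    println(Util.bestFit(lineStarts, lineStarts.length, 10, hint = 1)) // 1: exact hit, found from the hint
    println(Util.bestFit(lineStarts, lineStarts.length, -3))           // -1: x precedes the first candidate
  }
}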
diff --git a/src/dotty/tools/dotc/util/common.scala b/src/dotty/tools/dotc/util/common.scala
index d7596c28e..d9798aec5 100644
--- a/src/dotty/tools/dotc/util/common.scala
+++ b/src/dotty/tools/dotc/util/common.scala
@@ -11,4 +11,4 @@ object common {
val alwaysZero = Function.const(0) _
val alwaysWildcardType = Function.const(WildcardType) _
-} \ No newline at end of file
+}
diff --git a/src/dotty/tools/io/ClassPath.scala b/src/dotty/tools/io/ClassPath.scala
index 055ee9f88..ecb063469 100644
--- a/src/dotty/tools/io/ClassPath.scala
+++ b/src/dotty/tools/io/ClassPath.scala
@@ -283,7 +283,7 @@ class SourcePath(dir: AbstractFile, val context: ClassPathContext) extends Class
}
lazy val (packages, classes) = traverse()
- override def toString() = "sourcepath: "+ dir.toString()
+ override def toString() = "sourcepath: " + dir.toString()
}
/**
@@ -310,7 +310,7 @@ class DirectoryClassPath(val dir: AbstractFile, val context: ClassPathContext) e
}
lazy val (packages, classes) = traverse()
- override def toString() = "directory classpath: "+ origin.getOrElse("?")
+ override def toString() = "directory classpath: " + origin.getOrElse("?")
}
class DeltaClassPath(original: MergedClassPath, subst: Map[ClassPath, ClassPath])
@@ -393,7 +393,7 @@ extends ClassPath {
println("ClassPath %s has %d entries and results in:\n".format(name, entries.size))
asClasspathString split ':' foreach (x => println(" " + x))
}
- override def toString() = "merged classpath "+ entries.mkString("(", "\n", ")")
+ override def toString() = "merged classpath " + entries.mkString("(", "\n", ")")
}
/**
diff --git a/src/typedapply.scala b/src/typedapply.scala
index e28e59d4f..8496d528b 100644
--- a/src/typedapply.scala
+++ b/src/typedapply.scala
@@ -8,4 +8,4 @@ object typedapply {
foo[Int, String] _
-} \ No newline at end of file
+}
diff --git a/test/dotc/tests.scala b/test/dotc/tests.scala
index c36552536..a015b9efe 100644
--- a/test/dotc/tests.scala
+++ b/test/dotc/tests.scala
@@ -40,7 +40,7 @@ class tests extends CompilerTest {
@Test def pickle_pickleOK = compileDir(picklingDir, testPickling)
@Test def pickle_pickling = compileDir(dotcDir + "tools/dotc/core/pickling/", testPickling)
@Test def pickle_ast = compileDir(dotcDir + "tools/dotc/ast/", testPickling)
-
+
//@Test def pickle_core = compileDir(dotcDir + "tools/dotc/core", testPickling, xerrors = 2) // two spurious comparison errors in Types and TypeOps
@Test def pos_t2168_pat = compileFile(posDir, "t2168", twice)
@@ -141,7 +141,7 @@ class tests extends CompilerTest {
// demanding denotation of module class ClassfileParser$$anon$1$ at phase frontend(1) outside defined interval:
// defined periods are Period(31..36, run = 2) Period(3..24, run = 2) Period(25..26, run = 2)
// Period(27..28, run = 2) Period(29..29, run = 2) Period(30..30, run = 2)
- // inside FirstTransform at dotty.tools.dotc.transform.FirstTransform.transform(FirstTransform.scala:33)
+ // inside FirstTransform at dotty.tools.dotc.transform.FirstTransform.transform(FirstTransform.scala:33)
// weird.
@Test def dotc_transform = compileDir(dotcDir + "tools/dotc/transform")// twice omitted to make tests run faster
@@ -183,6 +183,6 @@ class tests extends CompilerTest {
val javaDir = "./tests/pos/java-interop/"
@Test def java_all = compileFiles(javaDir, twice)
-
+
//@Test def dotc_compilercommand = compileFile(dotcDir + "tools/dotc/config/", "CompilerCommand")
}
diff --git a/test/test/ContravariantTrees.scala b/test/test/ContravariantTrees.scala
index 5fd45bce2..999224042 100644
--- a/test/test/ContravariantTrees.scala
+++ b/test/test/ContravariantTrees.scala
@@ -62,4 +62,4 @@ object ContravariantTrees {
type ThisTree[T >: Untyped] = DefDef[T]
}
-} \ No newline at end of file
+}
diff --git a/test/test/DeSugarTest.scala b/test/test/DeSugarTest.scala
index 016ab5361..2c74abae7 100644
--- a/test/test/DeSugarTest.scala
+++ b/test/test/DeSugarTest.scala
@@ -84,11 +84,11 @@ class DeSugarTest extends ParserTest {
def firstClass(tree: Tree): String = tree match {
case PackageDef(pid, stats) =>
pid.show + "." + firstClass(stats)
- case _ => "??? "+tree.getClass
+ case _ => "??? " + tree.getClass
}
def desugarTree(tree: Tree): Tree = {
- //println("***** desugaring "+firstClass(tree))
+ //println("***** desugaring " + firstClass(tree))
DeSugar.transform(tree)
}
diff --git a/test/test/ParserTest.scala b/test/test/ParserTest.scala
index 291b27983..524be272d 100644
--- a/test/test/ParserTest.scala
+++ b/test/test/ParserTest.scala
@@ -39,4 +39,4 @@ class ParserTest extends DottyTest {
for (d <- dir.dirs)
parseDir(d.path)
}
-} \ No newline at end of file
+}
diff --git a/test/test/ScannerTest.scala b/test/test/ScannerTest.scala
index 78a54301f..15ae41c1c 100644
--- a/test/test/ScannerTest.scala
+++ b/test/test/ScannerTest.scala
@@ -21,7 +21,7 @@ class ScannerTest extends DottyTest {
val scanner = new Scanner(source)
var i = 0
while (scanner.token != EOF) {
-// print("["+scanner.token.show+"]")
+//      print("[" + scanner.token.show + "]")
scanner.nextToken
// i += 1
// if (i % 10 == 0) println()
diff --git a/test/test/parseFile.scala b/test/test/parseFile.scala
index 21cc2f381..c82fe22e7 100644
--- a/test/test/parseFile.scala
+++ b/test/test/parseFile.scala
@@ -6,8 +6,8 @@ object parseFile extends ParserTest {
if (args.isEmpty) println("usage: scala test.parseFile file1.scala ... fileN.scala")
for (arg <- args) {
val tree = parse(arg)
- println("parsed: "+arg)
+ println("parsed: " + arg)
println(tree.show)
}
}
-} \ No newline at end of file
+}
diff --git a/test/test/showTree.scala b/test/test/showTree.scala
index 9476d994b..2c3316ac9 100644
--- a/test/test/showTree.scala
+++ b/test/test/showTree.scala
@@ -13,12 +13,12 @@ object showTree extends DeSugarTest {
def test(arg: String) = {
val tree: Tree = parse(arg)
- println("result = "+tree.show)
- println("desugared = "+DeSugar.transform(tree).show)
+ println("result = " + tree.show)
+ println("desugared = " + DeSugar.transform(tree).show)
}
def main(args: Array[String]): Unit = {
test("src/dotty/tools/dotc/core/Types.scala")
for (arg <- args) test(arg)
}
-} \ No newline at end of file
+}
diff --git a/test/x/PatMat.scala b/test/x/PatMat.scala
index 279373674..131136a1e 100644
--- a/test/x/PatMat.scala
+++ b/test/x/PatMat.scala
@@ -13,4 +13,4 @@ trait PatMat {
case "hi" => ???
}
-} \ No newline at end of file
+}
diff --git a/tests/disabled/java-interop/failing/t1459/App.scala b/tests/disabled/java-interop/failing/t1459/App.scala
index 36e5022e9..36e5022e9 100755..100644
--- a/tests/disabled/java-interop/failing/t1459/App.scala
+++ b/tests/disabled/java-interop/failing/t1459/App.scala
diff --git a/tests/disabled/not-representable/MailBox.scala b/tests/disabled/not-representable/MailBox.scala
index 8e27bd362..a2145c2c2 100644
--- a/tests/disabled/not-representable/MailBox.scala
+++ b/tests/disabled/not-representable/MailBox.scala
@@ -49,10 +49,10 @@ class MailBox {
if (s1 != null) {
s.next = s1.next; s1.elem
} else {
- val r = insert(lastReceiver, new Receiver {
+ val r = insert(lastReceiver, new Receiver {
def isDefined(msg: Any) = f.isDefinedAt(msg);
});
- lastReceiver = r;
+ lastReceiver = r;
r.elem.wait();
r.elem.msg
}
diff --git a/tests/disabled/not-representable/pos/annotated-treecopy/Impls_Macros_1.scala b/tests/disabled/not-representable/pos/annotated-treecopy/Impls_Macros_1.scala
index 986287dfa..00ddb5314 100644
--- a/tests/disabled/not-representable/pos/annotated-treecopy/Impls_Macros_1.scala
+++ b/tests/disabled/not-representable/pos/annotated-treecopy/Impls_Macros_1.scala
@@ -39,7 +39,7 @@ object Macros {
val reifiedExpr = c.Expr[scala.reflect.runtime.universe.Expr[T => U]](reifiedTree)
val template =
c.universe.reify(new (T => U) with TypedFunction {
- override def toString = c.Expr[String](q"""${tp+" => "+ttag.tpe+" { "+b1.toString+" } "}""").splice // DEBUG
+        override def toString = c.Expr[String](q"""${tp + " => " + ttag.tpe + " { " + b1.toString + " } "}""").splice // DEBUG
def tree = reifiedExpr.splice.tree
val typeIn = c.Expr[String](q"${tp.toString}").splice
val typeOut = c.Expr[String](q"${ttag.tpe.toString}").splice
diff --git a/tests/disabled/structural-type/pos/t3363-old.scala b/tests/disabled/structural-type/pos/t3363-old.scala
index 0088eff3d..de4fcf68a 100644
--- a/tests/disabled/structural-type/pos/t3363-old.scala
+++ b/tests/disabled/structural-type/pos/t3363-old.scala
@@ -7,12 +7,12 @@ object TestCase {
//if you inherit from MapOps[T] instead of MapOps[F] then code compiles fine
implicit def map2ops[T,F](fs: Map[T,F]): TestCase.MapOps[F]{val m: Manifest[T]; def is(xs: List[T]): List[List[T]]} = new MapOps[F] {
//if you remove this line, then code compiles
- lazy val m: Manifest[T] = sys.error("just something to make it compile")
- def is(xs: List[T]) = List(xs)
- }
+ lazy val m: Manifest[T] = sys.error("just something to make it compile")
+ def is(xs: List[T]) = List(xs)
+ }
- def main(args: Array[String]): Unit = {
- println(Map(1 -> "2") is List(2))
- }
+ def main(args: Array[String]): Unit = {
+ println(Map(1 -> "2") is List(2))
+ }
- }
+ }
diff --git a/tests/neg/assignments.scala b/tests/neg/assignments.scala
index 2f7db275a..2314783fe 100644
--- a/tests/neg/assignments.scala
+++ b/tests/neg/assignments.scala
@@ -3,8 +3,8 @@ object assignments {
var a = Array(1, 2, 3)
var i = 0
a(i) = a(i) * 2
- a(i+1) += 1
-
+ a(i + 1) += 1
+
class C {
var myX = 0
def x = myX
@@ -12,10 +12,10 @@ object assignments {
x = x + 1
x *= 2
-
+
x_= = 2 // should give missing arguments + reassignment to val
}
-
+
var c = new C
import c._ // should give: prefix is not stable
x = x + 1
diff --git a/tests/neg/patternUnsoundness.scala b/tests/neg/patternUnsoundness.scala
index b3d699a5c..4620f6c7d 100644
--- a/tests/neg/patternUnsoundness.scala
+++ b/tests/neg/patternUnsoundness.scala
@@ -1,17 +1,17 @@
object patternUnsoundness extends App {
-
+
class C[+T]
-
+
case class D[S](_s: S) extends C[S] {
var s: S = _s
}
-
+
val x = new D[String]("abc")
val y: C[Object] = x
-
+
y match {
case d @ D(x) => d.s = new Integer(1)
}
val z: String = x.s // ClassCast exception
-} \ No newline at end of file
+}
diff --git a/tests/neg/t1164.scala b/tests/neg/t1164.scala
index 6deedfbff..7775b5e86 100644
--- a/tests/neg/t1164.scala
+++ b/tests/neg/t1164.scala
@@ -7,23 +7,23 @@ object test {
def unapply [a](m : Foo[a]) = Some (m.arg)
}
- def matchAndGetArgFromFoo[a]( e:Foo[a]):a = {e match { case Foo(x) => x }}
+ def matchAndGetArgFromFoo[a]( e:Foo[a]):a = {e match { case Foo(x) => x }}
// Unapply node here will have type argument [a] instantiated to scala.Nothing:
// UnApply(TypeApply(Select(Ident(Foo),unapply),List(TypeTree[TypeVar(PolyParam(a) -> TypeRef(ThisType(TypeRef(NoPrefix,scala)),Nothing))])),List(),List(Bind(x,Ident(_))))
// but the type of the UnApply node itself is correct: RefinedType(TypeRef(ThisType(TypeRef(ThisType(TypeRef(NoPrefix,<empty>)),test$)),Foo), test$$Foo$$a, TypeAlias(TypeRef(NoPrefix,a)))
//
- // Try the same thing as above but use function as argument to Bar
- // constructor
+ // Try the same thing as above but use function as argument to Bar
+ // constructor
- type FunIntToA [a] = (Int) => a
+ type FunIntToA [a] = (Int) => a
class Bar[a] (var f: FunIntToA[a])
- object Bar {
- def apply[a](f: FunIntToA[a]) = new Bar[a](f)
- def unapply[a](m: Bar[a]) = Some (m.f)
- }
+ object Bar {
+ def apply[a](f: FunIntToA[a]) = new Bar[a](f)
+ def unapply[a](m: Bar[a]) = Some (m.f)
+ }
- def matchAndGetFunFromBar[a](b:Bar[a]) : FunIntToA[a] = { b match { case Bar(x) => x}}
+ def matchAndGetFunFromBar[a](b:Bar[a]) : FunIntToA[a] = { b match { case Bar(x) => x}}
}
diff --git a/tests/neg/tailcall/t6574.scala b/tests/neg/tailcall/t6574.scala
index 59f3108ad..e81c8cd07 100644
--- a/tests/neg/tailcall/t6574.scala
+++ b/tests/neg/tailcall/t6574.scala
@@ -4,7 +4,7 @@ class Bad[X, Y](val v: Int) extends AnyVal {
println("tail")
}
- @annotation.tailrec final def differentTypeArgs : Unit = {
+ @annotation.tailrec final def differentTypeArgs : Unit = {
{(); new Bad[String, Unit](0)}.differentTypeArgs
}
}
diff --git a/tests/neg/templateParents.scala b/tests/neg/templateParents.scala
index 6ecc8c384..a03962525 100644
--- a/tests/neg/templateParents.scala
+++ b/tests/neg/templateParents.scala
@@ -13,4 +13,4 @@ object templateParentsNeg1 {
trait E extends C[Int]
val x = new D with E // error no type fits between inferred bounds
-} \ No newline at end of file
+}
diff --git a/tests/neg/typedIdents.scala b/tests/neg/typedIdents.scala
index b664da42d..cb7cca743 100644
--- a/tests/neg/typedIdents.scala
+++ b/tests/neg/typedIdents.scala
@@ -18,17 +18,17 @@ package P { // `X' bound by package clause
println("L12: " + x) // `x' refers to constant `3' here
locally {
import Q.X._ // `x' and `y' bound by wildcard import
- println("L14: "+x) // reference to `x' is ambiguous here
+ println("L14: " + x) // reference to `x' is ambiguous here
import X.y // `y' bound by explicit import
println("L16: " + y) // `y' refers to `Q.X.y' here
locally {
import P.X._ // `x' and `y' bound by wildcard import
val x = "abc" // `x' bound by local definition
- println("L19: "+y) // reference to `y' is ambiguous here
+ println("L19: " + y) // reference to `y' is ambiguous here
println("L20: " + x) // `x' refers to string ``abc'' here
}
}
}
}
}
-} \ No newline at end of file
+}
diff --git a/tests/neg/typedapply.scala b/tests/neg/typedapply.scala
index ae168bcd9..b80281c9f 100644
--- a/tests/neg/typedapply.scala
+++ b/tests/neg/typedapply.scala
@@ -5,13 +5,13 @@ object typedapply {
foo[Int](1, "abc")
foo[Int, String, String](1, "abc")
-
+
def bar(x: Int) = x
-
+
bar[Int](1)
-
+
def baz[X >: Y, Y <: String](x: X, y: Y) = (x, y)
-
+
baz[Int, String](1, "abc")
-
-} \ No newline at end of file
+
+}
diff --git a/tests/pending/pos/depmet_implicit_oopsla_session.scala b/tests/pending/pos/depmet_implicit_oopsla_session.scala
index aa8478056..a9c8e56ce 100644
--- a/tests/pending/pos/depmet_implicit_oopsla_session.scala
+++ b/tests/pending/pos/depmet_implicit_oopsla_session.scala
@@ -34,7 +34,7 @@ object Sessions {
def addServer =
In{x: Int =>
In{y: Int => System.out.println("Thinking")
- Out(x+y,
+ Out(x + y,
Stop())}}
def addClient =
diff --git a/tests/pending/pos/depmet_implicit_oopsla_session_2.scala b/tests/pending/pos/depmet_implicit_oopsla_session_2.scala
index 4951c10a1..29a76d5cf 100644
--- a/tests/pending/pos/depmet_implicit_oopsla_session_2.scala
+++ b/tests/pending/pos/depmet_implicit_oopsla_session_2.scala
@@ -54,7 +54,7 @@ object Sessions {
def addServer =
In{x: Int =>
In{y: Int => System.out.println("Thinking")
- Out(x+y,
+ Out(x + y,
Stop())}}
def addClient =
diff --git a/tests/pending/pos/depmet_implicit_oopsla_session_simpler.scala b/tests/pending/pos/depmet_implicit_oopsla_session_simpler.scala
index 04b8f94e6..fad5eba40 100644
--- a/tests/pending/pos/depmet_implicit_oopsla_session_simpler.scala
+++ b/tests/pending/pos/depmet_implicit_oopsla_session_simpler.scala
@@ -31,7 +31,7 @@ object Sessions {
def addServer =
In{x: Int =>
In{y: Int => System.out.println("Thinking")
- Out(x+y,
+ Out(x + y,
Stop())}}
def addClient =
diff --git a/tests/pending/pos/existentials-harmful.scala b/tests/pending/pos/existentials-harmful.scala
index 8722852e8..91dbd4dfd 100644
--- a/tests/pending/pos/existentials-harmful.scala
+++ b/tests/pending/pos/existentials-harmful.scala
@@ -17,7 +17,7 @@ object ExistentialsConsideredHarmful {
// 1.
def carry[A <: Animal](box: TransportBox[A]): Unit = {
- println(box.animal.name+" got carried away")
+    println(box.animal.name + " got carried away")
}
val aBox =
@@ -37,7 +37,7 @@ object ExistentialsConsideredHarmful {
abstract class BoxCarrier[R <: Animal](box: TransportBox[R]) {
def speed: Int
- def talkToAnimal: Unit = println("The carrier says hello to"+box.animal.name)
+ def talkToAnimal: Unit = println("The carrier says hello to" + box.animal.name)
}
// 3.
diff --git a/tests/pending/pos/lambdalift1.scala b/tests/pending/pos/lambdalift1.scala
index 01b224c3b..7353c32c6 100644
--- a/tests/pending/pos/lambdalift1.scala
+++ b/tests/pending/pos/lambdalift1.scala
@@ -6,10 +6,10 @@ object test {
def print() = java.lang.System.out.println(x);
class A() {
def g() = {
- class B() {
- def h() = print()
- }
- new B().h()
+ class B() {
+ def h() = print()
+ }
+ new B().h()
}
}
new A().g()
diff --git a/tests/pending/pos/t1756.scala b/tests/pending/pos/t1756.scala
index 1d067c3b0..58f56ccb9 100755..100644
--- a/tests/pending/pos/t1756.scala
+++ b/tests/pending/pos/t1756.scala
@@ -30,7 +30,7 @@ class A extends Ring[A] {
}
class Poly[C <: Ring[C]](val c: C) extends Ring[Poly[C]] {
- def +(that: Poly[C]) = new Poly(this.c+that.c)
+ def +(that: Poly[C]) = new Poly(this.c + that.c)
def *(that: Poly[C]) = new Poly(this.c*that.c)
}
@@ -41,14 +41,14 @@ object Test extends App {
val a = new A
val x = new Poly(new A)
- println(x+a) // works
- println(a+x) // works
+ println(x + a) // works
+ println(a + x) // works
val y = new Poly(new Poly(new A))
- println(x+y*x) // works
- println(x*y+x) // works
- println(y*x+x) // works
+ println(x + y*x) // works
+ println(x*y + x) // works
+ println(y*x + x) // works
- println(x+x*y) // failed before
+ println(x + x*y) // failed before
}
diff --git a/tests/pending/pos/t2913.scala b/tests/pending/pos/t2913.scala
index 21700e71a..21700e71a 100755..100644
--- a/tests/pending/pos/t2913.scala
+++ b/tests/pending/pos/t2913.scala
diff --git a/tests/pending/pos/t3480.scala b/tests/pending/pos/t3480.scala
index f04ea2933..ba2e1a4b8 100644
--- a/tests/pending/pos/t3480.scala
+++ b/tests/pending/pos/t3480.scala
@@ -1,4 +1,4 @@
object Test {
val List(_: _*) = List(1)
- val Array( who, what : _* ) = "Eclipse plugin cannot not handle this" split (" ")
+ val Array( who, what : _* ) = "Eclipse plugin cannot not handle this" split (" ")
}
diff --git a/tests/pending/pos/t3568.scala b/tests/pending/pos/t3568.scala
index 50f0cdb2e..50f0cdb2e 100755..100644
--- a/tests/pending/pos/t3568.scala
+++ b/tests/pending/pos/t3568.scala
diff --git a/tests/pending/pos/t4176b.scala b/tests/pending/pos/t4176b.scala
index 11914c50c..f7d83365c 100644
--- a/tests/pending/pos/t4176b.scala
+++ b/tests/pending/pos/t4176b.scala
@@ -1,5 +1,5 @@
object Test {
- def foo(a: String*) = a
- val fooEta = foo _
- (foo: Seq[String] => Seq[String])
+ def foo(a: String*) = a
+ val fooEta = foo _
+ (foo: Seq[String] => Seq[String])
}
diff --git a/tests/pending/pos/t4553.scala b/tests/pending/pos/t4553.scala
index e9bef4099..e9bef4099 100755..100644
--- a/tests/pending/pos/t4553.scala
+++ b/tests/pending/pos/t4553.scala
diff --git a/tests/pending/pos/t4579.scala b/tests/pending/pos/t4579.scala
index b298ee579..8ce657eff 100644
--- a/tests/pending/pos/t4579.scala
+++ b/tests/pending/pos/t4579.scala
@@ -312,7 +312,7 @@ object LispAny extends Lisp {
case 'cond :: ('else :: expr :: Nil) :: rest =>
normalize(expr);
case 'cond :: (test :: expr :: Nil) :: rest =>
- normalize('if :: test :: expr :: ('cond :: rest) :: Nil)
+ normalize('if :: test :: expr :: ('cond :: rest) :: Nil)
case 'cond :: 'else :: expr :: Nil =>
normalize(expr)
case h :: t =>
@@ -400,7 +400,7 @@ object LispAny extends Lisp {
val globalEnv = EmptyEnvironment
.extend("=", Lambda{
- case List(arg1, arg2) => if(arg1 == arg2) 1 else 0})
+ case List(arg1, arg2) => if (arg1 == arg2) 1 else 0})
.extend("+", Lambda{
case List(arg1: Int, arg2: Int) => arg1 + arg2
case List(arg1: String, arg2: String) => arg1 + arg2})
diff --git a/tests/pending/pos/t5012.scala b/tests/pending/pos/t5012.scala
index 772b8f448..84404495c 100644
--- a/tests/pending/pos/t5012.scala
+++ b/tests/pending/pos/t5012.scala
@@ -4,7 +4,7 @@ class D {
class C {
def m: D = {
- if("abc".length == 0) {
+ if ("abc".length == 0) {
object p // (program point 2)
}
null
diff --git a/tests/pending/pos/t5029.scala b/tests/pending/pos/t5029.scala
index b68fc0367..f32d0c650 100644
--- a/tests/pending/pos/t5029.scala
+++ b/tests/pending/pos/t5029.scala
@@ -1,3 +1,3 @@
object Test {
- (Vector(): Seq[_]) match { case List() => true; case Nil => false }
+ (Vector(): Seq[_]) match { case List() => true; case Nil => false }
}
diff --git a/tests/pending/pos/t5119.scala b/tests/pending/pos/t5119.scala
index 4a67244e5..39f626e53 100644
--- a/tests/pending/pos/t5119.scala
+++ b/tests/pending/pos/t5119.scala
@@ -4,8 +4,8 @@ object Test {
class IMap0[K[_], V[_]](backing: Map[K[_], V[_]]) {
def mapSeparate[VL[_], VR[_]](f: V[_] => ({type l[T] = Either[VL[T], VR[T]]})#l[_] ) = {
backing.view.map { case (k,v) => f(v) match {
- case Left(l) => Left((k, l))
- case Right(r) => Right((k, r))
+ case Left(l) => Left((k, l))
+ case Right(r) => Right((k, r))
}
}
}
diff --git a/tests/pending/pos/t5541.scala b/tests/pending/pos/t5541.scala
index 90e5e4130..54e2b6518 100644
--- a/tests/pending/pos/t5541.scala
+++ b/tests/pending/pos/t5541.scala
@@ -36,9 +36,9 @@ class HASkipListView[ S <: Sys[ S ], A ]( private val l: HASkipList[ S, A ])( im
val szm = sz - 1
val keys = IndexedSeq.tabulate( sz ) { i =>
val key = n.key( i )
- (key, if( isRight && i == szm ) "M" else key.toString)
+ (key, if ( isRight && i == szm ) "M" else key.toString)
}
- val chbo = if( n.isLeaf ) None else {
+ val chbo = if ( n.isLeaf ) None else {
val nb = n.asBranch
Some( IndexedSeq.tabulate( sz )( i => buildBoxMap( nb.down( i ), isRight && (i == szm) )))
}
diff --git a/tests/pending/pos/t573.scala b/tests/pending/pos/t573.scala
index 694d001e3..1aadb446c 100644
--- a/tests/pending/pos/t573.scala
+++ b/tests/pending/pos/t573.scala
@@ -20,7 +20,7 @@ abstract class Linked {
abstract class Node0 {
self: Node =>
- var next : Node = _;
+ var next : Node = _;
var prev : Node = _;
def get(dir : Dir) = if (dir == BEFORE) prev; else next;
diff --git a/tests/pending/pos/t5967.scala b/tests/pending/pos/t5967.scala
index eb9bd6dfa..cd219c031 100644
--- a/tests/pending/pos/t5967.scala
+++ b/tests/pending/pos/t5967.scala
@@ -1,6 +1,6 @@
object Test {
- def f(a: Int*) = a match {
- case 0 :: Nil => "List(0)! My favorite Seq!"
- case _ => a.toString
- }
+ def f(a: Int*) = a match {
+ case 0 :: Nil => "List(0)! My favorite Seq!"
+ case _ => a.toString
+ }
}
diff --git a/tests/pending/pos/t6260a.scala b/tests/pending/pos/t6260a.scala
index 194294e98..21b2fd43c 100644
--- a/tests/pending/pos/t6260a.scala
+++ b/tests/pending/pos/t6260a.scala
@@ -1,7 +1,7 @@
final class Option[+A](val value: A) extends AnyVal
// Was: sandbox/test.scala:21: error: bridge generated for member method f: ()Option[A] in class Bar
-// which overrides method f: ()Option[A] in class Foo"
+// which overrides method f: ()Option[A] in class Foo"
abstract class Foo[A] { def f(): Option[A] }
class Bar[A] extends Foo[A] { def f(): Option[A] = ??? }
diff --git a/tests/pending/pos/t6335.scala b/tests/pending/pos/t6335.scala
index 50e34092d..eb052db19 100644
--- a/tests/pending/pos/t6335.scala
+++ b/tests/pending/pos/t6335.scala
@@ -12,14 +12,14 @@ object E extends Z {
}
trait Z {
- def Z = 0
+ def Z = 0
}
object Test {
- import E._
- 0.xx
+ import E._
+ 0.xx
- "".yy
+ "".yy
true.zz
}
diff --git a/tests/pending/pos/t6966.scala b/tests/pending/pos/t6966.scala
index a43d7c501..cd91221a6 100644
--- a/tests/pending/pos/t6966.scala
+++ b/tests/pending/pos/t6966.scala
@@ -2,13 +2,13 @@ import Ordering.{Byte, comparatorToOrdering}
trait Format[T]
trait InputCache[T]
object CacheIvy {
- implicit def basicInputCache[I](implicit fmt: Format[I], eqv: Equiv[I]): InputCache[I] = null
- implicit def arrEquiv[T](implicit t: Equiv[T]): Equiv[Array[T]] = null
- implicit def hNilCache: InputCache[HNil] = null
- implicit def ByteArrayFormat: Format[Array[Byte]] = null
- type :+:[H, T <: HList] = HCons[H,T]
- implicit def hConsCache[H, T <: HList](implicit head: InputCache[H], tail: InputCache[T]): InputCache[H :+: T] = null
- hConsCache[Array[Byte], HNil]
+ implicit def basicInputCache[I](implicit fmt: Format[I], eqv: Equiv[I]): InputCache[I] = null
+ implicit def arrEquiv[T](implicit t: Equiv[T]): Equiv[Array[T]] = null
+ implicit def hNilCache: InputCache[HNil] = null
+ implicit def ByteArrayFormat: Format[Array[Byte]] = null
+ type :+:[H, T <: HList] = HCons[H,T]
+ implicit def hConsCache[H, T <: HList](implicit head: InputCache[H], tail: InputCache[T]): InputCache[H :+: T] = null
+ hConsCache[Array[Byte], HNil]
}
sealed trait HList
diff --git a/tests/pending/pos/t7011.scala b/tests/pending/pos/t7011.scala
index 18d7aeee7..f1f71b9be 100644
--- a/tests/pending/pos/t7011.scala
+++ b/tests/pending/pos/t7011.scala
@@ -1,5 +1,5 @@
object bar {
- def foo: Unit = {
+ def foo: Unit = {
lazy val x = 42
{()=>x}
diff --git a/tests/pending/pos/t7022.scala b/tests/pending/pos/t7022.scala
index 0609e2d25..c86602664 100644
--- a/tests/pending/pos/t7022.scala
+++ b/tests/pending/pos/t7022.scala
@@ -1,9 +1,9 @@
class Catch[+T] {
- def either[U >: T](body: => U): Either[Throwable, U] = ???
+ def either[U >: T](body: => U): Either[Throwable, U] = ???
}
object Test {
- implicit class RichCatch[T](val c: Catch[T]) extends AnyVal {
- def validation[U >: T](u: => U): Either[Throwable, U] = c.either(u)
- }
+ implicit class RichCatch[T](val c: Catch[T]) extends AnyVal {
+ def validation[U >: T](u: => U): Either[Throwable, U] = c.either(u)
+ }
}
diff --git a/tests/pending/pos/t796.scala b/tests/pending/pos/t796.scala
index 066625179..b1463ed92 100644
--- a/tests/pending/pos/t796.scala
+++ b/tests/pending/pos/t796.scala
@@ -16,7 +16,7 @@ object Test extends App {
}
def test(x: Int) = x match {
- case Twice(y) => "x is two times "+y
+ case Twice(y) => "x is two times " + y
case _ => "x is odd"
}
diff --git a/tests/pending/pos/t7983.scala b/tests/pending/pos/t7983.scala
index bae9f3333..fbeb7d3c5 100644
--- a/tests/pending/pos/t7983.scala
+++ b/tests/pending/pos/t7983.scala
@@ -1,15 +1,15 @@
package foo.bar.baz // the package nesting level material to this bug
-
+
class DivergenceTest {
-
+
trait ColumnBase[T]
-
+
trait ShapeLevel
trait Flat extends ShapeLevel
trait Lower extends Flat
-
+
class Shape2[Level <: ShapeLevel, -M, U]
-
+
implicit final def columnBaseShape[Level >: Flat <: ShapeLevel, T, C <: ColumnBase[_]]
(implicit ev: C <:< ColumnBase[T]
): Shape2[Level, C, T] = ???
@@ -19,12 +19,12 @@ class DivergenceTest {
(implicit u1: Shape2[_ <: Level, M1, U1],
u2: Shape2[_ <: Level, M2, U2]
): Shape2[Level, (M1,M2), (U1,U2)] = ???
-
+
def foo: Unit = {
class Coffees extends ColumnBase[Int]
-
+
def map1[F, T](f: F)(implicit shape: Shape2[_ <: Flat, F, T]) = ???
-
+
map1(((1, null: Coffees), 1))
map1(((null: Coffees, 1), 1)) // fails with implicit divergence error in 2.11.0-M6, works under 2.10.3
}
diff --git a/tests/pending/pos/t7987/Test_2.scala b/tests/pending/pos/t7987/Test_2.scala
index 5896fdb51..aaa21ec4c 100644
--- a/tests/pending/pos/t7987/Test_2.scala
+++ b/tests/pending/pos/t7987/Test_2.scala
@@ -1,12 +1,12 @@
-class C[T] {
- def foo = 0
-}
+class C[T] {
+ def foo = 0
+}
object Test {
- implicit def AnyToC[T](a: Any): C[T] = new C[T]
+ implicit def AnyToC[T](a: Any): C[T] = new C[T]
// was: "macro not expanded"
- Macro {
- "".foo
- ()
+ Macro {
+ "".foo
+ ()
}
}
diff --git a/tests/pending/pos/t8023.scala b/tests/pending/pos/t8023.scala
index 86824084e..502b5c55d 100644
--- a/tests/pending/pos/t8023.scala
+++ b/tests/pending/pos/t8023.scala
@@ -4,7 +4,7 @@ import language._
object Test {
def foo = (null: Any) match {
case a: A[k] =>
- // error: kinds of the type arguments (k) do not conform to the
+ // error: kinds of the type arguments (k) do not conform to the
// expected kinds of the type parameters (type K) in class B.
new B[k]()
}
diff --git a/tests/pending/pos/t8111.scala b/tests/pending/pos/t8111.scala
index 7ec002c9b..3f0e766ce 100644
--- a/tests/pending/pos/t8111.scala
+++ b/tests/pending/pos/t8111.scala
@@ -7,16 +7,16 @@ trait T {
foo((u: Unit) => ma)
foo(0, (u: Any) => ma) apply ()
- // crash due to side effects on the onwer of the symbol in the
+    // crash due to side effects on the owner of the symbol in the
// qualifier or arguments of the application during an abandoned
// names/defaults transform. The code type checkes because of
// autp-tupling which promotes and empty parmater list to `(): Unit`
foo((u: Any) => ma)()
-
+
{{(u: Any) => ma}; this}.foo(0)()
-
+
foo({def foo = ma; 0})()
-
+
{def foo = ma; this}.foo(0)()
}
diff --git a/tests/pending/pos/t8301b.scala b/tests/pending/pos/t8301b.scala
index 5641547c1..4dd39139d 100644
--- a/tests/pending/pos/t8301b.scala
+++ b/tests/pending/pos/t8301b.scala
@@ -2,18 +2,18 @@
trait Universe {
type Name >: Null <: AnyRef with NameApi
trait NameApi
-
+
type TermName >: Null <: TermNameApi with Name
trait TermNameApi extends NameApi
}
-
+
object Test extends App {
val u: Universe = ???
import u._
-
+
val ScalaName: TermName = ???
locally {
-
+
??? match {
case Test.ScalaName => ???
}
@@ -25,7 +25,7 @@ object Test extends App {
import ScalaName._
// both the pattern and import led to
- // stable identifier required, but SN found. Note that value SN
+ // stable identifier required, but SN found. Note that value SN
// is not stable because its type, Test.u.TermName, is volatile.
val SN = ScalaName
??? match {
diff --git a/tests/pending/pos/t8367.scala b/tests/pending/pos/t8367.scala
index 9ac2ce7c2..f0c329211 100644
--- a/tests/pending/pos/t8367.scala
+++ b/tests/pending/pos/t8367.scala
@@ -2,10 +2,10 @@ package java.lang
// SI-8367 shows something is wrong with primaryConstructor and it was made worse with the fix for SI-8192
// perhaps primaryConstructor should not return NoSymbol when isJavaDefined
-// or, perhaps isJavaDefined should be refined (the package definition above is pretty sneaky)
+// or, perhaps isJavaDefined should be refined (the package definition above is pretty sneaky)
// also, why does this only happen for a (scala-defined!) class with this special name?
// (there are a couple of others: CloneNotSupportedException,InterruptedException)
class Throwable
-// class CloneNotSupportedException
+// class CloneNotSupportedException
// class InterruptedException
diff --git a/tests/pending/pos/tcpoly_seq.scala b/tests/pending/pos/tcpoly_seq.scala
index 731fe048a..e8711d1c4 100644
--- a/tests/pending/pos/tcpoly_seq.scala
+++ b/tests/pending/pos/tcpoly_seq.scala
@@ -125,7 +125,7 @@ trait HOSeq {
// TODO: the var tl approach does not seem to work because subtyping isn't fully working yet
final case class ::[+b](hd: b, private val tl: List[b]) extends List[b] {
def head = hd
- def tail = if(tl==null) this else tl // hack
+ def tail = if (tl==null) this else tl // hack
override def isEmpty: Boolean = false
}
diff --git a/tests/pending/pos/tcpoly_seq_typealias.scala b/tests/pending/pos/tcpoly_seq_typealias.scala
index 8d2f6e7c3..b758ecd99 100644
--- a/tests/pending/pos/tcpoly_seq_typealias.scala
+++ b/tests/pending/pos/tcpoly_seq_typealias.scala
@@ -129,7 +129,7 @@ trait HOSeq {
// TODO: the var tl approach does not seem to work because subtyping isn't fully working yet
final case class ::[+b](hd: b, private val tl: List[b]) extends List[b] {
def head = hd
- def tail = if(tl==null) this else tl // hack
+ def tail = if (tl==null) this else tl // hack
override def isEmpty: Boolean = false
}
diff --git a/tests/pending/pos/test5.scala b/tests/pending/pos/test5.scala
index 4dbafc9ac..c19478048 100644
--- a/tests/pending/pos/test5.scala
+++ b/tests/pending/pos/test5.scala
@@ -12,12 +12,12 @@ object test {
class M[P]() {
abstract class I[X]() {
- // Methods to check the type X and P as seen from instances of I
- def chk_ix(x: X): Unit = ();
- def chk_ip(p: P): Unit;
+ // Methods to check the type X and P as seen from instances of I
+ def chk_ix(x: X): Unit = ();
+ def chk_ip(p: P): Unit;
- // Value with type X as seen from instances of I
- def val_ix: X = val_ix;
+ // Value with type X as seen from instances of I
+ def val_ix: X = val_ix;
}
val i:I[G[P]] = null;
@@ -31,13 +31,13 @@ object test {
val j:J[G[Q]] = null;
abstract class J[Y]() extends I[G[Y]]() {
- // Values with types Y and X as seen from instances of J
- def val_jy: Y = val_jy;
- def val_jx: G[Y] = g[Y](val_jy);
+ // Values with types Y and X as seen from instances of J
+ def val_jy: Y = val_jy;
+ def val_jx: G[Y] = g[Y](val_jy);
- // Check type P
- chk_ip(val_mp);
- chk_ip(val_np);
+ // Check type P
+ chk_ip(val_mp);
+ chk_ip(val_np);
}
// Values with types Q, X.P, i.X, j.Y and j.X as seen from instances of N
diff --git a/tests/pending/pos/test5refine.scala b/tests/pending/pos/test5refine.scala
index 5459b3b97..09ea179da 100644
--- a/tests/pending/pos/test5refine.scala
+++ b/tests/pending/pos/test5refine.scala
@@ -13,14 +13,14 @@ object test {
abstract class M() {
type P;
abstract class I() {
- type X;
+ type X;
- // Methods to check the type X and P as seen from instances of I
- def chk_ix(x: X): Unit = {}
- def chk_ip(p: P): Unit = {}
+ // Methods to check the type X and P as seen from instances of I
+ def chk_ix(x: X): Unit = {}
+ def chk_ip(p: P): Unit = {}
- // Value with type X as seen from instances of I
- def val_ix: X = val_ix;
+ // Value with type X as seen from instances of I
+ def val_ix: X = val_ix;
}
val i: I { type X = G { type Ig = P } } = null;
@@ -36,15 +36,15 @@ object test {
val j:J { type Y = G { type Ig = Q } } = null;
abstract class J() extends I() {
- type Y;
- type X = G { type Ig = Y; };
- // Values with types Y and X as seen from instances of J
- def val_jy: Y = val_jy;
- def val_jx: G { type Ig = Y; } = g[Y](val_jy);
-
- // Check type P
- chk_ip(val_mp);
- chk_ip(val_np);
+ type Y;
+ type X = G { type Ig = Y; };
+ // Values with types Y and X as seen from instances of J
+ def val_jy: Y = val_jy;
+ def val_jx: G { type Ig = Y; } = g[Y](val_jy);
+
+ // Check type P
+ chk_ip(val_mp);
+ chk_ip(val_np);
}
// Values with types Q, X.P, i.X, j.Y and j.X as seen from instances of N
diff --git a/tests/pending/pos/unapplySeq.scala b/tests/pending/pos/unapplySeq.scala
index 6d13cc8b5..cefe1cb8f 100644
--- a/tests/pending/pos/unapplySeq.scala
+++ b/tests/pending/pos/unapplySeq.scala
@@ -1,6 +1,6 @@
object FooSeq {
def unapplySeq(x:Any): Option[Product2[Int,Seq[String]]] = {
- if(x.isInstanceOf[Bar]) {
+ if (x.isInstanceOf[Bar]) {
val y = x.asInstanceOf[Bar]
Some(y.size, y.name)
} else None
@@ -9,12 +9,12 @@ object FooSeq {
def main(args:Array[String]) = {
val b = new Bar
b match {
- case FooSeq(s:Int,_,n:String) => Console.println("size "+s+" name "+n)
+      case FooSeq(s:Int,_,n:String) => Console.println("size " + s + " name " + n)
}
b.size = 54
b.name = List("large","L")
b match {
- case FooSeq(s:Int,_,n:String) => Console.println("size "+s+" name "+n)
+      case FooSeq(s:Int,_,n:String) => Console.println("size " + s + " name " + n)
}
}
}
diff --git a/tests/pickling/Coder.scala b/tests/pickling/Coder.scala
index 77bbd134c..6eb1ad55a 100644
--- a/tests/pickling/Coder.scala
+++ b/tests/pickling/Coder.scala
@@ -7,15 +7,15 @@ class Coder(words: List[String]) {
private val mnemonics = Map(
'2' -> "ABC", '3' -> "DEF", '4' -> "GHI", '5' -> "JKL",
'6' -> "MNO", '7' -> "PQRS", '8' -> "TUV", '9' -> "WXYZ")
-
+
('1', "1") match {
- case (digit, str) => true
+ case (digit, str) => true
case _ => false
}
/** Invert the mnemonics map to give a map from chars 'A' ... 'Z' to '2' ... '9' */
- private val charCode0: Map[Char, Char] = mnemonics withFilter {
+ private val charCode0: Map[Char, Char] = mnemonics withFilter {
case (digit, str) => true
case _ => false
} flatMap { x$1 =>
@@ -24,18 +24,18 @@ class Coder(words: List[String]) {
}
}
- private val charCode: Map[Char, Char] =
+ private val charCode: Map[Char, Char] =
for ((digit, str) <- mnemonics; ltr <- str) yield ltr -> digit
/** Maps a word to the digit string it can represent */
private def wordCode(word: String): String = word map charCode
/** A map from digit strings to the words that represent them */
- private val wordsForNum: Map[String, List[String]] =
+ private val wordsForNum: Map[String, List[String]] =
words groupBy wordCode withDefaultValue Nil
/** All ways to encode a number as a list of words */
- def encode(number: String): Set[List[String]] =
+ def encode(number: String): Set[List[String]] =
if (number.isEmpty) Set(Nil)
else {
for {
@@ -44,7 +44,7 @@ class Coder(words: List[String]) {
rest <- encode(number drop splitPoint)
} yield word :: rest
}.toSet
-
+
/** Maps a number to a list of all word phrases that can represent it */
def translate(number: String): Set[String] = encode(number) map (_ mkString " ")
diff --git a/tests/pickling/Labels.scala b/tests/pickling/Labels.scala
index 4a84175af..3dc7304d6 100644
--- a/tests/pickling/Labels.scala
+++ b/tests/pickling/Labels.scala
@@ -1,18 +1,18 @@
object Labels {
def main(args: Array[String]): Unit = {
var i = 10
- while(i>0) {
+ while(i>0) {
var j = 0
while(j<i) {
- println(j+" " + i)
- j = j +1
- }
+        println(j + " " + i)
+ j = j + 1
+ }
i = i - 1}
pattern(1)
pattern(2)
- pattern(3)
+ pattern(3)
}
-
+
def pattern(a: Int) = a match {
case 1 if (a>0) => println("one")
case t@2 => println("two" + t)
diff --git a/tests/pickling/nameddefaults.scala b/tests/pickling/nameddefaults.scala
index 671f14a07..20a0eae47 100644
--- a/tests/pickling/nameddefaults.scala
+++ b/tests/pickling/nameddefaults.scala
@@ -1,7 +1,7 @@
object nameddefaults {
def foo(first: Int, second: Int = 2, third: Int = 3) = first + second
-
+
var x = 1
var y = 2
@@ -12,7 +12,7 @@ object nameddefaults {
foo(1)
// named and missing arguments
-
+
foo(first = 1, second = 3)
foo(second = 3, first = 1)
@@ -20,7 +20,7 @@ object nameddefaults {
foo(first = 2, third = 3)
foo(2, third = 3)
-
+
// same but with non-idempotent expressions
foo(first = x, second = y)
@@ -30,11 +30,11 @@ object nameddefaults {
foo(first = x, third = y)
foo(x, third = y)
-
+
// The same thing, but for classes
-
+
class C(first: Int, second: Int = 2, third: Int = 3) {}
-
+
new C(1, 2, 3)
new C(1, 2)
@@ -42,7 +42,7 @@ object nameddefaults {
new C(1)
// named and missing arguments
-
+
new C(first = 1, second = 3)
new C(second = 3, first = 1)
@@ -50,7 +50,7 @@ object nameddefaults {
new C(first = 2, third = 3)
new C(2, third = 3)
-
+
// same but with non-idempotent expressions
new C(first = x, second = y)
diff --git a/tests/pickling/selftypes.scala b/tests/pickling/selftypes.scala
index 243405f77..5180419d1 100644
--- a/tests/pickling/selftypes.scala
+++ b/tests/pickling/selftypes.scala
@@ -1,20 +1,20 @@
object selftypes {
-
+
trait A { self: AB =>
-
+
type AA = List[this.BX]
-
+
class AX
-
+
}
-
+
trait B { self: AB =>
-
+
type BB = AA
-
+
class BX
}
-
+
class AB extends A with B
-} \ No newline at end of file
+}
diff --git a/tests/pickling/tryTyping.scala b/tests/pickling/tryTyping.scala
index a2aeb17c8..cfa2c7a77 100644
--- a/tests/pickling/tryTyping.scala
+++ b/tests/pickling/tryTyping.scala
@@ -17,4 +17,4 @@ object tryTyping{
try{???; 1}
catch a3(3)
}
-} \ No newline at end of file
+}
diff --git a/tests/pickling/varargs.scala b/tests/pickling/varargs.scala
index 3739636b8..616456616 100644
--- a/tests/pickling/varargs.scala
+++ b/tests/pickling/varargs.scala
@@ -10,4 +10,4 @@ object varargs {
g(Nil: _*)
g(1)
g()
-} \ No newline at end of file
+}
diff --git a/tests/pos/Bridges.scala b/tests/pos/Bridges.scala
index a7350d785..6c2115d7e 100644
--- a/tests/pos/Bridges.scala
+++ b/tests/pos/Bridges.scala
@@ -1,6 +1,6 @@
abstract class X[T]{
def go2(x:T)(y:T = x): T = y
- def go: T
+ def go: T
def go1(x: T) = x
}
diff --git a/tests/pos/Coder.scala b/tests/pos/Coder.scala
index 77bbd134c..6eb1ad55a 100644
--- a/tests/pos/Coder.scala
+++ b/tests/pos/Coder.scala
@@ -7,15 +7,15 @@ class Coder(words: List[String]) {
private val mnemonics = Map(
'2' -> "ABC", '3' -> "DEF", '4' -> "GHI", '5' -> "JKL",
'6' -> "MNO", '7' -> "PQRS", '8' -> "TUV", '9' -> "WXYZ")
-
+
('1', "1") match {
- case (digit, str) => true
+ case (digit, str) => true
case _ => false
}
/** Invert the mnemonics map to give a map from chars 'A' ... 'Z' to '2' ... '9' */
- private val charCode0: Map[Char, Char] = mnemonics withFilter {
+ private val charCode0: Map[Char, Char] = mnemonics withFilter {
case (digit, str) => true
case _ => false
} flatMap { x$1 =>
@@ -24,18 +24,18 @@ class Coder(words: List[String]) {
}
}
- private val charCode: Map[Char, Char] =
+ private val charCode: Map[Char, Char] =
for ((digit, str) <- mnemonics; ltr <- str) yield ltr -> digit
/** Maps a word to the digit string it can represent */
private def wordCode(word: String): String = word map charCode
/** A map from digit strings to the words that represent them */
- private val wordsForNum: Map[String, List[String]] =
+ private val wordsForNum: Map[String, List[String]] =
words groupBy wordCode withDefaultValue Nil
/** All ways to encode a number as a list of words */
- def encode(number: String): Set[List[String]] =
+ def encode(number: String): Set[List[String]] =
if (number.isEmpty) Set(Nil)
else {
for {
@@ -44,7 +44,7 @@ class Coder(words: List[String]) {
rest <- encode(number drop splitPoint)
} yield word :: rest
}.toSet
-
+
/** Maps a number to a list of all word phrases that can represent it */
def translate(number: String): Set[String] = encode(number) map (_ mkString " ")
diff --git a/tests/pos/Labels.scala b/tests/pos/Labels.scala
index d82287313..f1a120920 100644
--- a/tests/pos/Labels.scala
+++ b/tests/pos/Labels.scala
@@ -5,18 +5,18 @@ import dotty.tools.dotc.ast.tpd._
object Labels {
def main(args: Array[String]): Unit = {
var i = 10
- while(i>0) {
+ while(i>0) {
var j = 0
while(j<i) {
- println(j+" " + i)
- j = j +1
- }
+ println(j +" " + i)
+ j = j + 1
+ }
i = i - 1}
pattern(1)
pattern(2)
- pattern(3)
+ pattern(3)
}
-
+
def pattern(a: Int) = a match {
case 1 if (a>0) => println("one")
case t@2 => println("two" + t)
diff --git a/tests/pos/List1.scala b/tests/pos/List1.scala
index 30ebf5e1e..733ef376d 100644
--- a/tests/pos/List1.scala
+++ b/tests/pos/List1.scala
@@ -21,7 +21,7 @@ object lists {
def foo = {
val intnil = Nil[Int];
- val intlist = intnil.prepend(1).prepend(1+1);
+ val intlist = intnil.prepend(1).prepend(1 + 1);
val x: Int = intlist.head;
val strnil = Nil[String];
val strlist = strnil.prepend("A").prepend("AA");
@@ -33,8 +33,8 @@ object lists {
def isEmpty: Boolean = false;
def head: Int = 1;
def foo: List[Int] { def isEmpty: Boolean; def head: Int; def tail: List[Int] } = Nil[Int];
- def tail0: List[Int] = foo.prepend(1).prepend(1+1);
- def tail: List[Int] = Nil[Int].prepend(1).prepend(1+1);
+ def tail0: List[Int] = foo.prepend(1).prepend(1 + 1);
+ def tail: List[Int] = Nil[Int].prepend(1).prepend(1 + 1);
}
def foo2 = {
diff --git a/tests/pos/Meter.scala b/tests/pos/Meter.scala
index 53be6f9d0..bbee710c4 100644
--- a/tests/pos/Meter.scala
+++ b/tests/pos/Meter.scala
@@ -12,7 +12,7 @@ package a {
def < (other: Meter): Boolean = this.underlying < other.underlying
def toFoot: Foot = new Foot(this.underlying * 0.3048)
override def print = { Console.print(">>>"); super.print; proprint }
- override def toString: String = underlying.toString+"m"
+ override def toString: String = underlying.toString +"m"
}
object Meter extends (Double => Meter) {
@@ -30,7 +30,7 @@ package a {
class Foot(val unbox: Double) extends AnyVal {
def + (other: Foot): Foot =
new Foot(this.unbox + other.unbox)
- override def toString = unbox.toString+"ft"
+ override def toString = unbox.toString +"ft"
}
object Foot {
implicit val boxings: BoxingConversions[Foot, Double] = new BoxingConversions[Foot, Double] {
@@ -65,23 +65,23 @@ object Test extends App {
//println((x + x) / x)
println((x + x) / 0.5)
println((x < x).toString)
- println("x.isInstanceOf[Meter]: "+x.isInstanceOf[Meter])
+ println("x.isInstanceOf[Meter]: " + x.isInstanceOf[Meter])
- println("x.hashCode: "+x.hashCode)
- println("x == 1: "+(x == 1))
- println("x == y: "+(x == y))
+ println("x.hashCode: " + x.hashCode)
+ println("x == 1: " +(x == 1))
+ println("x == y: " +(x == y))
assert(x.hashCode == (1.0).hashCode)
val a: Any = x
val b: Any = y
- println("a == b: "+(a == b))
+ println("a == b: " +(a == b))
{ println("testing native arrays")
val arr = Array(x, y + x)
println(arr.deep)
def foo[T <: Printable](x: Array[T]) = {
- for (i <- 0 until x.length) { x(i).print; println(" "+x(i)) }
+ for (i <- 0 until x.length) { x(i).print; println(" " + x(i)) }
}
val m = arr(0)
println(m)
@@ -93,7 +93,7 @@ object Test extends App {
// val arr = FlatArray(x, y + x)
// println(arr)
// def foo(x: FlatArray[Meter]) {
- // for (i <- 0 until x.length) { x(i).print; println(" "+x(i)) }
+ // for (i <- 0 until x.length) { x(i).print; println(" " + x(i)) }
// }
// val m = arr(0)
// println(m)
diff --git a/tests/pos/SI-7638.scala b/tests/pos/SI-7638.scala
index ed581efe9..eea302e17 100644
--- a/tests/pos/SI-7638.scala
+++ b/tests/pos/SI-7638.scala
@@ -36,16 +36,16 @@ object vectorOrder {
* unhandled exception while transforming SI-7638.scala
* error: uncaught exception during compilation: java.lang.UnsupportedOperationException
* error: java.lang.UnsupportedOperationException: tail of empty list
- * at scala.collection.immutable.Nil$.tail(List.scala:339)
- * at scala.collection.immutable.Nil$.tail(List.scala:334)
- * at scala.tools.nsc.transform.Mixin$$anonfun$scala$tools$nsc$transform$Mixin$$rebindSuper$1.apply(Mixin.scala:123)
- * at scala.tools.nsc.transform.Mixin$$anonfun$scala$tools$nsc$transform$Mixin$$rebindSuper$1.apply(Mixin.scala:122)
- * at scala.reflect.internal.SymbolTable.atPhase(SymbolTable.scala:207)
- * at scala.reflect.internal.SymbolTable.afterPhase(SymbolTable.scala:216)
- * at scala.tools.nsc.Global.afterPickler(Global.scala:1104)
- * at scala.tools.nsc.transform.Mixin.scala$tools$nsc$transform$Mixin$$rebindSuper(Mixin.scala:122)
- * at scala.tools.nsc.transform.Mixin$$anonfun$scala$tools$nsc$transform$Mixin$$mixinTraitMembers$1$1.apply(Mixin.scala:339)
- * at scala.tools.nsc.transform.Mixin$$anonfun$scala$tools$nsc$transform$Mixin$$mixinTraitMembers$1$1.apply(Mixin.scala:292)
+ * at scala.collection.immutable.Nil$.tail(List.scala:339)
+ * at scala.collection.immutable.Nil$.tail(List.scala:334)
+ * at scala.tools.nsc.transform.Mixin$$anonfun$scala$tools$nsc$transform$Mixin$$rebindSuper$1.apply(Mixin.scala:123)
+ * at scala.tools.nsc.transform.Mixin$$anonfun$scala$tools$nsc$transform$Mixin$$rebindSuper$1.apply(Mixin.scala:122)
+ * at scala.reflect.internal.SymbolTable.atPhase(SymbolTable.scala:207)
+ * at scala.reflect.internal.SymbolTable.afterPhase(SymbolTable.scala:216)
+ * at scala.tools.nsc.Global.afterPickler(Global.scala:1104)
+ * at scala.tools.nsc.transform.Mixin.scala$tools$nsc$transform$Mixin$$rebindSuper(Mixin.scala:122)
+ * at scala.tools.nsc.transform.Mixin$$anonfun$scala$tools$nsc$transform$Mixin$$mixinTraitMembers$1$1.apply(Mixin.scala:339)
+ * at scala.tools.nsc.transform.Mixin$$anonfun$scala$tools$nsc$transform$Mixin$$mixinTraitMembers$1$1.apply(Mixin.scala:292)
*/
new ArrayVectorOrder[A] { }
}
diff --git a/tests/pos/SI-7638a.scala b/tests/pos/SI-7638a.scala
index 060bd0ffa..7a73c8ea3 100644
--- a/tests/pos/SI-7638a.scala
+++ b/tests/pos/SI-7638a.scala
@@ -37,16 +37,16 @@ object vectorOrder {
* unhandled exception while transforming SI-7638.scala
* error: uncaught exception during compilation: java.lang.UnsupportedOperationException
* error: java.lang.UnsupportedOperationException: tail of empty list
- * at scala.collection.immutable.Nil$.tail(List.scala:339)
- * at scala.collection.immutable.Nil$.tail(List.scala:334)
- * at scala.tools.nsc.transform.Mixin$$anonfun$scala$tools$nsc$transform$Mixin$$rebindSuper$1.apply(Mixin.scala:123)
- * at scala.tools.nsc.transform.Mixin$$anonfun$scala$tools$nsc$transform$Mixin$$rebindSuper$1.apply(Mixin.scala:122)
- * at scala.reflect.internal.SymbolTable.atPhase(SymbolTable.scala:207)
- * at scala.reflect.internal.SymbolTable.afterPhase(SymbolTable.scala:216)
- * at scala.tools.nsc.Global.afterPickler(Global.scala:1104)
- * at scala.tools.nsc.transform.Mixin.scala$tools$nsc$transform$Mixin$$rebindSuper(Mixin.scala:122)
- * at scala.tools.nsc.transform.Mixin$$anonfun$scala$tools$nsc$transform$Mixin$$mixinTraitMembers$1$1.apply(Mixin.scala:339)
- * at scala.tools.nsc.transform.Mixin$$anonfun$scala$tools$nsc$transform$Mixin$$mixinTraitMembers$1$1.apply(Mixin.scala:292)
+ * at scala.collection.immutable.Nil$.tail(List.scala:339)
+ * at scala.collection.immutable.Nil$.tail(List.scala:334)
+ * at scala.tools.nsc.transform.Mixin$$anonfun$scala$tools$nsc$transform$Mixin$$rebindSuper$1.apply(Mixin.scala:123)
+ * at scala.tools.nsc.transform.Mixin$$anonfun$scala$tools$nsc$transform$Mixin$$rebindSuper$1.apply(Mixin.scala:122)
+ * at scala.reflect.internal.SymbolTable.atPhase(SymbolTable.scala:207)
+ * at scala.reflect.internal.SymbolTable.afterPhase(SymbolTable.scala:216)
+ * at scala.tools.nsc.Global.afterPickler(Global.scala:1104)
+ * at scala.tools.nsc.transform.Mixin.scala$tools$nsc$transform$Mixin$$rebindSuper(Mixin.scala:122)
+ * at scala.tools.nsc.transform.Mixin$$anonfun$scala$tools$nsc$transform$Mixin$$mixinTraitMembers$1$1.apply(Mixin.scala:339)
+ * at scala.tools.nsc.transform.Mixin$$anonfun$scala$tools$nsc$transform$Mixin$$mixinTraitMembers$1$1.apply(Mixin.scala:292)
*/
new ArrayVectorOrder[A] { }
}
diff --git a/tests/pos/assignments.scala b/tests/pos/assignments.scala
index 94223b62c..9801bdca2 100644
--- a/tests/pos/assignments.scala
+++ b/tests/pos/assignments.scala
@@ -3,8 +3,8 @@ object assignments {
var a = Array(1, 2, 3)
var i = 0
a(i) = a(i) * 2
- a(i+1) += 1
-
+ a(i + 1) += 1
+
class C {
var myX = 0
def x = myX
@@ -13,11 +13,11 @@ object assignments {
x = x + 1
x *= 2
}
-
+
var c = new C
c.x =c.x + 1
c.x *= 2
-
+
val cc = c
import cc._
x = x + 1
diff --git a/tests/pos/blockescapes.scala b/tests/pos/blockescapes.scala
index 68ce37ed3..589953fe5 100644
--- a/tests/pos/blockescapes.scala
+++ b/tests/pos/blockescapes.scala
@@ -1,11 +1,11 @@
object blockescapes {
-
+
{ val x = 0; () }
val x0 = { class Foo; new Foo }
val x1 = {}
var x2 = { val z = 0 }
val m1 = { val x = 2; x }
-
+
trait T
def m0: T = { object Foo { class Bar extends T } ; new Foo.Bar }
-} \ No newline at end of file
+}
diff --git a/tests/pos/implicits1.scala b/tests/pos/implicits1.scala
index 47b7f1c52..d8ca76de5 100644
--- a/tests/pos/implicits1.scala
+++ b/tests/pos/implicits1.scala
@@ -43,7 +43,7 @@ object Implicits {
val e: Int = z.foo(true)
// Haoyi Li's example on scala-user:
-
+
trait Modifier
implicit def stringNode(v: String): Modifier = new Modifier {}
diff --git a/tests/pos/inferred.scala b/tests/pos/inferred.scala
index 87bbd9473..311e56a50 100644
--- a/tests/pos/inferred.scala
+++ b/tests/pos/inferred.scala
@@ -55,4 +55,4 @@ object Inferred {
val ints3 = new CONS[Int](1, NIL)
val ints4 = new CONS(1, NIL)
-} \ No newline at end of file
+}
diff --git a/tests/pos/java-interop/t2433/Test.scala b/tests/pos/java-interop/t2433/Test.scala
index 02fd89b64..02fd89b64 100755..100644
--- a/tests/pos/java-interop/t2433/Test.scala
+++ b/tests/pos/java-interop/t2433/Test.scala
diff --git a/tests/pos/java-interop/t2940/Error.scala b/tests/pos/java-interop/t2940/Error.scala
index 92f08f580..305d8aa4c 100644
--- a/tests/pos/java-interop/t2940/Error.scala
+++ b/tests/pos/java-interop/t2940/Error.scala
@@ -1,5 +1,5 @@
abstract class Error {
- val c: Cycle[_]
+ val c: Cycle[_]
}
object Test {
diff --git a/tests/pos/java-interop/t2956/t2956.scala b/tests/pos/java-interop/t2956/t2956.scala
index 33803874b..33803874b 100755..100644
--- a/tests/pos/java-interop/t2956/t2956.scala
+++ b/tests/pos/java-interop/t2956/t2956.scala
diff --git a/tests/pos/nameddefaults.scala b/tests/pos/nameddefaults.scala
index 671f14a07..20a0eae47 100644
--- a/tests/pos/nameddefaults.scala
+++ b/tests/pos/nameddefaults.scala
@@ -1,7 +1,7 @@
object nameddefaults {
def foo(first: Int, second: Int = 2, third: Int = 3) = first + second
-
+
var x = 1
var y = 2
@@ -12,7 +12,7 @@ object nameddefaults {
foo(1)
// named and missing arguments
-
+
foo(first = 1, second = 3)
foo(second = 3, first = 1)
@@ -20,7 +20,7 @@ object nameddefaults {
foo(first = 2, third = 3)
foo(2, third = 3)
-
+
// same but with non-idempotent expressions
foo(first = x, second = y)
@@ -30,11 +30,11 @@ object nameddefaults {
foo(first = x, third = y)
foo(x, third = y)
-
+
// The same thing, but for classes
-
+
class C(first: Int, second: Int = 2, third: Int = 3) {}
-
+
new C(1, 2, 3)
new C(1, 2)
@@ -42,7 +42,7 @@ object nameddefaults {
new C(1)
// named and missing arguments
-
+
new C(first = 1, second = 3)
new C(second = 3, first = 1)
@@ -50,7 +50,7 @@ object nameddefaults {
new C(first = 2, third = 3)
new C(2, third = 3)
-
+
// same but with non-idempotent expressions
new C(first = x, second = y)
diff --git a/tests/pos/packageobject.scala b/tests/pos/packageobject.scala
index 1a373c248..b49a8489a 100644
--- a/tests/pos/packageobject.scala
+++ b/tests/pos/packageobject.scala
@@ -1,5 +1,5 @@
package test {
- object `package` {}
+ object `package` {}
}
-package object foo {} \ No newline at end of file
+package object foo {}
diff --git a/tests/pos/selftypes.scala b/tests/pos/selftypes.scala
index 243405f77..5180419d1 100644
--- a/tests/pos/selftypes.scala
+++ b/tests/pos/selftypes.scala
@@ -1,20 +1,20 @@
object selftypes {
-
+
trait A { self: AB =>
-
+
type AA = List[this.BX]
-
+
class AX
-
+
}
-
+
trait B { self: AB =>
-
+
type BB = AA
-
+
class BX
}
-
+
class AB extends A with B
-} \ No newline at end of file
+}
diff --git a/tests/pos/sigs.scala b/tests/pos/sigs.scala
index 4c1973cad..a9a1b464e 100644
--- a/tests/pos/sigs.scala
+++ b/tests/pos/sigs.scala
@@ -1,31 +1,31 @@
object sigs {
-
+
type Lst[A] = List[A]
-
+
type Twin[B] = (B, B)
-
+
var x = 7 * 9
-
+
class Base {
-
+
def foo(x: Int): Any = 33
def foo: Object = "x"
-
+
}
-
+
class Sub extends Base {
-
+
override def foo = "abc"
-
+
override def foo(x: Int) = "abc"
}
-
+
trait A { self: B =>
type AA
val a: AA & BB
-
+
}
-
+
trait B { this: A =>
type BB
val b: AA & BB
@@ -39,4 +39,4 @@ object sigs {
}
-} \ No newline at end of file
+}
diff --git a/tests/pos/t0031.scala b/tests/pos/t0031.scala
index d4050c818..6070a4683 100644
--- a/tests/pos/t0031.scala
+++ b/tests/pos/t0031.scala
@@ -7,12 +7,12 @@ object Main {
def require[a](precondition: => Boolean)(command: => a): Ensure[a] =
if (precondition)
new Ensure[a] {
- def ensure(postcondition: a => Boolean): a = {
- val result = command;
- if (postcondition(result)) result
- else sys.error("Assertion error")
+ def ensure(postcondition: a => Boolean): a = {
+ val result = command;
+ if (postcondition(result)) result
+ else sys.error("Assertion error")
}
- }
+ }
else
sys.error("Assertion error");
@@ -22,7 +22,7 @@ object Main {
} ensure (result => s contains result);
def main(args: Array[String]) = {
- val s = List(1, 2);
+ val s = List(1, 2);
Console.println(arb(s))
}
diff --git a/tests/pos/t0039.scala b/tests/pos/t0039.scala
index 652c606b0..7b51320e2 100644
--- a/tests/pos/t0039.scala
+++ b/tests/pos/t0039.scala
@@ -1,6 +1,6 @@
abstract class Extensible[A, This <: Extensible[A, This]](x: A, xs: This) { self: This =>
- def mkObj(x: A, xs: This): This;
+ def mkObj(x: A, xs: This): This;
}
class Fixed[A](x: A, xs: Fixed[A]) extends Extensible[A, Fixed[A]](x, xs) {
- def mkObj(x: A, xs: Fixed[A]) = new Fixed(x, xs);
+ def mkObj(x: A, xs: Fixed[A]) = new Fixed(x, xs);
}
diff --git a/tests/pos/t0061.scala b/tests/pos/t0061.scala
index 8a3aed7c6..0228413fa 100644
--- a/tests/pos/t0061.scala
+++ b/tests/pos/t0061.scala
@@ -5,6 +5,6 @@ object O {
case class testA() extends testClass ;
def ga( x:testClass ) = x match {
- case testA() => ()
+ case testA() => ()
}
}
diff --git a/tests/pos/t0066.scala b/tests/pos/t0066.scala
index 8ac328908..04bcd1f5d 100644
--- a/tests/pos/t0066.scala
+++ b/tests/pos/t0066.scala
@@ -1,7 +1,7 @@
class GBTree[A, B] {
abstract class Tree[A,B];
case class Node[A,B](key:A,value:B,smaller:Node[A,B],bigger:Node[A,B])
- extends Tree[A,B];
+ extends Tree[A,B];
case class Nil[A,B]() extends Tree[A,B];
}
diff --git a/tests/pos/t0770.scala b/tests/pos/t0770.scala
index 7a0a2bf9b..54990ec09 100644
--- a/tests/pos/t0770.scala
+++ b/tests/pos/t0770.scala
@@ -1,13 +1,13 @@
trait A
{
- private[this] val p = 5
+ private[this] val p = 5
- def f = (b: Byte) => p
+ def f = (b: Byte) => p
}
trait B
{
- def failure: Boolean
- def success = !failure
+ def failure: Boolean
+ def success = !failure
}
diff --git a/tests/pos/t1085.scala b/tests/pos/t1085.scala
index c59e657cb..62f2be0b8 100644
--- a/tests/pos/t1085.scala
+++ b/tests/pos/t1085.scala
@@ -1,5 +1,5 @@
trait Functor[a] {
- type MyType[a]
+ type MyType[a]
}
object Test {
diff --git a/tests/pos/t1133.scala b/tests/pos/t1133.scala
index 562b528ea..2e4793e99 100644
--- a/tests/pos/t1133.scala
+++ b/tests/pos/t1133.scala
@@ -11,14 +11,14 @@ object Match
object Extractor1 {
def unapply(x: Any) = x match {
- case x: String => Some(x, x+x, x+x+x, x+x, x)
+ case x: String => Some(x, x + x, x + x+x, x+x, x)
case _ => None
}
}
object Extractor2 {
def unapply(x: Any) = x match {
- case x: String => Some(x, x+x, x+x+x)
+ case x: String => Some(x, x + x, x + x+x)
case _ => None
}
}
diff --git a/tests/pos/t1168.scala b/tests/pos/t1168.scala
index 75638e792..f43436812 100644
--- a/tests/pos/t1168.scala
+++ b/tests/pos/t1168.scala
@@ -6,7 +6,7 @@ object Test extends App {
throw new Exception
} catch {
case e : SpecialException => {
- println("matched SpecialException: "+e)
+ println("matched SpecialException: " + e)
assume(e.isInstanceOf[SpecialException])
}
case e : Exception => {
diff --git a/tests/pos/t1722/Test.scala b/tests/pos/t1722/Test.scala
index f236d3fdc..f236d3fdc 100755..100644
--- a/tests/pos/t1722/Test.scala
+++ b/tests/pos/t1722/Test.scala
diff --git a/tests/pos/t1722/Top.scala b/tests/pos/t1722/Top.scala
index 4ac52412a..4ac52412a 100755..100644
--- a/tests/pos/t1722/Top.scala
+++ b/tests/pos/t1722/Top.scala
diff --git a/tests/pos/t2060.scala b/tests/pos/t2060.scala
index 0b9079062..0b9079062 100755..100644
--- a/tests/pos/t2060.scala
+++ b/tests/pos/t2060.scala
diff --git a/tests/pos/t2082.scala b/tests/pos/t2082.scala
index a7ee3789b..34a7c4e3a 100755..100644
--- a/tests/pos/t2082.scala
+++ b/tests/pos/t2082.scala
@@ -13,7 +13,7 @@ trait IdPK
class TestSubject extends KeyedMapper[Long, TestSubject] with IdPK
class TestRun extends KeyedMapper[Long, TestRun] with IdPK {
- object testSubject extends MappedForeignKey[Long, TestRun, TestSubject]
+ object testSubject extends MappedForeignKey[Long, TestRun, TestSubject]
}
object TestRun extends TestRun with KeyedMetaMapper[Long, TestRun]
diff --git a/tests/pos/t2179.scala b/tests/pos/t2179.scala
index 89e22b6e2..89e22b6e2 100755..100644
--- a/tests/pos/t2179.scala
+++ b/tests/pos/t2179.scala
diff --git a/tests/pos/t2208_pos.scala b/tests/pos/t2208_pos.scala
index dd6d686ba..9f8f96c7d 100644
--- a/tests/pos/t2208_pos.scala
+++ b/tests/pos/t2208_pos.scala
@@ -1,8 +1,8 @@
object Test {
- class A
+ class A
- class B[X]
- type Alias[X <: A] = B[X]
+ class B[X]
+ type Alias[X <: A] = B[X]
- val foo: B[A] = new Alias[A] // check that type aliases can be instantiated
+ val foo: B[A] = new Alias[A] // check that type aliases can be instantiated
}
diff --git a/tests/pos/t2305.scala b/tests/pos/t2305.scala
index 3338ab911..37d4ac6ca 100644
--- a/tests/pos/t2305.scala
+++ b/tests/pos/t2305.scala
@@ -5,22 +5,22 @@ trait Bind[Z[_]]
class MySerializable[X] extends java.io.Serializable
object Bind {
- implicit val JavaArrayListBind: Bind[ArrayList] = new Bind[ArrayList] {}
- implicit val MySerializableBind: Bind[MySerializable] = new Bind[MySerializable] {}
+ implicit val JavaArrayListBind: Bind[ArrayList] = new Bind[ArrayList] {}
+ implicit val MySerializableBind: Bind[MySerializable] = new Bind[MySerializable] {}
}
object works {
- // this works fine:
- def runbind(implicit bind: Bind[MySerializable]): Unit = {}
- runbind
+ // this works fine:
+ def runbind(implicit bind: Bind[MySerializable]): Unit = {}
+ runbind
}
object breaks {
- def runbind(implicit bind: Bind[ArrayList]): Unit = {}
+ def runbind(implicit bind: Bind[ArrayList]): Unit = {}
runbind
- /*java.lang.AssertionError: assertion failed: java.io.Serializable
- at scala.Predef$.assert(Predef.scala:107)
- at scala.tools.nsc.symtab.Types$TypeRef.transform(Types.scala:1417)
- at scala.tools.nsc.symtab.Types$TypeRef.baseType(Types.scala:1559)
- */
+ /*java.lang.AssertionError: assertion failed: java.io.Serializable
+ at scala.Predef$.assert(Predef.scala:107)
+ at scala.tools.nsc.symtab.Types$TypeRef.transform(Types.scala:1417)
+ at scala.tools.nsc.symtab.Types$TypeRef.baseType(Types.scala:1559)
+ */
}
diff --git a/tests/pos/t2405.scala b/tests/pos/t2405.scala
index 224b2ce83..a4ac914fb 100644
--- a/tests/pos/t2405.scala
+++ b/tests/pos/t2405.scala
@@ -2,22 +2,22 @@ object A { implicit val x: Int = 1 }
// Problem as stated in the ticket.
object Test1 {
- import A.{x => y}
- implicitly[Int]
+ import A.{x => y}
+ implicitly[Int]
}
// Testing for the absense of shadowing #1.
object Test2 {
- import A.{x => y}
- val x = 2
- implicitly[Int]
+ import A.{x => y}
+ val x = 2
+ implicitly[Int]
}
// Testing for the absense of shadowing #2.
object Test3 {
- {
- import A.{x => y}
- def x: Int = 0
- implicitly[Int]
- }
+ {
+ import A.{x => y}
+ def x: Int = 0
+ implicitly[Int]
+ }
}
diff --git a/tests/pos/t2425.scala b/tests/pos/t2425.scala
index 477d5467a..477d5467a 100755..100644
--- a/tests/pos/t2425.scala
+++ b/tests/pos/t2425.scala
diff --git a/tests/pos/t2429.scala b/tests/pos/t2429.scala
index 4cda3bde1..4cda3bde1 100755..100644
--- a/tests/pos/t2429.scala
+++ b/tests/pos/t2429.scala
diff --git a/tests/pos/t2444.scala b/tests/pos/t2444.scala
index fac1e95d0..fb93749aa 100644
--- a/tests/pos/t2444.scala
+++ b/tests/pos/t2444.scala
@@ -9,7 +9,7 @@ object Test {
def frob[P1, P2<:Foo](f:P1 => P2) = ()
def main(args:Array[String]) : Unit = {
- frob((p:Bar) => p.baz)
+ frob((p:Bar) => p.baz)
}
}
diff --git a/tests/pos/t2484.scala b/tests/pos/t2484.scala
index b822415fd..b822415fd 100755..100644
--- a/tests/pos/t2484.scala
+++ b/tests/pos/t2484.scala
diff --git a/tests/pos/t2500.scala b/tests/pos/t2500.scala
index d0ff99a93..4b02fe488 100644
--- a/tests/pos/t2500.scala
+++ b/tests/pos/t2500.scala
@@ -1,6 +1,6 @@
object Test {
- import scala.collection._
- ((Map(1 -> "a", 2 -> "b"): collection.Map[Int, String]) map identity[(Int, String)]) : scala.collection.Map[Int,String]
- ((SortedMap(1 -> "a", 2 -> "b"): collection.SortedMap[Int, String]) map identity[(Int, String)]): scala.collection.SortedMap[Int,String]
- ((SortedSet(1, 2): collection.SortedSet[Int]) map identity[Int]): scala.collection.SortedSet[Int]
+ import scala.collection._
+ ((Map(1 -> "a", 2 -> "b"): collection.Map[Int, String]) map identity[(Int, String)]) : scala.collection.Map[Int,String]
+ ((SortedMap(1 -> "a", 2 -> "b"): collection.SortedMap[Int, String]) map identity[(Int, String)]): scala.collection.SortedMap[Int,String]
+ ((SortedSet(1, 2): collection.SortedSet[Int]) map identity[Int]): scala.collection.SortedSet[Int]
}
diff --git a/tests/pos/t2504.scala b/tests/pos/t2504.scala
index 72117174c..72117174c 100755..100644
--- a/tests/pos/t2504.scala
+++ b/tests/pos/t2504.scala
diff --git a/tests/pos/t2545.scala b/tests/pos/t2545.scala
index 6ad994223..6ad994223 100755..100644
--- a/tests/pos/t2545.scala
+++ b/tests/pos/t2545.scala
diff --git a/tests/pos/t2591.scala b/tests/pos/t2591.scala
index 47ae551bf..59f7a02cc 100644
--- a/tests/pos/t2591.scala
+++ b/tests/pos/t2591.scala
@@ -7,9 +7,9 @@ object Implicits {
}
object Test {
- // should cause imp to be in scope so that the next expression type checks
- // `import Implicits._` works
- import Implicits.imp
+ // should cause imp to be in scope so that the next expression type checks
+ // `import Implicits._` works
+ import Implicits.imp
(new A) : Int
}
diff --git a/tests/pos/t262.scala b/tests/pos/t262.scala
index ec6187b36..9f7686a8f 100644
--- a/tests/pos/t262.scala
+++ b/tests/pos/t262.scala
@@ -3,7 +3,7 @@ object O {
def f:A;
}
class B extends A {
- def f = if(1 == 2) new C else new D;
+ def f = if (1 == 2) new C else new D;
}
class C extends A {
def f = this;
diff --git a/tests/pos/t2635.scala b/tests/pos/t2635.scala
index 7cd553135..7cd553135 100755..100644
--- a/tests/pos/t2635.scala
+++ b/tests/pos/t2635.scala
diff --git a/tests/pos/t2683.scala b/tests/pos/t2683.scala
index 4ba34b554..4ba34b554 100755..100644
--- a/tests/pos/t2683.scala
+++ b/tests/pos/t2683.scala
diff --git a/tests/pos/t3174.scala b/tests/pos/t3174.scala
index 8d9b2578d..8d9b2578d 100755..100644
--- a/tests/pos/t3174.scala
+++ b/tests/pos/t3174.scala
diff --git a/tests/pos/t3278.scala b/tests/pos/t3278.scala
index 05bfbc146..254f4dc79 100644
--- a/tests/pos/t3278.scala
+++ b/tests/pos/t3278.scala
@@ -1,30 +1,30 @@
class Foo
class Test {
- def update[B](x : B, b : Int): Unit = {}
- def apply[B](x : B) = 1
+ def update[B](x : B, b : Int): Unit = {}
+ def apply[B](x : B) = 1
}
class Test2 {
type B = Foo
- def update(x : B, b : Int): Unit = {}
- def apply(x : B) = 1
+ def update(x : B, b : Int): Unit = {}
+ def apply(x : B) = 1
}
object Test {
- def main(a : Array[String]): Unit = {
- val a = new Test
- val f = new Foo
- a(f) = 1 //works
- a(f) = a(f) + 1 //works
- a(f) += 1 //error: reassignment to val
- }
+ def main(a : Array[String]): Unit = {
+ val a = new Test
+ val f = new Foo
+ a(f) = 1 //works
+ a(f) = a(f) + 1 //works
+ a(f) += 1 //error: reassignment to val
+ }
}
object Test2 {
- def main(args : Array[String]): Unit = {
+ def main(args : Array[String]): Unit = {
args(0) += "a"
- val a = new Test2
- val f = new Foo
- a(f) = 1 //works
- a(f) = a(f) + 1 //works
- a(f) += 1 //error: reassignment to val
- }
+ val a = new Test2
+ val f = new Foo
+ a(f) = 1 //works
+ a(f) = a(f) + 1 //works
+ a(f) += 1 //error: reassignment to val
+ }
}
diff --git a/tests/pos/tailcall/i321.scala b/tests/pos/tailcall/i321.scala
index 595e13910..daa078dd5 100644
--- a/tests/pos/tailcall/i321.scala
+++ b/tests/pos/tailcall/i321.scala
@@ -23,4 +23,4 @@ class i321[T >: Null <: AnyRef] {
final def go2[U >: Null <: AnyRef](t: i321[U]): Int = t.go2(this)
-} \ No newline at end of file
+}
diff --git a/tests/pos/tailcall/tailcall.scala b/tests/pos/tailcall/tailcall.scala
index 1e05840ea..faa707e18 100644
--- a/tests/pos/tailcall/tailcall.scala
+++ b/tests/pos/tailcall/tailcall.scala
@@ -6,4 +6,4 @@ class tailcall {
class TypedApply[T2]{
private def firstDiff[T <: TypedApply[T2]](xs: List[T]): Int = firstDiff(xs)
-} \ No newline at end of file
+}
diff --git a/tests/pos/templateParents.scala b/tests/pos/templateParents.scala
index 845913270..1bc07b571 100644
--- a/tests/pos/templateParents.scala
+++ b/tests/pos/templateParents.scala
@@ -1,11 +1,11 @@
object templateParents {
// traits do not call a constructor
- class C[+T](x: T)
+ class C[+T](x: T)
trait D extends C[String]
trait E extends C[Int]
new C("abc") with D
-
+
}
object templateParents1 {
diff --git a/tests/pos/tryTyping.scala b/tests/pos/tryTyping.scala
index a2aeb17c8..cfa2c7a77 100644
--- a/tests/pos/tryTyping.scala
+++ b/tests/pos/tryTyping.scala
@@ -17,4 +17,4 @@ object tryTyping{
try{???; 1}
catch a3(3)
}
-} \ No newline at end of file
+}
diff --git a/tests/pos/typedIdents.scala b/tests/pos/typedIdents.scala
index f6c88379b..e99b5a045 100644
--- a/tests/pos/typedIdents.scala
+++ b/tests/pos/typedIdents.scala
@@ -18,17 +18,17 @@ package P { // `X' bound by package clause
println("L12: " + x) // `x' refers to constant `3' here
locally {
import Q.X._ // `x' and `y' bound by wildcard import
- // println("L14: "+x) // reference to `x' is ambiguous here
+ // println("L14: " + x) // reference to `x' is ambiguous here
import X.y // `y' bound by explicit import
println("L16: " + y) // `y' refers to `Q.X.y' here
locally {
val x = "abc" // `x' bound by local definition
import P.X._ // `x' and `y' bound by wildcard import
- // println("L19: "+y) // reference to `y' is ambiguous here
+ // println("L19: " + y) // reference to `y' is ambiguous here
println("L20: " + x) // `x' refers to string ``abc'' here
}
}
}
}
}
-} \ No newline at end of file
+}
diff --git a/tests/pos/typedapply.scala b/tests/pos/typedapply.scala
index e28e59d4f..8496d528b 100644
--- a/tests/pos/typedapply.scala
+++ b/tests/pos/typedapply.scala
@@ -8,4 +8,4 @@ object typedapply {
foo[Int, String] _
-} \ No newline at end of file
+}
diff --git a/tests/pos/varargs.scala b/tests/pos/varargs.scala
index 3739636b8..616456616 100644
--- a/tests/pos/varargs.scala
+++ b/tests/pos/varargs.scala
@@ -10,4 +10,4 @@ object varargs {
g(Nil: _*)
g(1)
g()
-} \ No newline at end of file
+}
diff --git a/tests/untried/neg-with-implicits/implicit-shadow.scala b/tests/untried/neg-with-implicits/implicit-shadow.scala
index ec7f70b6d..b03d0edbd 100644
--- a/tests/untried/neg-with-implicits/implicit-shadow.scala
+++ b/tests/untried/neg-with-implicits/implicit-shadow.scala
@@ -1,11 +1,11 @@
object Test {
- import B._, C._
+ import B._, C._
- 1.isEmpty
+ 1.isEmpty
}
trait A {
- implicit def i2s(i: Int): String = ""
+ implicit def i2s(i: Int): String = ""
}
object B extends A
diff --git a/tests/untried/neg-with-implicits/implicits.scala b/tests/untried/neg-with-implicits/implicits.scala
index 22633a1f3..5cb09804c 100644
--- a/tests/untried/neg-with-implicits/implicits.scala
+++ b/tests/untried/neg-with-implicits/implicits.scala
@@ -18,7 +18,7 @@ object Test {
import Sub._
val p = new Pos
def f(x: Int): Int = x
- f(p+1)
+ f(p + 1)
}
object test2 {
diff --git a/tests/untried/neg-with-implicits/t2405.scala b/tests/untried/neg-with-implicits/t2405.scala
index 6982285b9..c005e7a54 100644
--- a/tests/untried/neg-with-implicits/t2405.scala
+++ b/tests/untried/neg-with-implicits/t2405.scala
@@ -2,9 +2,9 @@ object A { implicit val x: Int = 1 }
// Expecting shadowing #1
object Test2 {
- {
- import A.{x => y}
- def y: Int = 0
- implicitly[Int]
- }
+ {
+ import A.{x => y}
+ def y: Int = 0
+ implicitly[Int]
+ }
}
diff --git a/tests/untried/neg-with-implicits/t3006.scala b/tests/untried/neg-with-implicits/t3006.scala
index a84b69c84..a84b69c84 100755..100644
--- a/tests/untried/neg-with-implicits/t3006.scala
+++ b/tests/untried/neg-with-implicits/t3006.scala
diff --git a/tests/untried/neg-with-implicits/t3224.scala b/tests/untried/neg-with-implicits/t3224.scala
index b7af8a67b..b7af8a67b 100755..100644
--- a/tests/untried/neg-with-implicits/t3224.scala
+++ b/tests/untried/neg-with-implicits/t3224.scala
diff --git a/tests/untried/neg-with-implicits/t7519-b/Mac_1.scala b/tests/untried/neg-with-implicits/t7519-b/Mac_1.scala
index 55b583d24..a9ecfd976 100644
--- a/tests/untried/neg-with-implicits/t7519-b/Mac_1.scala
+++ b/tests/untried/neg-with-implicits/t7519-b/Mac_1.scala
@@ -9,6 +9,6 @@ object IW {
}
object Mac {
def mac(s: String): String = macro macImpl
- def macImpl(c: Context)(s: c.Expr[String]): c.Expr[String] =
+ def macImpl(c: Context)(s: c.Expr[String]): c.Expr[String] =
c.universe.reify(IW.foo(s.splice))
}
diff --git a/tests/untried/neg/for-comprehension-old.scala b/tests/untried/neg/for-comprehension-old.scala
index 10ae363bd..270861751 100644
--- a/tests/untried/neg/for-comprehension-old.scala
+++ b/tests/untried/neg/for-comprehension-old.scala
@@ -1,11 +1,11 @@
class A {
- for (x <- 1 to 5 ; y = x) yield x+y // ok
- for (x <- 1 to 5 ; val y = x) yield x+y // fail
- for (val x <- 1 to 5 ; y = x) yield x+y // fail
- for (val x <- 1 to 5 ; val y = x) yield x+y // fail
+ for (x <- 1 to 5 ; y = x) yield x + y // ok
+ for (x <- 1 to 5 ; val y = x) yield x + y // fail
+ for (val x <- 1 to 5 ; y = x) yield x + y // fail
+ for (val x <- 1 to 5 ; val y = x) yield x + y // fail
- for (z <- 1 to 2 ; x <- 1 to 5 ; y = x) yield x+y // ok
- for (z <- 1 to 2 ; x <- 1 to 5 ; val y = x) yield x+y // fail
- for (z <- 1 to 2 ; val x <- 1 to 5 ; y = x) yield x+y // fail
- for (z <- 1 to 2 ; val x <- 1 to 5 ; val y = x) yield x+y // fail
+ for (z <- 1 to 2 ; x <- 1 to 5 ; y = x) yield x + y // ok
+ for (z <- 1 to 2 ; x <- 1 to 5 ; val y = x) yield x + y // fail
+ for (z <- 1 to 2 ; val x <- 1 to 5 ; y = x) yield x + y // fail
+ for (z <- 1 to 2 ; val x <- 1 to 5 ; val y = x) yield x + y // fail
}
diff --git a/tests/untried/neg/illegal-stmt-start.scala b/tests/untried/neg/illegal-stmt-start.scala
index 48ae0a8b0..275bc80e9 100644
--- a/tests/untried/neg/illegal-stmt-start.scala
+++ b/tests/untried/neg/illegal-stmt-start.scala
@@ -2,4 +2,4 @@ class Test {
def foo {
private def bar {}
}
-} \ No newline at end of file
+}
diff --git a/tests/untried/neg/literate_existentials.scala b/tests/untried/neg/literate_existentials.scala
index 8580347bf..fe47bd5fb 100644
--- a/tests/untried/neg/literate_existentials.scala
+++ b/tests/untried/neg/literate_existentials.scala
@@ -54,7 +54,7 @@ object LiterateExistentials {
// V1 <: Any
//
// Which simplify to:
-// V1 >: String <: Any
+// V1 >: String <: Any
//
// That's not inconsistent, so we can say that:
// T <: U forSome { type X1 >: L1 <: H1 }
@@ -62,7 +62,7 @@ object LiterateExistentials {
// Nothing <: A forSome { type A >: String <: Any }
// Now to ask the compiler:
-
+
implicitly[Nothing <:< (A forSome { type A >: String <: Any })]
@@ -85,15 +85,15 @@ object LiterateExistentials {
//
// Which simplify to:
//
-// V1 >: lub(Int, String) <: Any
+// V1 >: lub(Int, String) <: Any
//
-// V1 >: Any <: Any
+// V1 >: Any <: Any
//
// We have demonstrated consistency! We can say that:
// T :< (U forSome { type U >: L1 <: H1 })
// Under our bindings, this is:
// Int :< (M forSome { type M >: String <: Any })
-
+
implicitly[Int <:< (M forSome { type M >: String <: Any })]
@@ -179,7 +179,7 @@ object LiterateExistentials {
// Nothing <: V1
// V1 <: String
//
-// V1 >: Int <: String
+// V1 >: Int <: String
//
// Alas! These are inconsistent! There is no supertype of Int that is a
// subtype of String! Our substitution rule does not allow us to claim that our
diff --git a/tests/untried/neg/lubs.scala b/tests/untried/neg/lubs.scala
index 3524fa4d8..bb6f7c286 100644
--- a/tests/untried/neg/lubs.scala
+++ b/tests/untried/neg/lubs.scala
@@ -3,7 +3,7 @@ object test1 {
class C extends A[C]
class D extends A[D]
- def f = if(1 == 2) new C else new D
+ def f = if (1 == 2) new C else new D
val x1: A[Any] = f
val x2: A[A[Any]] = f
diff --git a/tests/untried/neg/macro-invalidusage-presuper/Impls_1.scala b/tests/untried/neg/macro-invalidusage-presuper/Impls_1.scala
index ea98f01fa..7f10e9185 100644
--- a/tests/untried/neg/macro-invalidusage-presuper/Impls_1.scala
+++ b/tests/untried/neg/macro-invalidusage-presuper/Impls_1.scala
@@ -2,4 +2,4 @@ import scala.reflect.macros.blackbox.Context
object Impls {
def impl(c: Context) = { import c.universe._; c.Expr[Unit](q"()") }
-} \ No newline at end of file
+}
diff --git a/tests/untried/neg/macro-invalidusage-presuper/Macros_Test_2.scala b/tests/untried/neg/macro-invalidusage-presuper/Macros_Test_2.scala
index ff46a5915..929c36528 100644
--- a/tests/untried/neg/macro-invalidusage-presuper/Macros_Test_2.scala
+++ b/tests/untried/neg/macro-invalidusage-presuper/Macros_Test_2.scala
@@ -1,3 +1,3 @@
import Impls._
-class D extends { def x = macro impl } with AnyRef \ No newline at end of file
+class D extends { def x = macro impl } with AnyRef
diff --git a/tests/untried/neg/names-defaults-neg.scala b/tests/untried/neg/names-defaults-neg.scala
index 2f8700ac6..589259622 100644
--- a/tests/untried/neg/names-defaults-neg.scala
+++ b/tests/untried/neg/names-defaults-neg.scala
@@ -98,7 +98,7 @@ object Test extends App {
f3818(y = 1, m = 1)
// DEFINITIONS
- def test1(a: Int, b: String) = a +": "+ b
+ def test1(a: Int, b: String) = a +": " + b
def test2(x: Unit) = println("test2")
def test3(a: Int, b: Int) = a + b
def test7(m: Int) = m
diff --git a/tests/untried/neg/override.scala b/tests/untried/neg/override.scala
index 797551606..797551606 100755..100644
--- a/tests/untried/neg/override.scala
+++ b/tests/untried/neg/override.scala
diff --git a/tests/untried/neg/specification-scopes/P_2.scala b/tests/untried/neg/specification-scopes/P_2.scala
index d59f82e90..03a52bdcd 100644
--- a/tests/untried/neg/specification-scopes/P_2.scala
+++ b/tests/untried/neg/specification-scopes/P_2.scala
@@ -1,21 +1,21 @@
package P { // 'X' bound by package clause
import Console._ // 'println' bound by wildcard import
object A {
- println("L4: "+X) // 'X' refers to 'P.X' here
+ println("L4: " + X) // 'X' refers to 'P.X' here
object B {
import Q._ // 'X' bound by wildcard import
- println("L7: "+X) // 'X' refers to 'Q.X' here
+ println("L7: " + X) // 'X' refers to 'Q.X' here
import X._ // 'x' and 'y' bound by wildcard import
- println("L8: "+x) // 'x' refers to 'Q.X.x' here
+ println("L8: " + x) // 'x' refers to 'Q.X.x' here
object C {
val x = 3 // 'x' bound by local definition
- println("L12: "+x); // 'x' refers to constant '3' here
+ println("L12: " + x); // 'x' refers to constant '3' here
{ import Q.X._ // 'x' and 'y' bound by wildcard
- println("L14: "+x) // reference to 'x' is ambiguous here
+ println("L14: " + x) // reference to 'x' is ambiguous here
import X.y // 'y' bound by explicit import
- println("L16: "+y); // 'y' refers to 'Q.X.y' here
+ println("L16: " + y); // 'y' refers to 'Q.X.y' here
{ val x = "abc" // 'x' bound by local definition
import P.X._ // 'x' and 'y' bound by wildcard
- println("L19: "+y) // reference to 'y' is ambiguous here
- println("L20: "+x) // 'x' refers to string ''abc'' here
+ println("L19: " + y) // reference to 'y' is ambiguous here
+ println("L20: " + x) // 'x' refers to string ''abc'' here
}}}}}}
diff --git a/tests/untried/neg/switch.scala b/tests/untried/neg/switch.scala
index a66ed768f..b4d3392f9 100644
--- a/tests/untried/neg/switch.scala
+++ b/tests/untried/neg/switch.scala
@@ -55,7 +55,7 @@ object Main {
}
// some ints just to mix it up a bit
- def succ4(x: Int, y: Int) = ((x+y): @switch) match {
+ def succ4(x: Int, y: Int) = ((x + y): @switch) match {
case 1 => 5
case 2 => 10
case 3 => 20
diff --git a/tests/untried/neg/t0764.scala b/tests/untried/neg/t0764.scala
index 7ee76feab..0c3f857a8 100644
--- a/tests/untried/neg/t0764.scala
+++ b/tests/untried/neg/t0764.scala
@@ -29,7 +29,7 @@ _1.type <:< Node{type T = NextType} (because skolemization and _1's upper bound)
Node{type T = NextType} <:< _1.type forSome val _1: Node{type T = NextType}
because:
Node{type T = NextType} <:< T forSome {type T <: Node{type T = NextType} with Singleton}
-because
+because
Node{type T = NextType} <:< Node{type T = NextType} with Singleton
hmmm.. might the with Singleton be throwing a wrench in our existential house?
diff --git a/tests/untried/neg/t1181.scala b/tests/untried/neg/t1181.scala
index 5e5fceacc..6cae8f576 100644
--- a/tests/untried/neg/t1181.scala
+++ b/tests/untried/neg/t1181.scala
@@ -3,7 +3,7 @@ package test
import scala.collection.immutable.Map
class CompilerTest(val valueList: List[Symbol]) {
- def buildMap(map: Map[Symbol, Symbol], keyList: List[Symbol], valueList: List[Symbol]): Map[Symbol, Symbol] = {
+ def buildMap(map: Map[Symbol, Symbol], keyList: List[Symbol], valueList: List[Symbol]): Map[Symbol, Symbol] = {
(keyList, valueList) match {
case (Nil, Nil) => map
_ => buildMap(map.updated(keyList.head, valueList.head), keyList.tail, valueList.tail)
diff --git a/tests/untried/neg/t1432.scala b/tests/untried/neg/t1432.scala
index bdf233128..5df20121d 100644
--- a/tests/untried/neg/t1432.scala
+++ b/tests/untried/neg/t1432.scala
@@ -7,7 +7,7 @@ object Bug_NoUnique {
type Alias2[E] = Wrap[E]
def wrap[E,A,Y](v : (A,E=>Y)) : (A,Alias2[E]=>Y) =
- throw new Error("Body here")
+ throw new Error("Body here")
def test(x : TypeCon[Wrap[Unit]]) : TypeCon[Unit] = wrap(x)
}
diff --git a/tests/untried/neg/t2066b.scala b/tests/untried/neg/t2066b.scala
index 2f8ffde14..b309a48d6 100644
--- a/tests/untried/neg/t2066b.scala
+++ b/tests/untried/neg/t2066b.scala
@@ -1,16 +1,16 @@
object Test extends App {
- trait A {
- def f[T[_]](x : T[Int]) : T[Any]
- }
+ trait A {
+ def f[T[_]](x : T[Int]) : T[Any]
+ }
- class B extends A {
- def f[T[+_]](x : T[Int]) : T[Any] = x
- }
+ class B extends A {
+ def f[T[+_]](x : T[Int]) : T[Any] = x
+ }
- class P[Y](var y : Y)
+ class P[Y](var y : Y)
- val p = new P(1)
- val palias = (new B():A).f[P](p)
- palias.y = "hello"
- val z: Int = p.y
+ val p = new P(1)
+ val palias = (new B():A).f[P](p)
+ palias.y = "hello"
+ val z: Int = p.y
}
diff --git a/tests/untried/neg/t2208.scala b/tests/untried/neg/t2208.scala
index 53165cc81..b86b1d9b3 100644
--- a/tests/untried/neg/t2208.scala
+++ b/tests/untried/neg/t2208.scala
@@ -1,8 +1,8 @@
object Test {
- class A
+ class A
- class B[X]
- type Alias[X <: A] = B[X]
+ class B[X]
+ type Alias[X <: A] = B[X]
- class C extends Alias[Any] // not ok, normalisation should check bounds before expanding Alias
+ class C extends Alias[Any] // not ok, normalisation should check bounds before expanding Alias
}
diff --git a/tests/untried/neg/t2275a.scala b/tests/untried/neg/t2275a.scala
index 7f2b803da..8e25a38fe 100644
--- a/tests/untried/neg/t2275a.scala
+++ b/tests/untried/neg/t2275a.scala
@@ -1,5 +1,5 @@
object Test {
- if(true) {
+ if (true) {
<br>
}else{
<span>{"louenesee"}</span>
diff --git a/tests/untried/neg/t2336.scala b/tests/untried/neg/t2336.scala
index 4cea02b72..4cea02b72 100755..100644
--- a/tests/untried/neg/t2336.scala
+++ b/tests/untried/neg/t2336.scala
diff --git a/tests/untried/neg/t2494.scala b/tests/untried/neg/t2494.scala
index 71e6bc4bb..71e6bc4bb 100755..100644
--- a/tests/untried/neg/t2494.scala
+++ b/tests/untried/neg/t2494.scala
diff --git a/tests/untried/neg/t2773.scala b/tests/untried/neg/t2773.scala
index aaa6351c8..aaa6351c8 100755..100644
--- a/tests/untried/neg/t2773.scala
+++ b/tests/untried/neg/t2773.scala
diff --git a/tests/untried/neg/t2779.scala b/tests/untried/neg/t2779.scala
index d025055aa..d025055aa 100755..100644
--- a/tests/untried/neg/t2779.scala
+++ b/tests/untried/neg/t2779.scala
diff --git a/tests/untried/neg/t2870.scala b/tests/untried/neg/t2870.scala
index 4de19242e..4de19242e 100755..100644
--- a/tests/untried/neg/t2870.scala
+++ b/tests/untried/neg/t2870.scala
diff --git a/tests/untried/neg/t2918.scala b/tests/untried/neg/t2918.scala
index ff2be39ae..ff2be39ae 100755..100644
--- a/tests/untried/neg/t2918.scala
+++ b/tests/untried/neg/t2918.scala
diff --git a/tests/untried/neg/t2968.scala b/tests/untried/neg/t2968.scala
index 41c3a798a..0adad4cd5 100644
--- a/tests/untried/neg/t2968.scala
+++ b/tests/untried/neg/t2968.scala
@@ -1,26 +1,26 @@
object t1 {
- case object Const {
- }
+ case object Const {
+ }
- class Var
- {
+ class Var
+ {
} // missing brace
object t2 {
- case class Const() {
- }
+ case class Const() {
+ }
- class Var
- {
+ class Var
+ {
} // missing brace
object t3 {
- final case class Const() {
- }
+ final case class Const() {
+ }
- class Var
- {
+ class Var
+ {
} // missing brace
diff --git a/tests/untried/neg/t3189.scala b/tests/untried/neg/t3189.scala
index 4ea4bb758..94c13c54d 100644
--- a/tests/untried/neg/t3189.scala
+++ b/tests/untried/neg/t3189.scala
@@ -1,3 +1,3 @@
object A {
val Array(a,b*) = ("": Any)
-} \ No newline at end of file
+}
diff --git a/tests/untried/neg/t3209.scala b/tests/untried/neg/t3209.scala
index d89372665..f03c2942b 100644
--- a/tests/untried/neg/t3209.scala
+++ b/tests/untried/neg/t3209.scala
@@ -1,2 +1,2 @@
@javax.annotation.Generated(Array("test"))
-package test \ No newline at end of file
+package test
diff --git a/tests/untried/neg/t3913.scala b/tests/untried/neg/t3913.scala
index a5408fe02..ef8cf5591 100644
--- a/tests/untried/neg/t3913.scala
+++ b/tests/untried/neg/t3913.scala
@@ -4,5 +4,5 @@ object LimboStage extends Stage( Set( LimboStage ))
object Test {
def main( args: Array[ String ]): Unit = {
val x = LimboStage
- }
+ }
}
diff --git a/tests/untried/neg/t4069.scala b/tests/untried/neg/t4069.scala
index 80df6ec16..831eba413 100644
--- a/tests/untried/neg/t4069.scala
+++ b/tests/untried/neg/t4069.scala
@@ -7,4 +7,4 @@ object ParserBug {
case 2 =>
<div/>
}
-} \ No newline at end of file
+}
diff --git a/tests/untried/neg/t4460b.scala b/tests/untried/neg/t4460b.scala
index 1233017dd..d347d5c53 100644
--- a/tests/untried/neg/t4460b.scala
+++ b/tests/untried/neg/t4460b.scala
@@ -1,9 +1,9 @@
trait A
class Outer() {
- class B(val x: Int) {
- self: A =>
+ class B(val x: Int) {
+ self: A =>
- def this() = this() // was binding to Predef.<init> !!
- }
+ def this() = this() // was binding to Predef.<init> !!
+ }
}
diff --git a/tests/untried/neg/t4584.scala b/tests/untried/neg/t4584.scala
index b34aba91a..0ed04355d 100644
--- a/tests/untried/neg/t4584.scala
+++ b/tests/untried/neg/t4584.scala
@@ -1 +1 @@
-class A { val \u2 \ No newline at end of file
+class A { val \u2
diff --git a/tests/untried/neg/t4818.scala b/tests/untried/neg/t4818.scala
index faae22920..c04b359b4 100644
--- a/tests/untried/neg/t4818.scala
+++ b/tests/untried/neg/t4818.scala
@@ -1,7 +1,7 @@
object Test {
- case class Fn[A, B](f: A => B)
+ case class Fn[A, B](f: A => B)
- def f(x: Any) = x match { case Fn(f) => f(5) }
+ def f(x: Any) = x match { case Fn(f) => f(5) }
- Fn((x: String) => x)
+ Fn((x: String) => x)
}
diff --git a/tests/untried/neg/t5702-neg-bad-xbrace.scala b/tests/untried/neg/t5702-neg-bad-xbrace.scala
index 64bbdb18b..89f5b5d62 100644
--- a/tests/untried/neg/t5702-neg-bad-xbrace.scala
+++ b/tests/untried/neg/t5702-neg-bad-xbrace.scala
@@ -26,6 +26,6 @@ object Test {
val xml = <top><a>apple</a><b>boy</b><c>child</c></top>
// bad brace or paren after _*
val <top>{a, z@_*)}</top> = xml
- println("A for "+ a +", ending with "+ z)
+ println("A for " + a +", ending with "+ z)
}
}
diff --git a/tests/untried/neg/t5702-neg-ugly-xbrace.scala b/tests/untried/neg/t5702-neg-ugly-xbrace.scala
index 0ff7bfa09..bb587e812 100644
--- a/tests/untried/neg/t5702-neg-ugly-xbrace.scala
+++ b/tests/untried/neg/t5702-neg-ugly-xbrace.scala
@@ -9,6 +9,6 @@ object Test {
// Assuming } for ) after _* would not be not outlandish.
// bad brace or paren after _*
val <top>{a, z@_*)</top> = xml
- println("A for "+ a +", ending with "+ z)
+ println("A for " + a +", ending with "+ z)
}
}
diff --git a/tests/untried/neg/t576.scala b/tests/untried/neg/t576.scala
index fd83217a4..2aaa65a65 100644
--- a/tests/untried/neg/t576.scala
+++ b/tests/untried/neg/t576.scala
@@ -6,7 +6,7 @@ abstract class BaseListXXX {
}
}
trait PriorityTreeXXX extends BaseListXXX {
- type Node <: BasicTreeNode;
+ type Node <: BasicTreeNode;
trait BasicTreeNode extends BaseNode {
def sibling: Node;
diff --git a/tests/untried/neg/t5856.scala b/tests/untried/neg/t5856.scala
index 2ceee590a..6838fe0dd 100644
--- a/tests/untried/neg/t5856.scala
+++ b/tests/untried/neg/t5856.scala
@@ -8,4 +8,4 @@ object Test {
val s7 = s"$s1 $null $super"
val s8 = s"$super"
val s9 = s"$"
-} \ No newline at end of file
+}
diff --git a/tests/untried/neg/t6214.scala b/tests/untried/neg/t6214.scala
index 0d5ffc5de..56d5e9360 100644
--- a/tests/untried/neg/t6214.scala
+++ b/tests/untried/neg/t6214.scala
@@ -2,6 +2,6 @@ object Test {
def m(f: String => Unit) = 0
def m(f: Int => Unit) = 0
def foo: Unit = {
- m { s => case class Foo() }
+ m { s => case class Foo() }
}
}
diff --git a/tests/untried/neg/t6258.scala b/tests/untried/neg/t6258.scala
index 19794b325..58236d154 100644
--- a/tests/untried/neg/t6258.scala
+++ b/tests/untried/neg/t6258.scala
@@ -1,10 +1,10 @@
object Test {
- val f : PartialFunction[_, Int] = { case a : Int => a } // undefined param
+ val f : PartialFunction[_, Int] = { case a : Int => a } // undefined param
- def foo[A](pf: PartialFunction[A, Int]): Unit = {};
- foo { case a : Int => a } // undefined param
+ def foo[A](pf: PartialFunction[A, Int]): Unit = {};
+ foo { case a : Int => a } // undefined param
- val g : PartialFunction[Int, _] = { case a : Int => a } // okay
+ val g : PartialFunction[Int, _] = { case a : Int => a } // okay
}
diff --git a/tests/untried/neg/t6558.scala b/tests/untried/neg/t6558.scala
index b4304ff68..57f02f13f 100644
--- a/tests/untried/neg/t6558.scala
+++ b/tests/untried/neg/t6558.scala
@@ -7,6 +7,6 @@ class AnnotNotFound {
class D[@typeparam T]
class E(
- @valueparam x: Any
+ @valueparam x: Any
)
}
diff --git a/tests/untried/neg/t771.scala b/tests/untried/neg/t771.scala
index 26bf44164..26bf44164 100755..100644
--- a/tests/untried/neg/t771.scala
+++ b/tests/untried/neg/t771.scala
diff --git a/tests/untried/neg/t7757b.scala b/tests/untried/neg/t7757b.scala
index e9a537dba..e67d944bd 100644
--- a/tests/untried/neg/t7757b.scala
+++ b/tests/untried/neg/t7757b.scala
@@ -1,2 +1,2 @@
trait Foo2
-@annot2 \ No newline at end of file
+@annot2
diff --git a/tests/untried/neg/t7872.scala b/tests/untried/neg/t7872.scala
index 66d22a071..55480be6a 100644
--- a/tests/untried/neg/t7872.scala
+++ b/tests/untried/neg/t7872.scala
@@ -1,7 +1,7 @@
trait Cov[+A]
trait Inv[-A]
-object varianceExploit {
+object varianceExploit {
type l[-a] = Cov[a]
type x = {type l[-a] = Cov[a]}
def foo[M[_]] = ()
diff --git a/tests/untried/neg/t7872b.scala b/tests/untried/neg/t7872b.scala
index 307a1470c..e76145cce 100644
--- a/tests/untried/neg/t7872b.scala
+++ b/tests/untried/neg/t7872b.scala
@@ -1,20 +1,20 @@
object coinv {
def up[F[+_]](fa: F[String]): F[Object] = fa
def down[F[-_]](fa: F[Object]): F[String] = fa
-
+
up(List("hi"))
-
+
// should not compile; `l' is unsound
def oops1 = down[({type l[-a] = List[a]})#l](List('whatever: Object)).head + "oops"
// scala> oops1
// java.lang.ClassCastException: scala.Symbol cannot be cast to java.lang.String
// at com.nocandysw.coinv$.oops1(coinv.scala:12)
-
+
type Stringer[-A] = A => String
down[Stringer](_.toString)
// [error] type A is contravariant, but type _ is declared covariant
// up[Stringer]("printed: " + _)
-
+
// should not compile; `l' is unsound
def oops2 = up[({type l[+a] = Stringer[a]})#l]("printed: " + _)
// scala> oops2(Some(33))
diff --git a/tests/untried/neg/t7872c.scala b/tests/untried/neg/t7872c.scala
index fa12a523b..112248bc8 100644
--- a/tests/untried/neg/t7872c.scala
+++ b/tests/untried/neg/t7872c.scala
@@ -1,7 +1,7 @@
object coinv {
def up[F[+_]](fa: F[String]): F[Object] = fa
def down[F[-_]](fa: F[Object]): F[String] = fa
-
+
up(List("hi"))
// [error] type A is covariant, but type _ is declared contravariant
down(List('whatever: Object))
diff --git a/tests/untried/neg/t8158/Macros_1.scala b/tests/untried/neg/t8158/Macros_1.scala
index b84e3ed8d..c0df1d9c0 100644
--- a/tests/untried/neg/t8158/Macros_1.scala
+++ b/tests/untried/neg/t8158/Macros_1.scala
@@ -31,4 +31,4 @@ object Max {
}
c.Expr[Any](t)
}
-} \ No newline at end of file
+}
diff --git a/tests/untried/neg/t8158/Test_2.scala b/tests/untried/neg/t8158/Test_2.scala
index f5ac6616b..aeeb62982 100644
--- a/tests/untried/neg/t8158/Test_2.scala
+++ b/tests/untried/neg/t8158/Test_2.scala
@@ -11,4 +11,4 @@ class BugTest {
case _ => ???
}
}
-} \ No newline at end of file
+}
diff --git a/tests/untried/neg/t856.scala b/tests/untried/neg/t856.scala
index fea216bfa..a239ac9eb 100644
--- a/tests/untried/neg/t856.scala
+++ b/tests/untried/neg/t856.scala
@@ -1,7 +1,7 @@
trait Complex extends Product2[Double,Double]
class ComplexRect(val _1:Double, _2:Double) extends Complex {
- override def toString = "ComplexRect("+_1+","+_2+")"
+ override def toString = "ComplexRect(" +_1 +","+_2 +")"
}
object Test {
diff --git a/tests/untried/neg/t963b.scala b/tests/untried/neg/t963b.scala
index 3442f46c4..f3927cb09 100644
--- a/tests/untried/neg/t963b.scala
+++ b/tests/untried/neg/t963b.scala
@@ -10,7 +10,7 @@ object B {
var a : A = _
var b : Boolean = false
def y : A = {
- if(b) {
+ if (b) {
a = new A { type T = Int; var v = 1 }
a
} else {
diff --git a/tests/untried/neg/unicode-unterminated-quote.scala b/tests/untried/neg/unicode-unterminated-quote.scala
index bb6eab667..16cadb086 100644
--- a/tests/untried/neg/unicode-unterminated-quote.scala
+++ b/tests/untried/neg/unicode-unterminated-quote.scala
@@ -1,2 +1,2 @@
class A {
- val x = \u0022 \ No newline at end of file
+ val x = \u0022
diff --git a/tests/untried/neg/warn-unused-privates.scala b/tests/untried/neg/warn-unused-privates.scala
index cb6e946a3..64e7679f3 100644
--- a/tests/untried/neg/warn-unused-privates.scala
+++ b/tests/untried/neg/warn-unused-privates.scala
@@ -1,7 +1,7 @@
class Bippy(a: Int, b: Int) {
private def this(c: Int) = this(c, c) // warn
private def bippy(x: Int): Int = bippy(x) // TODO: could warn
- private def boop(x: Int) = x+a+b // warn
+ private def boop(x: Int) = x + a + b // warn
final private val MILLIS1 = 2000 // no warn, might have been inlined
final private val MILLIS2: Int = 1000 // warn
final private val HI_COMPANION: Int = 500 // no warn, accessed from companion
diff --git a/tests/untried/neg/xmlcorner.scala b/tests/untried/neg/xmlcorner.scala
index 042ec05e6..671225186 100644
--- a/tests/untried/neg/xmlcorner.scala
+++ b/tests/untried/neg/xmlcorner.scala
@@ -11,7 +11,7 @@ object pos
def wrap(f : Int => Unit) = f(5)
wrap({ v =>
- if(v == 5) {
+ if (v == 5) {
val n = {
val m = (<a>{}</a>)
<div>{ v }</div>
diff --git a/tests/untried/neg/xmltruncated7.scala b/tests/untried/neg/xmltruncated7.scala
index 7e296a910..466b169df 100644
--- a/tests/untried/neg/xmltruncated7.scala
+++ b/tests/untried/neg/xmltruncated7.scala
@@ -1,3 +1,3 @@
object Test {
<p>foo}: </p>
-} \ No newline at end of file
+}
diff --git a/tests/untried/pos/FPTest.scala b/tests/untried/pos/FPTest.scala
index b351b7bb9..6fab0fe67 100644
--- a/tests/untried/pos/FPTest.scala
+++ b/tests/untried/pos/FPTest.scala
@@ -4,7 +4,7 @@ import annotation.strictfp
@strictfp class FPTest {
def main(args: Array[String]): Unit = {
- val d: Double = 8e+307
+ val d: Double = 8e + 307
println(4.0 * d * 0.5);
println(2.0 * d);
}
diff --git a/tests/untried/pos/SI-4012-a.scala b/tests/untried/pos/SI-4012-a.scala
index 7fceeea3c..8ce8e5691 100644
--- a/tests/untried/pos/SI-4012-a.scala
+++ b/tests/untried/pos/SI-4012-a.scala
@@ -4,4 +4,4 @@ trait C1[+A] {
trait C2[@specialized +A] extends C1[A] {
override def head: A = super.head
}
-class C3 extends C2[Char]
+class C3 extends C2[Char]
diff --git a/tests/untried/pos/SI-4012-b.scala b/tests/untried/pos/SI-4012-b.scala
index 6bc859276..102019463 100644
--- a/tests/untried/pos/SI-4012-b.scala
+++ b/tests/untried/pos/SI-4012-b.scala
@@ -3,13 +3,13 @@ trait Super[@specialized(Int) A] {
}
object Sub extends Super[Int] {
- // it is expected that super[Super].superb crashes, since
+ // it is expected that super[Super].superb crashes, since
// specialization does parent class rewiring, and the super
// of Sub becomes Super$mcII$sp and not Super. But I consider
// this normal behavior -- if you want, I can modify duplicatiors
// to make this work, but I consider it's best to keep this
// let the user know Super is not the superclass anymore.
// super[Super].superb - Vlad
- super.superb // okay
+ super.superb // okay
override def superb: Int = super.superb // okay
}
diff --git a/tests/untried/pos/ilya2/A.scala b/tests/untried/pos/ilya2/A.scala
index 923b50f04..e9b225be8 100644
--- a/tests/untried/pos/ilya2/A.scala
+++ b/tests/untried/pos/ilya2/A.scala
@@ -1,3 +1,3 @@
class A {
- def foo = new B().bar(null)
+ def foo = new B().bar(null)
}
diff --git a/tests/untried/pos/iterator-traversable-mix.scala b/tests/untried/pos/iterator-traversable-mix.scala
index 2d6bf44c7..acc9c13ad 100644
--- a/tests/untried/pos/iterator-traversable-mix.scala
+++ b/tests/untried/pos/iterator-traversable-mix.scala
@@ -4,5 +4,5 @@ object Test {
x2 <- Iterator(3, 4)
x3 <- Seq(5, 6).iterator
x4 <- Stream(7, 8)
- } yield x1+x2+x3+x4
+ } yield x1 + x2 + x3 + x4
}
diff --git a/tests/untried/pos/lexical.scala b/tests/untried/pos/lexical.scala
index 8c29513bb..8c29513bb 100755..100644
--- a/tests/untried/pos/lexical.scala
+++ b/tests/untried/pos/lexical.scala
diff --git a/tests/untried/pos/nested.scala b/tests/untried/pos/nested.scala
index b038fce39..f73790ae0 100644
--- a/tests/untried/pos/nested.scala
+++ b/tests/untried/pos/nested.scala
@@ -4,9 +4,9 @@
class A(pa : Int) {
def a1 = pa;
class B(pb : Int) {
- def b1 = pa+pb+a1;
+ def b1 = pa + pb + a1;
class C(pc : Int) extends A(b1) {
- def c1 = pc+pb+pa
+ def c1 = pc + pb + pa
}
val c1 = new C(66)
}
@@ -22,7 +22,7 @@ class A1(x0 : Int) extends A(x0) with M {
class D() extends B(42) {
val c2 = new C(66);
class E() extends C(5) {
- def e1 = c1+b1+a1;
+ def e1 = c1 + b1 + a1;
def e2 = new D();
}
}
diff --git a/tests/untried/pos/packageobjs.scala b/tests/untried/pos/packageobjs.scala
index ccab13371..ccab13371 100755..100644
--- a/tests/untried/pos/packageobjs.scala
+++ b/tests/untried/pos/packageobjs.scala
diff --git a/tests/untried/pos/pos-bug1210.scala b/tests/untried/pos/pos-bug1210.scala
index eb163a956..c19e77e80 100644
--- a/tests/untried/pos/pos-bug1210.scala
+++ b/tests/untried/pos/pos-bug1210.scala
@@ -7,21 +7,21 @@ object Test
abstract class M
{ self =>
- type Settings
- type selfType = M {type Settings = self.Settings}
+ type Settings
+ type selfType = M {type Settings = self.Settings}
val v: List[selfType] = f[selfType]((x: selfType) => x.v)
}
abstract class M2
{ self =>
- type Settings
- type selfType = M2 {type Settings = self.Settings}
+ type Settings
+ type selfType = M2 {type Settings = self.Settings}
def g: List[selfType] = Nil
{
- f[selfType](_.g)
+ f[selfType](_.g)
}
}
}
diff --git a/tests/untried/pos/scoping2.scala b/tests/untried/pos/scoping2.scala
index 39f3ef5f0..299c5e304 100644
--- a/tests/untried/pos/scoping2.scala
+++ b/tests/untried/pos/scoping2.scala
@@ -4,8 +4,8 @@ object That {
trait I {}
}
trait B {
- type T <: J;
- trait J {}
+ type T <: J;
+ trait J {}
}
trait C extends A with B {
type T <: I with J;
diff --git a/tests/untried/pos/simple-exceptions.scala b/tests/untried/pos/simple-exceptions.scala
index a9f16bf90..4572b9aa6 100644
--- a/tests/untried/pos/simple-exceptions.scala
+++ b/tests/untried/pos/simple-exceptions.scala
@@ -7,7 +7,7 @@ object Test {
def main(args: Array[String]): Unit = {
try {
try {
- Console.println("hi!")
+ Console.println("hi!")
sys.error("xx")
}
finally Console.println("ho!")
diff --git a/tests/untried/pos/spec-sparsearray-new.scala b/tests/untried/pos/spec-sparsearray-new.scala
index df31089fe..4903b1722 100644
--- a/tests/untried/pos/spec-sparsearray-new.scala
+++ b/tests/untried/pos/spec-sparsearray-new.scala
@@ -4,7 +4,7 @@ import scala.collection.mutable.MapLike
class SparseArray[@specialized(Int) T:ClassTag] extends collection.mutable.Map[Int,T] with collection.mutable.MapLike[Int,T,SparseArray[T]] {
override def get(x: Int) = {
val ind = findOffset(x)
- if(ind < 0) None else Some(sys.error("ignore"))
+ if (ind < 0) None else Some(sys.error("ignore"))
}
/**
diff --git a/tests/untried/pos/spec-sparsearray-old.scala b/tests/untried/pos/spec-sparsearray-old.scala
index e10dabd54..99a6309cc 100644
--- a/tests/untried/pos/spec-sparsearray-old.scala
+++ b/tests/untried/pos/spec-sparsearray-old.scala
@@ -3,7 +3,7 @@ import scala.collection.mutable.MapLike
class SparseArray[@specialized(Int) T:ClassManifest] extends collection.mutable.Map[Int,T] with collection.mutable.MapLike[Int,T,SparseArray[T]] {
override def get(x: Int) = {
val ind = findOffset(x)
- if(ind < 0) None else Some(sys.error("ignore"))
+ if (ind < 0) None else Some(sys.error("ignore"))
}
/**
diff --git a/tests/untried/pos/spec-t6286.scala b/tests/untried/pos/spec-t6286.scala
index 4d87998ec..4d87998ec 100755..100644
--- a/tests/untried/pos/spec-t6286.scala
+++ b/tests/untried/pos/spec-t6286.scala
diff --git a/tests/untried/pos/sudoku.scala b/tests/untried/pos/sudoku.scala
index 9435f504d..150586716 100644
--- a/tests/untried/pos/sudoku.scala
+++ b/tests/untried/pos/sudoku.scala
@@ -12,12 +12,12 @@ object SudokuSolver extends App {
// coordinate
def invalid(i: Int, x: Int, y: Int, n: Char): Boolean =
i<9 && (m(y)(i) == n || m(i)(x) == n ||
- m(y/3*3 + i/3)(x/3*3 + i % 3) == n || invalid(i+1, x, y, n))
+ m(y/3*3 + i/3)(x/3*3 + i % 3) == n || invalid(i + 1, x, y, n))
// Looping over a half-closed range of consecutive integers [l..u)
// is factored out into a higher-order function
def fold(f: (Int, Int) => Int, accu: Int, l: Int, u: Int): Int =
- if(l==u) accu else fold(f, f(accu, l), l+1, u)
+ if (l==u) accu else fold(f, f(accu, l), l + 1, u)
// The search function examines each position on the board in turn,
// trying the numbers 1..9 in each unfilled position
@@ -25,17 +25,17 @@ object SudokuSolver extends App {
// accu by applying the given function f to it whenever a solution m
// is found
def search(x:Int, y:Int, f: (Int) => Int, accu: Int): Int = (x, y) match {
- case (9, y) => search(0, y+1, f, accu) // next row
+ case (9, y) => search(0, y + 1, f, accu) // next row
case (0, 9) => f(accu) // found a solution
- case (x, y) => if (m(y)(x) != '0') search(x+1, y, f, accu) else
+ case (x, y) => if (m(y)(x) != '0') search(x + 1, y, f, accu) else
fold((accu: Int, n: Int) =>
if (invalid(0, x, y, (n + 48).toChar)) accu else {
m(y)(x) = (n + 48).toChar;
- val newaccu = search(x+1, y, f, accu);
+ val newaccu = search(x + 1, y, f, accu);
m(y)(x) = '0';
newaccu}, accu, 1, 10)}
// The main part of the program uses the search function to accumulate
// the total number of solutions
- println("\n"+search(0,0,i => {print; i+1},0)+" solution(s)")
+ println("\n" + search(0,0,i => {print; i + 1},0)+" solution(s)")
}
diff --git a/tests/untried/pos/t262.scala b/tests/untried/pos/t262.scala
index ec6187b36..9f7686a8f 100644
--- a/tests/untried/pos/t262.scala
+++ b/tests/untried/pos/t262.scala
@@ -3,7 +3,7 @@ object O {
def f:A;
}
class B extends A {
- def f = if(1 == 2) new C else new D;
+ def f = if (1 == 2) new C else new D;
}
class C extends A {
def f = this;
diff --git a/tests/untried/pos/t2635.scala b/tests/untried/pos/t2635.scala
index 7cd553135..7cd553135 100755..100644
--- a/tests/untried/pos/t2635.scala
+++ b/tests/untried/pos/t2635.scala
diff --git a/tests/untried/pos/t2683.scala b/tests/untried/pos/t2683.scala
index 4ba34b554..4ba34b554 100755..100644
--- a/tests/untried/pos/t2683.scala
+++ b/tests/untried/pos/t2683.scala
diff --git a/tests/untried/pos/t3136.scala b/tests/untried/pos/t3136.scala
index 33d42c2f3..239bd8f54 100644
--- a/tests/untried/pos/t3136.scala
+++ b/tests/untried/pos/t3136.scala
@@ -13,7 +13,7 @@ object NullaryMethodType {
object Test {
def TEST(tp: Type): String =
tp match {
- case PolyType(ps1, PolyType(ps2, res @ PolyType(a, b))) => "1"+tp // couldn't find a simpler version that still crashes
- case NullaryMethodType(meh) => "2"+meh
+ case PolyType(ps1, PolyType(ps2, res @ PolyType(a, b))) => "1" + tp // couldn't find a simpler version that still crashes
+ case NullaryMethodType(meh) => "2" + meh
}
}
diff --git a/tests/untried/pos/t3174b.scala b/tests/untried/pos/t3174b.scala
index 4df1bfe83..4df1bfe83 100755..100644
--- a/tests/untried/pos/t3174b.scala
+++ b/tests/untried/pos/t3174b.scala
diff --git a/tests/untried/pos/t3570.scala b/tests/untried/pos/t3570.scala
index 0e20905af..237391719 100644
--- a/tests/untried/pos/t3570.scala
+++ b/tests/untried/pos/t3570.scala
@@ -1,7 +1,7 @@
class test {
- object Break extends Throwable
- def break = throw Break
- def block(x: => Unit): Unit = {
- try { x } catch { case e: Break.type => }
- }
+ object Break extends Throwable
+ def break = throw Break
+ def block(x: => Unit): Unit = {
+ try { x } catch { case e: Break.type => }
+ }
}
diff --git a/tests/untried/pos/t3578.scala b/tests/untried/pos/t3578.scala
index d98411820..2ce92a776 100644
--- a/tests/untried/pos/t3578.scala
+++ b/tests/untried/pos/t3578.scala
@@ -2,16 +2,16 @@ object Test {
sealed abstract class JValue {
def ++(other: JValue) = {
def append(value1: JValue, value2: JValue): JValue = (value1, value2) match {
- case (JNothing, x) => x
- case (x, JNothing) => x
- case (JObject(xs), x: JField) => JObject(xs ::: List(x))
- case (x: JField, JObject(xs)) => JObject(x :: xs)
- case (JArray(xs), JArray(ys)) => JArray(xs ::: ys)
- case (JArray(xs), v: JValue) => JArray(xs ::: List(v))
- case (v: JValue, JArray(xs)) => JArray(v :: xs)
- case (f1: JField, f2: JField) => JObject(f1 :: f2 :: Nil)
- case (JField(n, v1), v2: JValue) => JField(n, append(v1, v2))
- case (x, y) => JArray(x :: y :: Nil)
+ case (JNothing, x) => x
+ case (x, JNothing) => x
+ case (JObject(xs), x: JField) => JObject(xs ::: List(x))
+ case (x: JField, JObject(xs)) => JObject(x :: xs)
+ case (JArray(xs), JArray(ys)) => JArray(xs ::: ys)
+ case (JArray(xs), v: JValue) => JArray(xs ::: List(v))
+ case (v: JValue, JArray(xs)) => JArray(v :: xs)
+ case (f1: JField, f2: JField) => JObject(f1 :: f2 :: Nil)
+ case (JField(n, v1), v2: JValue) => JField(n, append(v1, v2))
+ case (x, y) => JArray(x :: y :: Nil)
}
append(this, other)
}
diff --git a/tests/untried/pos/t3670.scala b/tests/untried/pos/t3670.scala
index ec4fbe5b4..4eb7cebbc 100644
--- a/tests/untried/pos/t3670.scala
+++ b/tests/untried/pos/t3670.scala
@@ -25,7 +25,7 @@ class B {
class C {
val things = List("things")
- if(things.size < 100) {
+ if (things.size < 100) {
lazy val msg = "foo"
msg
}
@@ -33,7 +33,7 @@ class C {
class D {
val things = List("things")
- if(things.size < 100) {
+ if (things.size < 100) {
if (things.size > 10) {
lazy val msg = "foo"
msg
diff --git a/tests/untried/pos/t4220.scala b/tests/untried/pos/t4220.scala
index 98f264976..280bb6dab 100644
--- a/tests/untried/pos/t4220.scala
+++ b/tests/untried/pos/t4220.scala
@@ -3,5 +3,5 @@
class Boo(a: Int = 0)
object test {
- class Boo
+ class Boo
}
diff --git a/tests/untried/pos/t443.scala b/tests/untried/pos/t443.scala
index cdaefe9ec..f1f7ec258 100644
--- a/tests/untried/pos/t443.scala
+++ b/tests/untried/pos/t443.scala
@@ -3,12 +3,12 @@ object Test {
def lookup(): Option[Tuple2[String, String]] =
((null: Option[Tuple2[String, String]]) : @unchecked) match {
case Some((_, _)) =>
- if (true)
- Some((null, null))
- else
- lookup() match {
- case Some(_) => Some(null)
- case None => None
- }
+ if (true)
+ Some((null, null))
+ else
+ lookup() match {
+ case Some(_) => Some(null)
+ case None => None
+ }
}
}
diff --git a/tests/untried/pos/t4842.scala b/tests/untried/pos/t4842.scala
index 17ff68483..24a71294d 100644
--- a/tests/untried/pos/t4842.scala
+++ b/tests/untried/pos/t4842.scala
@@ -13,14 +13,14 @@ class Blerg (x: AnyRef) {
class Outer {
- class Inner (x: AnyRef) {
- def this() = {
- this(new { class Bar { println(Bar.this); new { println(Bar.this) } }; new Bar } ) // okay
- }
+ class Inner (x: AnyRef) {
+ def this() = {
+ this(new { class Bar { println(Bar.this); new { println(Bar.this) } }; new Bar } ) // okay
+ }
- def this(x: Boolean) = {
- this(new { println(Outer.this) } ) // okay
- }
- }
+ def this(x: Boolean) = {
+ this(new { println(Outer.this) } ) // okay
+ }
+ }
}
diff --git a/tests/untried/pos/t592.scala b/tests/untried/pos/t592.scala
index 6a941ef51..bad1c8528 100644
--- a/tests/untried/pos/t592.scala
+++ b/tests/untried/pos/t592.scala
@@ -19,7 +19,7 @@ abstract class DirectedGraph extends Graph {
class EdgeImpl(origin: Node, dest: Node) {
def from = origin;
def to = dest;
- override def toString = ""+origin+" --> "+dest
+ override def toString = "" + origin +" --> "+ dest
}
class NodeImpl extends NodeIntf { self: Node =>
@@ -30,7 +30,7 @@ abstract class DirectedGraph extends Graph {
edges = edge :: edges;
edge;
}
- override def toString = "Node "+id
+ override def toString = "Node " + id
}
protected def newNode: Node;
diff --git a/tests/untried/pos/t6028/t6028_1.scala b/tests/untried/pos/t6028/t6028_1.scala
index 6edb76069..521d2e07d 100644
--- a/tests/untried/pos/t6028/t6028_1.scala
+++ b/tests/untried/pos/t6028/t6028_1.scala
@@ -1,3 +1,3 @@
class C {
- def foo(a: Int): Unit = () => a
+ def foo(a: Int): Unit = () => a
}
diff --git a/tests/untried/pos/t6028/t6028_2.scala b/tests/untried/pos/t6028/t6028_2.scala
index f44048c0a..de053b8e6 100644
--- a/tests/untried/pos/t6028/t6028_2.scala
+++ b/tests/untried/pos/t6028/t6028_2.scala
@@ -1,4 +1,4 @@
object Test {
- // ensure that parameter names are untouched by lambdalift
- new C().foo(a = 0)
+ // ensure that parameter names are untouched by lambdalift
+ new C().foo(a = 0)
}
diff --git a/tests/untried/pos/t6601/UsePrivateValueClass_2.scala b/tests/untried/pos/t6601/UsePrivateValueClass_2.scala
index 461b8397b..ec9793751 100644
--- a/tests/untried/pos/t6601/UsePrivateValueClass_2.scala
+++ b/tests/untried/pos/t6601/UsePrivateValueClass_2.scala
@@ -1,8 +1,8 @@
object Test {
- // After the first attempt to make seprately compiled value
- // classes respect the privacy of constructors, we got:
- //
- // exception when typing v.a().==(v.a())/class scala.reflect.internal.Trees$Apply
+ // After the first attempt to make seprately compiled value
+ // classes respect the privacy of constructors, we got:
+ //
+ // exception when typing v.a().==(v.a())/class scala.reflect.internal.Trees$Apply
// constructor V in class V cannot be accessed in object Test in file test/files/pos/t6601/UsePrivateValueClass_2.scala
// scala.reflect.internal.Types$TypeError: constructor V in class V cannot be accessed in object Test
def foo(v: V) = v.a == v.a
diff --git a/tests/untried/pos/t7532b/A_1.scala b/tests/untried/pos/t7532b/A_1.scala
index 586465ee6..6d70d15ff 100644
--- a/tests/untried/pos/t7532b/A_1.scala
+++ b/tests/untried/pos/t7532b/A_1.scala
@@ -1,7 +1,7 @@
package pack
class R {
- class attr // Will have the bytecode name `R$attr`, not to be confused with `R@tr`!
- class `@`
+ class attr // Will have the bytecode name `R$attr`, not to be confused with `R@tr`!
+ class `@`
}
class `@`
diff --git a/tests/untried/pos/t789.scala b/tests/untried/pos/t789.scala
index 7a17f10b0..c453e229a 100644
--- a/tests/untried/pos/t789.scala
+++ b/tests/untried/pos/t789.scala
@@ -9,16 +9,16 @@ object main { // don't do this at home
type Both = SizeImpl with ColorImpl
def info(x:Impl) = x match {
- case x:Both => "size "+x.size+" color "+x.color // you wish
- case x:SizeImpl => "size "+x.size
- case x:ColorImpl => "color "+x.color
+ case x:Both => "size " + x.size +" color "+ x.color // you wish
+ case x:SizeImpl => "size " + x.size
+ case x:ColorImpl => "color " + x.color
case _ => "n.a."
}
def info2(x:Impl) = x match {
- case x:SizeImpl with ColorImpl => "size "+x.size+" color "+x.color // you wish
- case x:SizeImpl => "size "+x.size
- case x:ColorImpl => "color "+x.color
+ case x:SizeImpl with ColorImpl => "size " + x.size +" color "+ x.color // you wish
+ case x:SizeImpl => "size " + x.size
+ case x:ColorImpl => "color " + x.color
case _ => "n.a."
}
diff --git a/tests/untried/pos/t8046.scala b/tests/untried/pos/t8046.scala
index 304d70b6b..9beb0b9d1 100644
--- a/tests/untried/pos/t8046.scala
+++ b/tests/untried/pos/t8046.scala
@@ -2,13 +2,13 @@ trait One {
type Op[A]
type Alias[A] = Op[A]
}
-
+
trait Two extends One {
trait Op[A] extends (A => A)
-
+
// This compiles
class View1 extends Op[Int] { def apply(xs: Int) = xs }
-
+
// ??? base class View2 not found in basetypes of class View2
// ./a.scala:9: error: class View2 needs to be abstract, since \
// method apply in trait Function1 of type (v1: T1)R is not defined
diff --git a/tests/untried/pos/t8046b.scala b/tests/untried/pos/t8046b.scala
index 45b99fd7e..99d06b7e0 100644
--- a/tests/untried/pos/t8046b.scala
+++ b/tests/untried/pos/t8046b.scala
@@ -2,14 +2,14 @@ trait One {
type Op[A]
type Alias = Op[Int]
}
-
+
trait Two extends One {
trait Op[A] extends M[A]
//(a: Alias) => a.value.toChar // okay
// (=> A).asSeenFrom(a.type, trait M): => Int
class View2 extends Alias { value.toChar } // toChar is not a member of type parameter A
// (=> A).asSeenFrom(View2.this.type, trait M): => A
-
+
// override type Alias = Op[Int] // works with this
}
diff --git a/tests/untried/pos/t8060.scala b/tests/untried/pos/t8060.scala
index 90e014d74..24881b60e 100644
--- a/tests/untried/pos/t8060.scala
+++ b/tests/untried/pos/t8060.scala
@@ -1,10 +1,10 @@
trait M[F[_]]
-
+
trait P[A] {
type CC[X] = P[X]
def f(p: A => Boolean): M[CC]
}
-
+
trait Other {
// was infinite loop trying to dealias `x$1.CC`
def g[A](p: A => Boolean): P[A] => M[P] = _ f p
diff --git a/tests/untried/pos/t8170.scala b/tests/untried/pos/t8170.scala
index 1991da72f..fe9f262ba 100644
--- a/tests/untried/pos/t8170.scala
+++ b/tests/untried/pos/t8170.scala
@@ -22,6 +22,6 @@ this = {AliasArgsTypeRef@3004}"Test#7680.a#14899.T#14823[O#7702.X#7793]"
info = namer: [F#14824 <: O#7703.X#7793]F#14824
result = {AbstractNoArgsTypeRef@3237}"F#24451"
tp = {PolyType@3235}"[F#14824 <: O#7703.X#7793]F#14824"
-tparams =
+tparams =
(0) = {AbstractTypeSymbol@3247}"type F#24451"
*/
diff --git a/tests/untried/pos/t8170b.scala b/tests/untried/pos/t8170b.scala
index 53036f6c8..e3d1d33d9 100644
--- a/tests/untried/pos/t8170b.scala
+++ b/tests/untried/pos/t8170b.scala
@@ -13,13 +13,13 @@ object ScalaZeee {
type Folded[N[X] >: M[X], U, F <: HFold[N, U]] <: U
}
}
-
+
object TypelevelUsage {
import ScalaZeee._
type T = GenericCons[Some, String, KNil.type]
val klist1: T = ???
type T2 = klist1.Folded[Option, Int, HFold[Option, Int]]
val count2: T2 = ???
-
+
count2.ensuring(x => true).toChar // trigger an implicit search
}
diff --git a/tests/untried/pos/t8315.scala b/tests/untried/pos/t8315.scala
index 2f7742ed6..f56cfda7a 100644
--- a/tests/untried/pos/t8315.scala
+++ b/tests/untried/pos/t8315.scala
@@ -2,11 +2,11 @@ object Test {
def crash(as: Listt): Unit = {
map(as, (_: Any) => return)
}
-
+
final def map(x: Listt, f: Any => Any): Any = {
if (x eq Nill) "" else f("")
}
}
-
+
object Nill extends Listt
class Listt
diff --git a/tests/untried/pos/t8363.scala b/tests/untried/pos/t8363.scala
index 639faf412..aecb8e4dc 100644
--- a/tests/untried/pos/t8363.scala
+++ b/tests/untried/pos/t8363.scala
@@ -1,5 +1,5 @@
class C(a: Any)
-class Test {
+class Test {
def foo: Any = {
def form = 0
class C1 extends C(() => form)
diff --git a/tests/untried/pos/t8376/Test.scala b/tests/untried/pos/t8376/Test.scala
index ba078a353..9440d76fa 100644
--- a/tests/untried/pos/t8376/Test.scala
+++ b/tests/untried/pos/t8376/Test.scala
@@ -5,6 +5,6 @@ class Test {
}
object BindingsY {
- def select1(root: String, steps: String*) = ()
+ def select1(root: String, steps: String*) = ()
def select1(root: Any, steps: String*) = ()
}
diff --git a/tests/untried/pos/tcpoly_infer_easy.scala b/tests/untried/pos/tcpoly_infer_easy.scala
index 0f1929502..bfa369f4a 100644
--- a/tests/untried/pos/tcpoly_infer_easy.scala
+++ b/tests/untried/pos/tcpoly_infer_easy.scala
@@ -1,5 +1,5 @@
object Test {
- def test[CC[+X] <: Iterable[X], A](xs: CC[A]): CC[A] = xs
- val xs = test(List(1,2))
- val xs2: List[Int] = test(List(1,2))
+ def test[CC[+X] <: Iterable[X], A](xs: CC[A]): CC[A] = xs
+ val xs = test(List(1,2))
+ val xs2: List[Int] = test(List(1,2))
}
diff --git a/tests/untried/pos/test4refine.scala b/tests/untried/pos/test4refine.scala
index 671096293..abf078193 100644
--- a/tests/untried/pos/test4refine.scala
+++ b/tests/untried/pos/test4refine.scala
@@ -18,7 +18,7 @@ trait S extends o.I {
abstract class O() {
type X;
abstract trait I {
- type Y;
+ type Y;
def foo(x: X, y: Y): E = e;
}
val i:I { type Y = E } = null;
diff --git a/tests/untried/pos/unapplyComplex.scala b/tests/untried/pos/unapplyComplex.scala
index 148fcc1bb..5261b70f4 100644
--- a/tests/untried/pos/unapplyComplex.scala
+++ b/tests/untried/pos/unapplyComplex.scala
@@ -12,28 +12,28 @@ class ComplexPolar(val _1: Double, val _2: Double) extends Complex {
object ComplexRect {
def unapply(z:Complex): Option[Complex] = {
- if(z.isInstanceOf[ComplexRect]) Some(z) else z match {
+ if (z.isInstanceOf[ComplexRect]) Some(z) else z match {
case ComplexPolar(mod, arg) =>
- Some(new ComplexRect(mod*math.cos(arg), mod*math.sin(arg)))
+ Some(new ComplexRect(mod*math.cos(arg), mod*math.sin(arg)))
} } }
object ComplexPolar {
def unapply(z:Complex): Option[Complex] = {
- if(z.isInstanceOf[ComplexPolar]) Some(z) else z match {
+ if (z.isInstanceOf[ComplexPolar]) Some(z) else z match {
case ComplexRect(re,im) =>
- Some(new ComplexPolar(math.sqrt(re*re + im*im), math.atan(re/im)))
+ Some(new ComplexPolar(math.sqrt(re*re + im*im), math.atan(re/im)))
} } }
object Test {
def main(args:Array[String]) = {
new ComplexRect(1,1) match {
case ComplexPolar(mod,arg) => // z @ ???
- Console.println("mod"+mod+"arg"+arg)
+ Console.println("mod"+mod+"arg"+arg)
}
val Komplex = ComplexRect
new ComplexPolar(math.sqrt(2),math.Pi / 4.0) match {
case Komplex(re,im) => // z @ ???
- Console.println("re"+re+" im"+im)
+ Console.println("re"+re+" im"+im)
}
}
}
diff --git a/tests/untried/pos/unapplyVal.scala b/tests/untried/pos/unapplyVal.scala
index 368b9b937..d6dea324f 100644
--- a/tests/untried/pos/unapplyVal.scala
+++ b/tests/untried/pos/unapplyVal.scala
@@ -12,7 +12,7 @@ class Buffer {
x match {
case Put =>
case Put(y) =>
- println("returning "+y)
+ println("returning " + y)
}
}
}