From 78d917393ea03ef94f892549f87cbc2cabba8ac6 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Thu, 23 Feb 2017 15:44:37 -0800 Subject: SI-10206 tighten fix for SI-6889 There are more supertypes of `AnyRef` than you might think: `?{def clone: ?}` is one example... --- .../scala/tools/nsc/typechecker/Implicits.scala | 41 ++++++++++++---------- 1 file changed, 22 insertions(+), 19 deletions(-) (limited to 'src/compiler/scala') diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index 509ce59104..2333c29b30 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -134,11 +134,6 @@ trait Implicits { private val improvesCache = perRunCaches.newMap[(ImplicitInfo, ImplicitInfo), Boolean]() private val implicitSearchId = { var id = 1 ; () => try id finally id += 1 } - private def isInvalidConversionSource(tpe: Type): Boolean = tpe match { - case Function1(in, _) => in <:< NullClass.tpe - case _ => false - } - def resetImplicits() { implicitsCache.clear() infoMapCache.clear() @@ -1388,27 +1383,32 @@ trait Implicits { } } if (result.isSuccess && isView) { - def maybeInvalidConversionError(msg: String) { + def maybeInvalidConversionError(msg: String): Boolean = { // We have to check context.ambiguousErrors even though we are calling "issueAmbiguousError" // which ostensibly does exactly that before issuing the error. Why? I have no idea. Test is pos/t7690. // AM: I would guess it's because ambiguous errors will be buffered in silent mode if they are not reported if (context.ambiguousErrors) context.issueAmbiguousError(AmbiguousImplicitTypeError(tree, msg)) + true } pt match { - case Function1(_, out) => - // must inline to avoid capturing result - def prohibit(sym: Symbol) = (sym.tpe <:< out) && { - maybeInvalidConversionError(s"the result type of an implicit conversion must be more specific than ${sym.name}") - true - } - if (prohibit(AnyRefClass) || (settings.isScala211 && prohibit(AnyValClass))) - result = SearchFailure - case _ => false - } - if (settings.isScala211 && isInvalidConversionSource(pt)) { - maybeInvalidConversionError("an expression of type Null is ineligible for implicit conversion") - result = SearchFailure + // SI-10206 don't use subtyping to rule out AnyRef/AnyVal: + // - there are several valid structural types that are supertypes of AnyRef (e.g., created by HasMember); + // typeSymbol will do the trick (AnyRef is a type alias for Object), while ruling out these structural types + // - also don't want to accidentally constrain type vars through using <:< + case Function1(in, out) => + val outSym = out.typeSymbol + + val fail = + if (out.annotations.isEmpty && (outSym == ObjectClass || (isScala211 && outSym == AnyValClass))) + maybeInvalidConversionError(s"the result type of an implicit conversion must be more specific than $out") + else if (isScala211 && in.annotations.isEmpty && in.typeSymbol == NullClass) + maybeInvalidConversionError("an expression of type Null is ineligible for implicit conversion") + else false + + if (fail) result = SearchFailure + + case _ => } } @@ -1418,6 +1418,9 @@ trait Implicits { result } + // this setting is expensive to check, actually.... 
+ private[this] val isScala211 = settings.isScala211 + def allImplicits: List[SearchResult] = { def search(iss: Infoss, isLocalToCallsite: Boolean) = applicableInfos(iss, isLocalToCallsite).values ( -- cgit v1.2.3 From 894b027ddf1551317ecdbda053aec90b6d6fa2f4 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Sun, 26 Feb 2017 16:01:37 -0800 Subject: Allow user-defined `[un]apply` in case companion Don't emit a synthetic `apply` (or `unapply`) when it would clash with an existing one. This allows e.g., a `private apply`, along with a `case class` with a `private` constructor. We have to retract the synthetic method in a pretty roundabout way, as we need the other methods and the owner to be completed already. Unless we have to complete the synthetic `apply` while completing the user-defined one, this should not be a problem. If this does happen, this implies there's a cycle in computing the user-defined signature and the synthetic one, which is not allowed. --- .../scala/tools/nsc/typechecker/Namers.scala | 81 ++++++++++++++++++---- test/files/neg/userdefined_apply.check | 13 ++++ test/files/neg/userdefined_apply.scala | 31 +++++++++ test/files/pos/userdefined_apply.scala | 36 ++++++++++ 4 files changed, 149 insertions(+), 12 deletions(-) create mode 100644 test/files/neg/userdefined_apply.check create mode 100644 test/files/neg/userdefined_apply.scala create mode 100644 test/files/pos/userdefined_apply.scala (limited to 'src/compiler/scala') diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index ee64a6646f..19607b6681 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -610,7 +610,15 @@ trait Namers extends MethodSynthesis { noDuplicates(selectors map (_.rename), AppearsTwice) } - def enterCopyMethod(copyDef: DefDef): Symbol = { + class CompleterWrapper(completer: TypeCompleter) extends TypeCompleter { + val tree = completer.tree + + override def complete(sym: Symbol): Unit = { + completer.complete(sym) + } + } + + def copyMethodCompleter(copyDef: DefDef): TypeCompleter = { val sym = copyDef.symbol val lazyType = completerOf(copyDef) @@ -629,14 +637,57 @@ trait Namers extends MethodSynthesis { ) } - sym setInfo { - mkTypeCompleter(copyDef) { sym => - assignParamTypes() - lazyType complete sym - } + mkTypeCompleter(copyDef) { sym => + assignParamTypes() + lazyType complete sym } } + // for apply/unapply, which may need to disappear when they clash with a user-defined method of matching signature + def applyUnapplyMethodCompleter(un_applyDef: DefDef, companionContext: Context): TypeCompleter = + new CompleterWrapper(completerOf(un_applyDef)) { + override def complete(sym: Symbol): Unit = { + super.complete(sym) + + // If there's a same-named locked symbol, we're currently completing its signature. + // This means it (may) refer to us, and is thus either overloaded or recursive without a signature. + // rule out locked symbols from the owner.info.member call + val scopePartiallyCompleted = + companionContext.scope.lookupAll(sym.name).exists(existing => existing != sym && existing.hasFlag(LOCKED)) + + val suppress = + scopePartiallyCompleted || { + val userDefined = companionContext.owner.info.member(sym.name).filter(_ != sym) + (userDefined != NoSymbol) && { + userDefined.info match { + // TODO: do we have something for this already? the synthetic symbol can't be overloaded, right? 
+ case OverloadedType(pre, alternatives) => + // pre probably relevant because of inherited overloads? + alternatives.exists(_.isErroneous) || alternatives.exists(alt => pre.memberInfo(alt) matches pre.memberInfo(sym)) + case tp => + (tp eq ErrorType) || tp.matches(sym.info) + } + } + } + + if (suppress) { + sym setInfo ErrorType + sym setFlag IS_ERROR + + // Don't unlink in an error situation to generate less confusing error messages. + // Ideally, our error reporting would distinguish overloaded from recursive user-defined apply methods without signature, + // but this would require some form of partial-completion of method signatures, so that we can + // know what the argument types were, even though we can't complete the result type, because + // we hit a cycle while trying to compute it (when we get here with locked user-defined symbols, we + // are in the complete for that symbol, and thus the locked symbol has not yet received enough info; + // I hesitate to provide more info, because it would involve a WildCard or something for its result type, + // which could upset other code paths) + if (!scopePartiallyCompleted) + companionContext.scope.unlink(sym) + } + } + } + def completerOf(tree: Tree): TypeCompleter = { val mono = namerOf(tree.symbol) monoTypeCompleter tree val tparams = treeInfo.typeParameters(tree) @@ -697,11 +748,17 @@ trait Namers extends MethodSynthesis { val bridgeFlag = if (mods hasAnnotationNamed tpnme.bridgeAnnot) BRIDGE | ARTIFACT else 0 val sym = assignAndEnterSymbol(tree) setFlag bridgeFlag - if (name == nme.copy && sym.isSynthetic) - enterCopyMethod(tree) - else - sym setInfo completerOf(tree) - } + // copy/apply/unapply synthetics are added using the addIfMissing mechanism, + // which ensures the owner has its preliminary info (we may add another decl here) + val completer = + if (sym hasFlag SYNTHETIC) { + if (name == nme.copy) copyMethodCompleter(tree) + else if (sym hasFlag CASE) applyUnapplyMethodCompleter(tree, context) + else completerOf(tree) + } else completerOf(tree) + + sym setInfo completer + } def enterClassDef(tree: ClassDef) { val ClassDef(mods, _, _, impl) = tree @@ -1351,7 +1408,7 @@ trait Namers extends MethodSynthesis { val defTpt = // don't mess with tpt's of case copy default getters, because assigning something other than TypeTree() - // will break the carefully orchestrated naming/typing logic that involves enterCopyMethod and caseClassCopyMeth + // will break the carefully orchestrated naming/typing logic that involves copyMethodCompleter and caseClassCopyMeth if (meth.isCaseCopy) TypeTree() else { // If the parameter type mentions any type parameter of the method, let the compiler infer the diff --git a/test/files/neg/userdefined_apply.check b/test/files/neg/userdefined_apply.check new file mode 100644 index 0000000000..ca0154885d --- /dev/null +++ b/test/files/neg/userdefined_apply.check @@ -0,0 +1,13 @@ +userdefined_apply.scala:3: error: overloaded method apply needs result type + private def apply(x: Int) = if (x > 0) new ClashOverloadNoSig(x) else apply("") + ^ +userdefined_apply.scala:12: error: overloaded method apply needs result type + private def apply(x: Int) = if (x > 0) ClashRecNoSig(1) else ??? + ^ +userdefined_apply.scala:19: error: overloaded method apply needs result type + private def apply(x: Boolean) = if (x) NoClashNoSig(1) else ??? 
+ ^ +userdefined_apply.scala:26: error: overloaded method apply needs result type + private def apply(x: Boolean) = if (x) NoClashOverload(1) else apply("") + ^ +four errors found diff --git a/test/files/neg/userdefined_apply.scala b/test/files/neg/userdefined_apply.scala new file mode 100644 index 0000000000..1f2aff6e82 --- /dev/null +++ b/test/files/neg/userdefined_apply.scala @@ -0,0 +1,31 @@ +object ClashOverloadNoSig { + // error: overloaded method apply needs result type + private def apply(x: Int) = if (x > 0) new ClashOverloadNoSig(x) else apply("") + + def apply(x: String): ClashOverloadNoSig = ??? +} + +case class ClashOverloadNoSig private(x: Int) + +object ClashRecNoSig { + // error: recursive method apply needs result type + private def apply(x: Int) = if (x > 0) ClashRecNoSig(1) else ??? +} + +case class ClashRecNoSig private(x: Int) + +object NoClashNoSig { + // error: overloaded method apply needs result type + private def apply(x: Boolean) = if (x) NoClashNoSig(1) else ??? +} + +case class NoClashNoSig private(x: Int) + +object NoClashOverload { + // error: overloaded method apply needs result type + private def apply(x: Boolean) = if (x) NoClashOverload(1) else apply("") + + def apply(x: String): NoClashOverload = ??? +} + +case class NoClashOverload private(x: Int) diff --git a/test/files/pos/userdefined_apply.scala b/test/files/pos/userdefined_apply.scala new file mode 100644 index 0000000000..ca563f1dc5 --- /dev/null +++ b/test/files/pos/userdefined_apply.scala @@ -0,0 +1,36 @@ +// NOTE: the companion inherits a public apply method from Function1! +case class NeedsCompanion private (x: Int) + +object ClashNoSig { // ok + private def apply(x: Int) = if (x > 0) new ClashNoSig(x) else ??? +} +case class ClashNoSig private (x: Int) + + +object Clash { + private def apply(x: Int) = if (x > 0) new Clash(x) else ??? +} +case class Clash private (x: Int) + +object ClashSig { + private def apply(x: Int): ClashSig = if (x > 0) new ClashSig(x) else ??? +} +case class ClashSig private (x: Int) + +object ClashOverload { + private def apply(x: Int): ClashOverload = if (x > 0) new ClashOverload(x) else apply("") + def apply(x: String): ClashOverload = ??? +} +case class ClashOverload private (x: Int) + +object NoClashSig { + private def apply(x: Boolean): NoClashSig = if (x) NoClashSig(1) else ??? +} +case class NoClashSig private (x: Int) + +object NoClashOverload { + // needs full sig + private def apply(x: Boolean): NoClashOverload = if (x) NoClashOverload(1) else apply("") + def apply(x: String): NoClashOverload = ??? 
+} +case class NoClashOverload private (x: Int) -- cgit v1.2.3 From 615849058b5452b9d54ac152a1380ca7f81998c9 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Tue, 28 Feb 2017 14:14:11 -0800 Subject: Improvements based on reviews by Lukas & Jason --- spec/05-classes-and-objects.md | 5 +-- .../scala/tools/nsc/typechecker/Namers.scala | 48 ++++++++++++++-------- .../scala/reflect/internal/tpe/FindMembers.scala | 14 +++++++ test/files/neg/userdefined_apply.check | 20 +++++++-- test/files/neg/userdefined_apply.scala | 26 ++++++++++++ test/files/pos/userdefined_apply.scala | 18 ++++++++ 6 files changed, 107 insertions(+), 24 deletions(-) (limited to 'src/compiler/scala') diff --git a/spec/05-classes-and-objects.md b/spec/05-classes-and-objects.md index 82f399fe47..65666e31cb 100644 --- a/spec/05-classes-and-objects.md +++ b/spec/05-classes-and-objects.md @@ -873,10 +873,9 @@ If a type parameter section is missing in the class, it is also missing in the ` If the companion object $c$ is already defined, the `apply` and `unapply` methods are added to the existing object. +If the object $c$ already has a [matching](#definition-matching) +`apply` (or `unapply`) member, no new definition is added. The definition of `apply` is omitted if class $c$ is `abstract`. -If the object $c$ already defines a [matching](#definition-matching) member of the -same name as the synthetic member to be added, the synthetic member -is not added (overloading or mutual recursion is allowed, however). If the case class definition contains an empty value parameter list, the `unapply` method returns a `Boolean` instead of an `Option` type and diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 19607b6681..69b8cb12e6 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -647,25 +647,41 @@ trait Namers extends MethodSynthesis { def applyUnapplyMethodCompleter(un_applyDef: DefDef, companionContext: Context): TypeCompleter = new CompleterWrapper(completerOf(un_applyDef)) { override def complete(sym: Symbol): Unit = { + assert(sym hasAllFlags CASE | SYNTHETIC, sym.defString) + super.complete(sym) + // owner won't be locked + val ownerInfo = companionContext.owner.info + // If there's a same-named locked symbol, we're currently completing its signature. - // This means it (may) refer to us, and is thus either overloaded or recursive without a signature. - // rule out locked symbols from the owner.info.member call - val scopePartiallyCompleted = - companionContext.scope.lookupAll(sym.name).exists(existing => existing != sym && existing.hasFlag(LOCKED)) - - val suppress = - scopePartiallyCompleted || { - val userDefined = companionContext.owner.info.member(sym.name).filter(_ != sym) + // If `scopePartiallyCompleted`, the program is known to have a type error, since + // this means a user-defined method is missing a result type while its rhs refers to `sym` or an overload. + // This is an error because overloaded/recursive methods must have a result type. + // The method would be overloaded if its signature, once completed, would not match the synthetic method's, + // or recursive if it turned out we should unlink our synthetic method (matching sig). + // In any case, error out. We don't unlink the symbol so that `symWasOverloaded` says yes, + // which would be wrong if the method is in fact recursive, but it seems less confusing. 
+ val scopePartiallyCompleted = new HasMember(ownerInfo, sym.name, BridgeFlags | SYNTHETIC, LOCKED).apply() + + // Check `scopePartiallyCompleted` first to rule out locked symbols from the owner.info.member call, + // as FindMember will call info on a locked symbol (while checking type matching to assemble an overloaded type), + // and throw a TypeError, so that we are aborted. + // Do not consider deferred symbols, as suppressing our concrete implementation would be an error regardless + // of whether the signature matches (if it matches, we omitted a valid implementation, if it doesn't, + // we would get an error for the missing implementation it isn't implemented by some overload other than our synthetic one) + val suppress = scopePartiallyCompleted || { + // can't exclude deferred members using DEFERRED flag here (TODO: why?) + val userDefined = ownerInfo.memberBasedOnName(sym.name, BridgeFlags | SYNTHETIC) + (userDefined != NoSymbol) && { - userDefined.info match { - // TODO: do we have something for this already? the synthetic symbol can't be overloaded, right? - case OverloadedType(pre, alternatives) => - // pre probably relevant because of inherited overloads? - alternatives.exists(_.isErroneous) || alternatives.exists(alt => pre.memberInfo(alt) matches pre.memberInfo(sym)) - case tp => - (tp eq ErrorType) || tp.matches(sym.info) + assert(userDefined != sym) + val alts = userDefined.alternatives // could be just the one, if this member isn't overloaded + // don't compute any further `memberInfo`s if there's an error somewhere + alts.exists(_.isErroneous) || { + val self = companionContext.owner.thisType + val memberInfo = self.memberInfo(sym) + alts.exists(alt => !alt.isDeferred && (self.memberInfo(alt) matches memberInfo)) } } } @@ -748,8 +764,6 @@ trait Namers extends MethodSynthesis { val bridgeFlag = if (mods hasAnnotationNamed tpnme.bridgeAnnot) BRIDGE | ARTIFACT else 0 val sym = assignAndEnterSymbol(tree) setFlag bridgeFlag - // copy/apply/unapply synthetics are added using the addIfMissing mechanism, - // which ensures the owner has its preliminary info (we may add another decl here) val completer = if (sym hasFlag SYNTHETIC) { if (name == nme.copy) copyMethodCompleter(tree) diff --git a/src/reflect/scala/reflect/internal/tpe/FindMembers.scala b/src/reflect/scala/reflect/internal/tpe/FindMembers.scala index 83a5d23e7c..1b00815bca 100644 --- a/src/reflect/scala/reflect/internal/tpe/FindMembers.scala +++ b/src/reflect/scala/reflect/internal/tpe/FindMembers.scala @@ -285,4 +285,18 @@ trait FindMembers { initBaseClasses.head.newOverloaded(tpe, members) } } + + private[scala] final class HasMember(tpe: Type, name: Name, excludedFlags: Long, requiredFlags: Long) extends FindMemberBase[Boolean](tpe, name, excludedFlags, requiredFlags) { + private[this] var _result = false + override protected def result: Boolean = _result + + protected def shortCircuit(sym: Symbol): Boolean = { + _result = true + true // prevents call to addMemberIfNew + } + + // Not used + protected def addMemberIfNew(sym: Symbol): Unit = {} + } + } diff --git a/test/files/neg/userdefined_apply.check b/test/files/neg/userdefined_apply.check index ca0154885d..c8c8976f5f 100644 --- a/test/files/neg/userdefined_apply.check +++ b/test/files/neg/userdefined_apply.check @@ -1,13 +1,25 @@ userdefined_apply.scala:3: error: overloaded method apply needs result type private def apply(x: Int) = if (x > 0) new ClashOverloadNoSig(x) else apply("") ^ -userdefined_apply.scala:12: error: overloaded method apply needs result type 
+userdefined_apply.scala:14: error: overloaded method apply needs result type private def apply(x: Int) = if (x > 0) ClashRecNoSig(1) else ??? ^ -userdefined_apply.scala:19: error: overloaded method apply needs result type +userdefined_apply.scala:21: error: overloaded method apply needs result type private def apply(x: Boolean) = if (x) NoClashNoSig(1) else ??? ^ -userdefined_apply.scala:26: error: overloaded method apply needs result type +userdefined_apply.scala:28: error: overloaded method apply needs result type private def apply(x: Boolean) = if (x) NoClashOverload(1) else apply("") ^ -four errors found +userdefined_apply.scala:45: error: recursive method apply needs result type +case class NoClashNoSigPoly private(x: Int) + ^ +userdefined_apply.scala:39: error: NoClashNoSigPoly.type does not take parameters + def apply(x: T) = if (???) NoClashNoSigPoly(1) else ??? + ^ +userdefined_apply.scala:57: error: recursive method apply needs result type +case class ClashNoSigPoly private(x: Int) + ^ +userdefined_apply.scala:51: error: ClashNoSigPoly.type does not take parameters + def apply(x: T) = if (???) ClashNoSigPoly(1) else ??? + ^ +8 errors found diff --git a/test/files/neg/userdefined_apply.scala b/test/files/neg/userdefined_apply.scala index 1f2aff6e82..0a0d960b39 100644 --- a/test/files/neg/userdefined_apply.scala +++ b/test/files/neg/userdefined_apply.scala @@ -8,6 +8,8 @@ object ClashOverloadNoSig { case class ClashOverloadNoSig private(x: Int) object ClashRecNoSig { + // TODO: status quo is that the error refers to an overloaded method, which is actually recursive + // (we should have unlinked the symbol in the `if(suppress)` part of `applyUnapplyMethodCompleter`) // error: recursive method apply needs result type private def apply(x: Int) = if (x > 0) ClashRecNoSig(1) else ??? } @@ -29,3 +31,27 @@ object NoClashOverload { } case class NoClashOverload private(x: Int) + + +class BaseNCNSP[T] { + // TODO: suppress the following error + // error: NoClashNoSigPoly.type does not take parameters + def apply(x: T) = if (???) NoClashNoSigPoly(1) else ??? +} + +object NoClashNoSigPoly extends BaseNCNSP[Boolean] +// TODO: position error at definition of apply in superclass instead of on case clss +// error: recursive method apply needs result type +case class NoClashNoSigPoly private(x: Int) + + +class BaseCNSP[T] { + // TODO: suppress the following error + // error: ClashNoSigPoly.type does not take parameters + def apply(x: T) = if (???) ClashNoSigPoly(1) else ??? +} + +object ClashNoSigPoly extends BaseCNSP[Int] +// TODO: position error at definition of apply in superclass instead of on case clss +// error: recursive method apply needs result type +case class ClashNoSigPoly private(x: Int) diff --git a/test/files/pos/userdefined_apply.scala b/test/files/pos/userdefined_apply.scala index ca563f1dc5..e29f9f5141 100644 --- a/test/files/pos/userdefined_apply.scala +++ b/test/files/pos/userdefined_apply.scala @@ -34,3 +34,21 @@ object NoClashOverload { def apply(x: String): NoClashOverload = ??? } case class NoClashOverload private (x: Int) + + + +class BaseNCP[T] { + // error: overloaded method apply needs result type + def apply(x: T): NoClashPoly = if (???) NoClashPoly(1) else ??? +} + +object NoClashPoly extends BaseNCP[Boolean] +case class NoClashPoly private(x: Int) + + +class BaseCP[T] { + // error: overloaded method apply needs result type + def apply(x: T): ClashPoly = if (???) ClashPoly(1) else ??? 
+} +object ClashPoly extends BaseCP[Int] +case class ClashPoly private(x: Int) -- cgit v1.2.3 From 42f813e18160b6ea8dfc9c2ae850a67dd0819773 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Fri, 3 Mar 2017 17:48:24 +0100 Subject: [backport] new repo, version numbers for integration builds Integration builds now have version number like `2.12.2-bin-sha7` or `2.13.0-pre-sha7` and are published to scala-integration (no longer scala-release-temp). scala-release-temp is still used in the bootstrap script for publishing locker. --- README.md | 2 +- project/VersionUtil.scala | 35 ++++-- scripts/jobs/integrate/bootstrap | 136 ++++++++++++--------- .../scala/tools/nsc/settings/ScalaVersion.scala | 2 +- 4 files changed, 103 insertions(+), 72 deletions(-) (limited to 'src/compiler/scala') diff --git a/README.md b/README.md index bbfe7a797b..c2891390d4 100644 --- a/README.md +++ b/README.md @@ -230,7 +230,7 @@ tested version during CI validation. The Scala CI builds nightly download releases (including all modules) and publishes them to the following locations: - [2.12.x](http://www.scala-lang.org/files/archive/nightly/2.12.x/?C=M;O=D) - - [2.11.x](http://www.scala-lang.org/files/archive/nightly/2.11.x/?C=M;O=A) + - [2.11.x](http://www.scala-lang.org/files/archive/nightly/2.11.x/?C=M;O=D) The CI also publishes nightly API docs: - [2.12.x](http://www.scala-lang.org/files/archive/nightly/2.12.x/api/?C=M;O=D) diff --git a/project/VersionUtil.scala b/project/VersionUtil.scala index 2a52159d4e..3f9b727ef0 100644 --- a/project/VersionUtil.scala +++ b/project/VersionUtil.scala @@ -45,17 +45,19 @@ object VersionUtil { /** Compute the canonical, Maven and OSGi version number from `baseVersion` and `baseVersionSuffix`. * Examples of the generated versions: * - * ("2.11.8", "SNAPSHOT" ) -> ("2.11.8-20151215-133023-7559aed3c5", "2.11.8-SNAPSHOT", "2.11.8.v20151215-133023-7559aed3c5") - * ("2.11.8", "SHA-SNAPSHOT") -> ("2.11.8-20151215-133023-7559aed3c5", "2.11.8-7559aed3c5-SNAPSHOT", "2.11.8.v20151215-133023-7559aed3c5") - * ("2.11.8", "" ) -> ("2.11.8", "2.11.8", "2.11.8.v20151215-133023-VFINAL-7559aed3c5") - * ("2.11.8", "M3" ) -> ("2.11.8-M3", "2.11.8-M3", "2.11.8.v20151215-133023-M3-7559aed3c5") - * ("2.11.8", "RC4" ) -> ("2.11.8-RC4", "2.11.8-RC4", "2.11.8.v20151215-133023-RC4-7559aed3c5") - * ("2.11.8-RC4", "SPLIT" ) -> ("2.11.8-RC4", "2.11.8-RC4", "2.11.8.v20151215-133023-RC4-7559aed3c5") + * ("2.11.8", "SNAPSHOT" ) -> ("2.11.8-20151215-133023-7559aed", "2.11.8-bin-SNAPSHOT", "2.11.8.v20151215-133023-7559aed") + * ("2.11.8", "SHA-SNAPSHOT") -> ("2.11.8-20151215-133023-7559aed", "2.11.8-bin-7559aed-SNAPSHOT", "2.11.8.v20151215-133023-7559aed") + * ("2.11.8", "SHA" ) -> ("2.11.8-7559aed", "2.11.8-bin-7559aed", "2.11.8.v20151215-133023-7559aed") + * ("2.11.0", "SHA" ) -> ("2.11.0-7559aed", "2.11.0-pre-7559aed", "2.11.0.v20151215-133023-7559aed") + * ("2.11.8", "" ) -> ("2.11.8", "2.11.8", "2.11.8.v20151215-133023-VFINAL-7559aed") + * ("2.11.8", "M3" ) -> ("2.11.8-M3", "2.11.8-M3", "2.11.8.v20151215-133023-M3-7559aed") + * ("2.11.8", "RC4" ) -> ("2.11.8-RC4", "2.11.8-RC4", "2.11.8.v20151215-133023-RC4-7559aed") + * ("2.11.8-RC4", "SPLIT" ) -> ("2.11.8-RC4", "2.11.8-RC4", "2.11.8.v20151215-133023-RC4-7559aed") * * A `baseVersionSuffix` of "SNAPSHOT" is the default, which is used for local snapshot builds. The PR validation - * job uses "SHA-SNAPSHOT". An empty suffix is used for releases. All other suffix values are treated as RC / - * milestone builds. 
The special suffix value "SPLIT" is used to split the real suffix off from `baseVersion` - * instead and then apply the usual logic. */ + * job uses "SHA-SNAPSHOT". A proper version number for an integration build can be computed with "SHA". An empty + * suffix is used for releases. All other suffix values are treated as RC / milestone builds. The special suffix + * value "SPLIT" is used to split the real suffix off from `baseVersion` instead and then apply the usual logic. */ private lazy val versionPropertiesImpl: Def.Initialize[Versions] = Def.setting { val (base, suffix) = { @@ -78,11 +80,18 @@ object VersionUtil { val date = executeTool("get-scala-commit-date") val sha = executeTool("get-scala-commit-sha").substring(0, 7) // The script produces 10 digits at the moment + val Patch = """\d+\.\d+\.(\d+)""".r + def cross = base match { + case Patch(p) if p.toInt > 0 => "bin" + case _ => "pre" + } + val (canonicalV, mavenV, osgiV, release) = suffix match { - case "SNAPSHOT" => (s"$base-$date-$sha", s"$base-SNAPSHOT", s"$base.v$date-$sha", false) - case "SHA-SNAPSHOT" => (s"$base-$date-$sha", s"$base-$sha-SNAPSHOT", s"$base.v$date-$sha", false) - case "" => (s"$base", s"$base", s"$base.v$date-VFINAL-$sha", true) - case suffix => (s"$base-$suffix", s"$base-$suffix", s"$base.v$date-$suffix-$sha", true) + case "SNAPSHOT" => (s"$base-$date-$sha", s"$base-$cross-SNAPSHOT", s"$base.v$date-$sha", false) + case "SHA-SNAPSHOT" => (s"$base-$date-$sha", s"$base-$cross-$sha-SNAPSHOT", s"$base.v$date-$sha", false) + case "SHA" => (s"$base-$sha", s"$base-$cross-$sha", s"$base.v$date-$sha", false) + case "" => (s"$base", s"$base", s"$base.v$date-VFINAL-$sha", true) + case suffix => (s"$base-$suffix", s"$base-$suffix", s"$base.v$date-$suffix-$sha", true) } Versions(canonicalV, mavenV, osgiV, sha, date, release) diff --git a/scripts/jobs/integrate/bootstrap b/scripts/jobs/integrate/bootstrap index abb5b283c6..216a93637b 100644 --- a/scripts/jobs/integrate/bootstrap +++ b/scripts/jobs/integrate/bootstrap @@ -3,16 +3,17 @@ # Script Overview # - determine scala version # - determine module versions -# - build minimal core (aka locker) of Scala, use the determined version number, publish to private-repo -# - build those modules where a binary compatible version doesn't exist, publish to private-repo -# - build Scala using the previously built core and bootstrap modules, publish to private-repo (overwrites the minimal core version on private-repo) -# - for releases (not nightlies) +# - build minimal core (aka locker) of Scala, use the determined version number, publish to scala-integration +# - build those modules where a binary compatible version doesn't exist, publish to scala-integration +# - build Scala using the previously built core and bootstrap modules, publish to scala-integration, overwriting +# the existing artifacts (note: in 2.12.x we use scala-release-temp for locker, but that doesn't work in 2.11.x, +# because we can only pass in one `extra.repo` to ant, see comment in PR 5764). 
+# - for releases # - stage Scala on sonatype # - rebuild modules that needed a rebuild with this Scala build, and stage them on sonatype # - for nightlies # - force rebuild all modules and publish them locally (for testing purposes) # - the Scala version is serialized to jenkins.properties, which is passed downstream to scala-release jobs -# - this removes the need to tag scala/scala-dist (it's still encouraged for releases, but not a hard requirement) # Specifying the Scala version: @@ -23,8 +24,8 @@ # - Note: After building a release, the jenkins job provides an updated versions.properties file as artifact. # Put this file in the Scala repo and create a pull request, and also update the file build.number. # -# - Otherwise, a nightly release is built: -# - version number is read from the build.number file, extended with -$sha-nightly +# - Otherwise, an integration build is performed: +# - version number is read from the build.number file, extended with -[bin|pre]-$sha # Specifying module versions: there are two modes @@ -56,7 +57,7 @@ # to be re-built using the 2.11.1 release, we could not use 2.11.0. We could also not release the modules # after 2.11.1 was out, because that way the scala-library-all pom of 2.11.1 would depend on the old modules. # -# (*) https://github.com/sbt/sbt/blob/0.13.8/util/cross/src/main/input_sources/CrossVersionUtil.scala#L39 +# (*) https://github.com/sbt/sbt/blob/v0.13.13/util/cross/src/main/input_sources/CrossVersionUtil.scala#L41 # Binary incompatible changes in Modules: example with Scala 2.11 / 2.12 and scala-parser-combinators @@ -107,24 +108,30 @@ mkdir -p $baseDir/ivy2 rm -rf $baseDir/resolutionScratch_ mkdir -p $baseDir/resolutionScratch_ -# repo used to publish "locker" scala to (to start the bootstrap) -releaseTempRepoCred="private-repo" -releaseTempRepoUrl=${releaseTempRepoUrl-"https://scala-ci.typesafe.com/artifactory/scala-release-temp/"} -jcenterCacheUrl=${jcenterCacheUrl-"https://scala-ci.typesafe.com/artifactory/jcenter/"} - -# Used below in sbtArgs since we use a dedicated repository to share artifcacts between jobs, -# so we need to configure SBT to use these rather than its default, Maven Central. -# See http://www.scala-sbt.org/0.13/docs/Proxy-Repositories.html -sbtRepositoryConfig="$scriptsDir/repositories-scala-release" -cat > "$sbtRepositoryConfig" << EOF +function generateRepositoriesConfig(){ + # Used below in sbtArgs since we use a dedicated repository to share artifcacts between jobs, + # so we need to configure SBT to use these rather than its default, Maven Central. 
+ # See http://www.scala-sbt.org/0.13/docs/Proxy-Repositories.html + sbtRepositoryConfig="$scriptsDir/repositories-scala-release" + jcenterCacheUrl=${jcenterCacheUrl-"https://scala-ci.typesafe.com/artifactory/jcenter/"} + cat > "$sbtRepositoryConfig" << EOF [repositories] - private-repo: $releaseTempRepoUrl + script-repo: $1 jcenter-cache: $jcenterCacheUrl typesafe-ivy-releases: https://dl.bintray.com/typesafe/ivy-releases/, [organisation]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext], bootOnly sbt-plugin-releases: https://dl.bintray.com/sbt/sbt-plugin-releases/, [organisation]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext] maven-central local EOF +} + +integrationRepoCred="private-repo" + +# repo for locker, quick and the modules +integrationRepoUrl=${integrationRepoUrl-"https://scala-ci.typesafe.com/artifactory/scala-integration/"} + +# adding `integrationRepoUrl` to find the locker scala version when building modules +generateRepositoriesConfig $integrationRepoUrl ##### git gfxd() { @@ -198,15 +205,15 @@ sbtResolve() { # then set the version to the right one and publish (which won't re-gen the docs). # Also tried publish-local without docs using 'set publishArtifact in (Compile, packageDoc) := false' and republishing, no dice. -# Each buildModule() function is invoked twice: first to build against locker and publish to private-repo, then +# Each buildModule() function is invoked twice: first to build against locker and publish to artifactory, then # to build against the release and publish to sonatype (or publish-local if publishToSonatype is not "yes"). -# In the second round, sbtResolve is always true: the module will be found in the private-repo! +# In the second round, sbtResolve is always true: the module will be found in the artifactory! # Therefore, if MODULE_BUILT is "yes" (in the second round), we know that we need to build (and publish) the # module again. # -# Note: we tried an alternative solution in which sbtResolve would not look at private-repo, but that fails. For example, +# Note: we tried an alternative solution in which sbtResolve would not look at artifactory, but that fails. For example, # scala-xml depends on scala-library, so sbt tries to find the scala-library of the version that we are currently building, -# which exists only in private-repo. +# which exists only in artifactory. buildXML() { if [ "$XML_BUILT" != "yes" ] && [ "$forceRebuild" != "yes" ] && ( sbtResolve "org.scala-lang.modules" "scala-xml" $XML_VER ) @@ -281,7 +288,7 @@ buildActorsMigration(){ fi } -# should only be called with publishTasks publishing to private-repo +# should only be called with publishTasks publishing to artifactory buildScalacheck(){ if [ "$SCALACHECK_BUILT" != "yes" ] && [ "$forceRebuild" != "yes" ] && ( sbtResolve "org.scalacheck" "scalacheck" $SCALACHECK_VER ) then echo "Found scalacheck $SCALACHECK_VER; not building." 
@@ -292,9 +299,9 @@ buildScalacheck(){ fi } -# build modules, using ${buildTasks[@]} (except for Scalacheck, which is hard-coded to publish to private-repo) +# build modules, using ${buildTasks[@]} (except for Scalacheck, which is hard-coded to publish to artifactory) buildModules() { - publishTasks=('set credentials += Credentials(Path.userHome / ".credentials-private-repo")' "set every publishTo := Some(\"private-repo\" at \"$releaseTempRepoUrl\")") + publishTasks=('set credentials += Credentials(Path.userHome / ".credentials-private-repo")' "set every publishTo := Some(\"publish-repo\" at \"$integrationRepoUrl\")") buildTasks=($publishPrivateTask) buildXML buildParsers @@ -329,20 +336,19 @@ scalaVerToBinary() { local patch="$(echo $2 | sed -e "s#$RE#\3#")" # The binary version is majMin (e.g. "2.12") if - # - there's no suffix : 2.12.0, 2.12.1 - # - the suffix starts with "-bin": 2.12.0-bin-M1 - # - the patch version is > 0 : 2.12.1-M1, 1.12.3-RC2, 2.12.1-sha-nightly, 2.12.2-SNAPSHOT + # - there's no suffix : 2.12.0, 2.12.1 + # - the suffix starts with "-bin" : 2.12.1-bin-sha, 2.12.1-bin-sha-custom, 2.12.1-bin-SNAPSHOT + # - the suffix is \w+ and patch version is > 0: 2.12.1-M1, 2.12.1-RC2 (also 2.12.1-sha, 2.12.1-SNAPSHOT, which we don't use) # - # Otherwise, the binary version is the full version: 2.12.0-M1, 2.12.0-RC2, 2.12.0-sha-nightly, 2.12.0-SNAPSHOT + # Otherwise, the binary version is the full version: 2.12.0-M1, 2.12.0-RC2, 2.12.0-pre-sha, 2.12.0-pre-SNAPSHOT + # (also 2.12.0-sha, 2.12.0-SNAPSHOT, which we don't use) # - # Adapted from sbt: https://github.com/sbt/sbt/blob/0.13.8/util/cross/src/main/input_sources/CrossVersionUtil.scala#L39 + # Adapted from sbt: https://github.com/sbt/sbt/blob/v0.13.13/util/cross/src/main/input_sources/CrossVersionUtil.scala#L42 # - # Note: during the pre-release cycle of a major release (e.g. before 2.12.0), the SCALA_BINARY_VER of nightly / SNAPSHOT - # versions is the full version, e.g. 2.12.0-sha-nightly, so modules are always re-built. This is in line with what sbt - # does: for example, with scalaVersion := "2.12.0-SNAPSHOT", sbt will resolve scala-xml as scala-xml_2.12.0-SNAPSHOT. - # Once the 2.12.0 release is out, the binary version is 2.12 for all versions (e.g. for 2.12.1-sha-nightly). + # During the pre-release cycle of a major release (e.g. before 2.12.0), the SCALA_BINARY_VER of integration / SNAPSHOT + # versions is the full version, e.g. 2.12.0-pre-sha, so modules are always re-built. - if [[ "$3" == "" || "${3:0:4}" == "-bin" || "$patch" != "0" ]]; then + if [[ "$3" == "" || "${3:0:4}" == "-bin" || ("$patch" != "0" && "$3" =~ ^-[a-zA-Z0-9_]+$) ]]; then echo "$majMin" else echo "$1" @@ -363,10 +369,15 @@ determineScalaVersion() { if [ -z "$scalaTag" ] then - echo "No tag found, building nightly snapshot." + echo "No tag found, running an integration build." parseScalaProperties "build.number" SCALA_VER_BASE="$version_major.$version_minor.$version_patch" - SCALA_VER_SUFFIX="-$(git rev-parse --short HEAD)-nightly" + local shaSuffix=$(git rev-parse HEAD | cut -c1-7) + local cross="bin" + if [[ $SCALA_VER_BASE =~ ^.*\.0$ ]]; then + cross="pre" + fi + SCALA_VER_SUFFIX="-$cross-$shaSuffix" SCALADOC_SOURCE_LINKS_VER=$(git rev-parse HEAD) # TODO: publish nightly snapshot using this script - currently it's a separate jenkins job still running at EPFL. 
@@ -468,20 +479,31 @@ createNetrcFile() { grep 'password=' $1 | sed 's/password=\(.*\)/password \1/' >> $netrcFile } +# deletes existing artifacts (core and modules) matching the $SCALA_VER from the repository passed as argument removeExistingBuilds() { - createNetrcFile "$HOME/.credentials-private-repo" - local netrcFile="$HOME/.credentials-private-repo-netrc" - - local storageApiUrl=`echo $releaseTempRepoUrl | sed 's/\(scala-release-temp\)/api\/storage\/\1/'` - local scalaLangModules=`curl -s $storageApiUrl/org/scala-lang | jq -r '.children | .[] | "org/scala-lang" + .uri'` - - for module in "org/scalacheck" $scalaLangModules; do - local artifacts=`curl -s $storageApiUrl/$module | jq -r ".children | .[] | select(.uri | endswith(\"$SCALA_VER\")) | .uri"` - for artifact in $artifacts; do - echo "Deleting $releaseTempRepoUrl$module$artifact" - curl -s --netrc-file $netrcFile -X DELETE $releaseTempRepoUrl$module$artifact + local repoUrl=$1 + local repoPrefix="https://scala-ci.typesafe.com/artifactory/" + if [[ $repoUrl == "$repoPrefix"* ]]; then + local repoId=${1#$repoPrefix} + local storageApiUrl="${repoPrefix}api/storage/$repoId" + + createNetrcFile "$HOME/.credentials-private-repo" + local netrcFile="$HOME/.credentials-private-repo-netrc" + + # "module" is not a scala module (like scala-xml), but an artifact of a boostrap build. the variable + # contains: "org/scala-lang/modules", "org/scala-lang/scala-compiler", "org/scala-lang/scala-library", ... + local scalaLangModules=`curl -s $storageApiUrl/org/scala-lang | jq -r '.children | .[] | "org/scala-lang" + .uri' | grep -v actors-migration` + + for module in $scalaLangModules; do + local artifacts=`curl -s $storageApiUrl/$module | jq -r ".children | .[] | select(.uri | endswith(\"$SCALA_VER\")) | .uri"` + for artifact in $artifacts; do + echo "Deleting $repoUrl$module$artifact" + curl -s --netrc-file $netrcFile -X DELETE $repoUrl$module$artifact + done done - done + else + echo "Unknown repo, not deleting anything: $repoUrl" + fi } constructUpdatedModuleVersions() { @@ -507,7 +529,7 @@ constructUpdatedModuleVersions() { if [ ! 
-z "$SCALA_BINARY_VER" ]; then updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dscala.binary.version=$SCALA_BINARY_VER"); fi } -# build locker (scala + modules) and quick, publishing everything to private-repo +# build locker (scala + modules) and quick, publishing everything to artifactory bootstrap() { echo "### Bootstrapping" @@ -524,8 +546,8 @@ bootstrap() { # in sabbus lingo, the resulting Scala build will be used as starr to build the released Scala compiler ant -Dmaven.version.number=$SCALA_VER\ -Dremote.snapshot.repository=NOPE\ - -Dremote.release.repository=$releaseTempRepoUrl\ - -Drepository.credentials.id=$releaseTempRepoCred\ + -Dremote.release.repository=$integrationRepoUrl\ + -Drepository.credentials.id=$integrationRepoCred\ -Dscalac.args.optimise=-optimise\ -Ddocs.skip=1\ -Dlocker.skip=1\ @@ -561,14 +583,14 @@ bootstrap() { # which is fully cross-versioned (for $SCALA_VER, the version we're releasing) ant -Dstarr.version=$SCALA_VER\ -Dscala.full.version=$SCALA_VER\ - -Dextra.repo.url=$releaseTempRepoUrl\ + -Dextra.repo.url=$integrationRepoUrl\ -Dmaven.version.suffix=$SCALA_VER_SUFFIX\ ${updatedModuleVersions[@]} \ -Dupdate.versions=1\ -Dscaladoc.git.commit=$SCALADOC_SOURCE_LINKS_VER\ -Dremote.snapshot.repository=NOPE\ - -Dremote.release.repository=$releaseTempRepoUrl\ - -Drepository.credentials.id=$releaseTempRepoCred\ + -Dremote.release.repository=$integrationRepoUrl\ + -Drepository.credentials.id=$integrationRepoCred\ -Dscalac.args.optimise=-optimise\ $antBuildTask $publishPrivateTask @@ -614,7 +636,7 @@ determineScalaVersion deriveModuleVersions -removeExistingBuilds +removeExistingBuilds $integrationRepoUrl bootstrap diff --git a/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala b/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala index 43bdad5882..d7901730a4 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala @@ -135,7 +135,7 @@ abstract class ScalaBuild extends Ordered[ScalaBuild] { def unparse: String } /** - * A development, test, nightly, snapshot or other "unofficial" build + * A development, test, integration, snapshot or other "unofficial" build */ case class Development(id: String) extends ScalaBuild { def unparse = s"-${id}" -- cgit v1.2.3