From 342e71e1cfc676091b9a1519fe7a9ab8d00eec9d Mon Sep 17 00:00:00 2001 From: Rex Kerr Date: Tue, 31 Mar 2015 10:38:13 -0700 Subject: SI-8254 List SerializationProxy fails to default(Read/Write)Object Added `defaultWriteObject` to the beginning of `writeObject` and `defaultReadObject` to the beginning of `readObject` as required by specs: [writing](http://docs.oracle.com/javase/6/docs/platform/serialization/spec/output.html#861), [reading](http://docs.oracle.com/javase/6/docs/platform/serialization/spec/input.html#2971). Verified that it is a no-op in terms of the serialization stream (but it provides hooks that Infinispan and others may use). No explicit tests. If there is a change in serialization, t8549 will catch it. --- src/library/scala/collection/immutable/List.scala | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala index 254f14f13c..82e38d3549 100644 --- a/src/library/scala/collection/immutable/List.scala +++ b/src/library/scala/collection/immutable/List.scala @@ -462,6 +462,7 @@ object List extends SeqFactory[List] { private class SerializationProxy[A](@transient private var orig: List[A]) extends Serializable { private def writeObject(out: ObjectOutputStream) { + out.defaultWriteObject() var xs: List[A] = orig while (!xs.isEmpty) { out.writeObject(xs.head) @@ -473,6 +474,7 @@ object List extends SeqFactory[List] { // Java serialization calls this before readResolve during de-serialization. // Read the whole list and store it in `orig`. private def readObject(in: ObjectInputStream) { + in.defaultReadObject() val builder = List.newBuilder[A] while (true) in.readObject match { case ListSerializeEnd => -- cgit v1.2.3 From 99d3ab3be01ccd347d79162ed412aaf1ff0dff36 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 11 May 2015 14:55:32 +1000 Subject: Update to scala-java8-compat 0.4.0 --- build.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.xml b/build.xml index 07622e6061..0bbed26431 100755 --- a/build.xml +++ b/build.xml @@ -314,7 +314,7 @@ TODO: - + -- cgit v1.2.3 From 6ad9b44b27ede70ec723204bd80361d60f448c1a Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 24 Apr 2015 15:41:31 +1000 Subject: [indylambda] Relieve LambdaMetafactory of boxing duties `LambdaMetafactory` generates code to perform a limited number of type adaptations when delegating from its implementation of the functional interface method to the lambda target method. These adaptations are: numeric widening, casting, boxing and unboxing. However, the semantics of unboxing numerics differ between Java and Scala: Java treats `UNBOX(null)` as cause to raise a `NullPointerException`, whereas Scala (in `BoxesRunTime.unboxTo{Byte,Short,...}`) reinterprets the null as zero. Furthermore, Java has no idea how to adapt between a value class and its wrapped type, nor from a void return to `BoxedUnit`. This commit detects when the lambda target method would require such adaptation. If it does, an extra method, `$anonfun$1$adapted`, is created to perform the adaptation, and this is used as the target of the lambda. This obviates the use of `JProcedureN` for `Unit`-returning lambdas; we now use `JFunctionN` as the functional interface and bind it to an `$adapted` method that summons the instance of `BoxedUnit` after calling the `void`-returning lambda target. The enclosed test cases fail without the boxing changes.
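To illustrate the difference in unboxing semantics that the `$adapted` method has to preserve, here is a minimal sketch along the lines of the enclosed `function-null-unbox.scala` test (the object name is illustrative only):

```scala
object NullUnboxExample {
  def main(args: Array[String]): Unit = {
    val i2i = (x: Int) => x + 1
    // Through the erased, generic view of the function the argument is a boxed
    // Integer that may be null. Scala's BoxesRunTime.unboxToInt(null) yields 0,
    // so this must evaluate to 1, whereas LambdaMetafactory's own unboxing would
    // throw a NullPointerException here.
    assert(i2i.asInstanceOf[AnyRef => Int].apply(null) == 1)
  }
}
```

The enclosed test cases exercise exactly this, along with value class and `Unit`/`void` adaptation.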
They don't execute with indylambda enabled under regular partest runs yet, you need to add scala-java8-compat to scala-library and pass the SCALAC_OPTS to partest manually to try this out, as described in https://github.com/scala/scala/pull/4463. Once we enable indylambda by default, however, this test will exercise the code in this patch all the time. It is also possible to run the tests with: ``` % curl https://oss.sonatype.org/content/repositories/releases/org/scala-lang/modules/scala-java8-compat_2.11/0.4.0/scala-java8-compat_2.11-0.4.0.jar > scala-java8-compat_2.11-0.4.0.jar % export INDYLAMBDA="-Ydelambdafy:method -Ybackend:GenBCode -target:jvm-1.8 -classpath .:scala-java8-compat_2.11-0.4.0.jar" qscalac $INDYLAMBDA test/files/run/indylambda-boxing/*.scala && qscala $INDYLAMBDA Test ``` --- .../scala/tools/nsc/transform/Delambdafy.scala | 143 +++++++++++++++++---- .../scala/reflect/internal/Definitions.scala | 3 +- .../reflect/internal/transform/PostErasure.scala | 3 +- test/files/run/function-null-unbox.scala | 8 ++ test/files/run/indylambda-boxing/VC.scala | 2 + test/files/run/indylambda-boxing/test.scala | 29 +++++ 6 files changed, 157 insertions(+), 31 deletions(-) create mode 100644 test/files/run/function-null-unbox.scala create mode 100644 test/files/run/indylambda-boxing/VC.scala create mode 100644 test/files/run/indylambda-boxing/test.scala diff --git a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala index 92db57c533..17fad78972 100644 --- a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala +++ b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala @@ -88,6 +88,8 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre case class DelambdafyAnonClass(lambdaClassDef: ClassDef, newExpr: Tree) extends TransformedFunction case class InvokeDynamicLambda(tree: Apply) extends TransformedFunction + private val boxingBridgeMethods = mutable.ArrayBuffer[Tree]() + // here's the main entry point of the transform override def transform(tree: Tree): Tree = tree match { // the main thing we care about is lambdas @@ -105,6 +107,12 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre // ... 
or an invokedynamic call super.transform(apply) } + case Template(_, _, _) => + try { + // during this call boxingBridgeMethods will be populated from the Function case + val Template(parents, self, body) = super.transform(tree) + Template(parents, self, body ++ boxingBridgeMethods) + } finally boxingBridgeMethods.clear() case _ => super.transform(tree) } @@ -137,6 +145,61 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre val isStatic = target.hasFlag(STATIC) + def createBoxingBridgeMethod(functionParamTypes: List[Type], functionResultType: Type): Tree = { + val methSym = oldClass.newMethod(target.name.append("$adapted").toTermName, target.pos, target.flags | FINAL | ARTIFACT) + var neededAdaptation = false + def boxedType(tpe: Type): Type = { + if (isPrimitiveValueClass(tpe.typeSymbol)) {neededAdaptation = true; ObjectTpe} + else if (enteringErasure(tpe.typeSymbol.isDerivedValueClass)) {neededAdaptation = true; ObjectTpe} + else tpe + } + val targetParams: List[Symbol] = target.paramss.head + val numCaptures = targetParams.length - functionParamTypes.length + val (targetCaptureParams, targetFunctionParams) = targetParams.splitAt(numCaptures) + val bridgeParams: List[Symbol] = + targetCaptureParams.map(param => methSym.newSyntheticValueParam(param.tpe, param.name.toTermName)) ::: + map2(targetFunctionParams, functionParamTypes)((param, tp) => methSym.newSyntheticValueParam(boxedType(tp), param.name.toTermName)) + + val bridgeResultType: Type = { + if (target.info.resultType == UnitTpe && functionResultType != UnitTpe) { + neededAdaptation = true + ObjectTpe + } else + boxedType(functionResultType) + } + val methodType = MethodType(bridgeParams, bridgeResultType) + methSym setInfo methodType + if (!neededAdaptation) + EmptyTree + else { + val bridgeParamTrees = bridgeParams.map(ValDef(_)) + + oldClass.info.decls enter methSym + + val body = localTyper.typedPos(originalFunction.pos) { + val newTarget = Select(gen.mkAttributedThis(oldClass), target) + val args: List[Tree] = mapWithIndex(bridgeParams) { (param, i) => + if (i < numCaptures) { + gen.mkAttributedRef(param) + } else { + val functionParam = functionParamTypes(i - numCaptures) + val targetParam = targetParams(i) + if (enteringErasure(functionParam.typeSymbol.isDerivedValueClass)) { + val casted = cast(gen.mkAttributedRef(param), functionParam) + val unboxed = unbox(casted, ErasedValueType(functionParam.typeSymbol, targetParam.tpe)).modifyType(postErasure.elimErasedValueType) + unboxed + } else adaptToType(gen.mkAttributedRef(param), targetParam.tpe) + } + } + gen.mkMethodCall(newTarget, args) + } + val body1 = if (enteringErasure(functionResultType.typeSymbol.isDerivedValueClass)) + adaptToType(box(body.setType(ErasedValueType(functionResultType.typeSymbol, body.tpe)), "boxing lambda target"), bridgeResultType) + else adaptToType(body, bridgeResultType) + val methDef0 = DefDef(methSym, List(bridgeParamTrees), body1) + postErasure.newTransformer(unit).transform(methDef0).asInstanceOf[DefDef] + } + } /** * Creates the apply method for the anonymous subclass of FunctionN */ @@ -292,22 +355,56 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre thisArg ::: captureArgs } - val functionalInterface = java8CompatFunctionalInterface(target, originalFunction.tpe) + val arity = originalFunction.vparams.length + + // Reconstruct the type of the function entering erasure. + // We do this by taking the type after erasure, and re-boxing `ErasedValueType`. 
+ // + // Unfortunately, the more obvious `enteringErasure(target.info)` doesn't work + // as we would like, value classes in parameter position show up as the unboxed types. + val (functionParamTypes, functionResultType) = exitingErasure { + def boxed(tp: Type) = tp match { + case ErasedValueType(valueClazz, _) => TypeRef(NoPrefix, valueClazz, Nil) + case _ => tp + } + // We don't need to deeply map `boxedValueClassType` over the infos as `ErasedValueType` + // will only appear directly as a parameter type in a method signature, as shown + // https://gist.github.com/retronym/ba81dbd462282c504ff8 + val info = target.info + val boxedParamTypes = info.paramTypes.takeRight(arity).map(boxed) + (boxedParamTypes, boxed(info.resultType)) + } + val functionType = definitions.functionType(functionParamTypes, functionResultType) + + val (functionalInterface, isSpecialized) = java8CompatFunctionalInterface(target, functionType) if (functionalInterface.exists) { // Create a symbol representing a fictional lambda factory method that accepts the captured // arguments and returns a Function. - val msym = currentOwner.newMethod(nme.ANON_FUN_NAME, originalFunction.pos, ARTIFACT) + val msym = currentOwner.newMethod(nme.ANON_FUN_NAME, originalFunction.pos, ARTIFACT) val argTypes: List[Type] = allCaptureArgs.map(_.tpe) val params = msym.newSyntheticValueParams(argTypes) - msym.setInfo(MethodType(params, originalFunction.tpe)) + msym.setInfo(MethodType(params, functionType)) val arity = originalFunction.vparams.length + val lambdaTarget = + if (isSpecialized) + target + else { + createBoxingBridgeMethod(functionParamTypes, functionResultType) match { + case EmptyTree => + target + case bridge => + boxingBridgeMethods += bridge + bridge.symbol + } + } + // We then apply this symbol to the captures. val apply = localTyper.typedPos(originalFunction.pos)(Apply(Ident(msym), allCaptureArgs)).asInstanceOf[Apply] // The backend needs to know the target of the lambda and the functional interface in order // to emit the invokedynamic instruction. We pass this information as tree attachment. - apply.updateAttachment(LambdaMetaFactoryCapable(target, arity, functionalInterface)) + apply.updateAttachment(LambdaMetaFactoryCapable(lambdaTarget, arity, functionalInterface)) InvokeDynamicLambda(apply) } else { val anonymousClassDef = makeAnonymousClass @@ -469,34 +566,24 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre final case class LambdaMetaFactoryCapable(target: Symbol, arity: Int, functionalInterface: Symbol) // The functional interface that can be used to adapt the lambda target method `target` to the - // given function type. Returns `NoSymbol` if the compiler settings are unsuitable, or `LambdaMetaFactory` - // would be unable to generate the correct implementation (e.g. functions referring to derived value classes) - private def java8CompatFunctionalInterface(target: Symbol, functionType: Type): Symbol = { + // given function type. Returns `NoSymbol` if the compiler settings are unsuitable. 
+ private def java8CompatFunctionalInterface(target: Symbol, functionType: Type): (Symbol, Boolean) = { val canUseLambdaMetafactory: Boolean = { - val hasValueClass = exitingErasure { - val methodType: Type = target.info - methodType.exists(_.isInstanceOf[ErasedValueType]) - } val isTarget18 = settings.target.value.contains("jvm-1.8") - settings.isBCodeActive && isTarget18 && !hasValueClass + settings.isBCodeActive && isTarget18 } - def functionalInterface: Symbol = { - val sym = functionType.typeSymbol - val pack = currentRun.runDefinitions.Scala_Java8_CompatPackage - val name1 = specializeTypes.specializedFunctionName(sym, functionType.typeArgs) - val paramTps :+ restpe = functionType.typeArgs - val arity = paramTps.length - if (name1.toTypeName == sym.name) { - val returnUnit = restpe.typeSymbol == UnitClass - val functionInterfaceArray = - if (returnUnit) currentRun.runDefinitions.Scala_Java8_CompatPackage_JProcedure - else currentRun.runDefinitions.Scala_Java8_CompatPackage_JFunction - functionInterfaceArray.apply(arity) - } else { - pack.info.decl(name1.toTypeName.prepend("J")) - } + val sym = functionType.typeSymbol + val pack = currentRun.runDefinitions.Scala_Java8_CompatPackage + val name1 = specializeTypes.specializedFunctionName(sym, functionType.typeArgs) + val paramTps :+ restpe = functionType.typeArgs + val arity = paramTps.length + val isSpecialized = name1.toTypeName != sym.name + val functionalInterface = if (!isSpecialized) { + currentRun.runDefinitions.Scala_Java8_CompatPackage_JFunction(arity) + } else { + pack.info.decl(name1.toTypeName.prepend("J")) } - if (canUseLambdaMetafactory) functionalInterface else NoSymbol + (if (canUseLambdaMetafactory) functionalInterface else NoSymbol, isSpecialized) } } diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index 5b20d9db8e..73ffb267a9 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -1515,8 +1515,7 @@ trait Definitions extends api.StandardDefinitions { private lazy val PolySigMethods: Set[Symbol] = Set[Symbol](MethodHandle.info.decl(sn.Invoke), MethodHandle.info.decl(sn.InvokeExact)).filter(_.exists) lazy val Scala_Java8_CompatPackage = rootMirror.getPackageIfDefined("scala.compat.java8") - lazy val Scala_Java8_CompatPackage_JFunction = (0 to MaxTupleArity).toArray map (i => getMemberIfDefined(Scala_Java8_CompatPackage.moduleClass, TypeName("JFunction" + i))) - lazy val Scala_Java8_CompatPackage_JProcedure = (0 to MaxTupleArity).toArray map (i => getMemberIfDefined(Scala_Java8_CompatPackage.moduleClass, TypeName("JProcedure" + i))) + lazy val Scala_Java8_CompatPackage_JFunction = (0 to MaxFunctionArity).toArray map (i => getMemberIfDefined(Scala_Java8_CompatPackage.moduleClass, TypeName("JFunction" + i))) } } } diff --git a/src/reflect/scala/reflect/internal/transform/PostErasure.scala b/src/reflect/scala/reflect/internal/transform/PostErasure.scala index dd4f044818..466c6133b2 100644 --- a/src/reflect/scala/reflect/internal/transform/PostErasure.scala +++ b/src/reflect/scala/reflect/internal/transform/PostErasure.scala @@ -9,7 +9,8 @@ trait PostErasure { object elimErasedValueType extends TypeMap { def apply(tp: Type) = tp match { case ConstantType(Constant(tp: Type)) => ConstantType(Constant(apply(tp))) - case ErasedValueType(_, underlying) => underlying + case ErasedValueType(_, underlying) => + underlying case _ => mapOver(tp) } } diff --git 
a/test/files/run/function-null-unbox.scala b/test/files/run/function-null-unbox.scala new file mode 100644 index 0000000000..6c0369fffd --- /dev/null +++ b/test/files/run/function-null-unbox.scala @@ -0,0 +1,8 @@ +object Test { + def main(args: Array[String]): Unit = { + val i2s = (x: Int) => "" + assert(i2s.asInstanceOf[AnyRef => String].apply(null) == "") + val i2i = (x: Int) => x + 1 + assert(i2i.asInstanceOf[AnyRef => Int].apply(null) == 1) + } +} diff --git a/test/files/run/indylambda-boxing/VC.scala b/test/files/run/indylambda-boxing/VC.scala new file mode 100644 index 0000000000..ef867a3658 --- /dev/null +++ b/test/files/run/indylambda-boxing/VC.scala @@ -0,0 +1,2 @@ + +class VC(private val i: Int) extends AnyVal diff --git a/test/files/run/indylambda-boxing/test.scala b/test/files/run/indylambda-boxing/test.scala new file mode 100644 index 0000000000..cc0a460640 --- /dev/null +++ b/test/files/run/indylambda-boxing/test.scala @@ -0,0 +1,29 @@ +class Capture +class Test { + def test1 = (i: Int) => "" + def test2 = (i: VC) => i + def test3 = (i: Int) => i + + def test4 = {val c = new Capture; (i: Int) => {(c, Test.this.toString); 42} } + def test5 = {val c = new Capture; (i: VC) => (c, Test.this.toString) } + def test6 = {val c = new Capture; (i: Int) => (c, Test.this.toString) } + + def test7 = {val vc = new Capture; (i: Int) => vc } + def test8 = {val c = 42; (s: String) => (s, c)} + def test9 = {val c = 42; (s: String) => ()} +} + +object Test { + def main(args: Array[String]): Unit = { + val t = new Test + assert(t.test1.apply(42) == "") + assert(t.test2.apply(new VC(42)) == new VC(42)) + assert(t.test3.apply(-1) == -1) + t.test4.apply(0) + t.test5.apply(new VC(42)) + t.test6.apply(42) + t.test7.apply(0) + t.test8.apply("") + t.test9.apply("") + } +} -- cgit v1.2.3 From d504017f15692ae72db05118ed2be38daa3b7752 Mon Sep 17 00:00:00 2001 From: Christoph Neijenhuis Date: Fri, 15 May 2015 10:17:28 +0200 Subject: Fixed deprecation warning in scaladoc example of Try --- src/library/scala/util/Try.scala | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/library/scala/util/Try.scala b/src/library/scala/util/Try.scala index f65c77f5a0..0a6a7972c2 100644 --- a/src/library/scala/util/Try.scala +++ b/src/library/scala/util/Try.scala @@ -24,11 +24,12 @@ import scala.language.implicitConversions * * Example: * {{{ + * import scala.io.StdIn * import scala.util.{Try, Success, Failure} * * def divide: Try[Int] = { - * val dividend = Try(Console.readLine("Enter an Int that you'd like to divide:\n").toInt) - * val divisor = Try(Console.readLine("Enter an Int that you'd like to divide by:\n").toInt) + * val dividend = Try(StdIn.readLine("Enter an Int that you'd like to divide:\n").toInt) + * val divisor = Try(StdIn.readLine("Enter an Int that you'd like to divide by:\n").toInt) * val problem = dividend.flatMap(x => divisor.map(y => x/y)) * problem match { * case Success(v) => -- cgit v1.2.3 From afa2ff9f76123ab982dc5bb2f1110bb58e75c68c Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 12 May 2015 15:28:35 +1000 Subject: [indylambda] Support lambda {de}serialization To support serialization, we use the alternative lambda metafactory that lets us specify that our anonymous functions should extend the marker interface `scala.Serializable`. They will also have a `writeReplace` method added that implements the serialization proxy pattern using `j.l.invoke.SerializedLambda`. To support deserialization, we synthesize a `$deserializeLambda$` method in each class with lambdas.
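Expressed as source rather than bytecode, the synthesized member corresponds roughly to the sketch below (it is really emitted directly as a private static method of each lambda-hosting class; see the `BCodeHelpers` change in this commit):

```scala
object LambdaHostSketch {
  import java.lang.invoke.{MethodHandles, SerializedLambda}

  // Source-level sketch only; mirrors the generated $deserializeLambda$ method.
  def $deserializeLambda$(l: SerializedLambda): AnyRef =
    scala.compat.java8.runtime.LambdaDeserializer.deserializeLambda(
      MethodHandles.lookup(), // a lookup with the access rights of the hosting class
      null,                   // no cache yet; a later commit adds a per-class cache
      l)
}
```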
This will be called reflectively by `SerializedLambda#readResolve`. This method in turn delegates to `LambdaDeserializer`, currently defined [1] in `scala-java8-compat`, that uses `LambdaMetafactory` to spin up the anonymous class and instantiate it with the deserialized environment. Note: `LambdaDeserializer` can reuse the anonymous class on subsequent deserializations of a given lambda, in the same spirit as an invokedynamic call site, which only spins up the class the first time it is run. But first we'll need to host a cache in a static field of each lambda-hosting class. This is noted as a TODO and a failing test, and will be updated in the next commit. `LambdaDeserializer` will be moved into our standard library in the 2.12.x branch, where we can introduce dependencies on the Java 8 standard library. The enclosed test cases must be manually run with indylambda enabled. Once we enable indylambda by default on 2.12.x, the test will actually test the new feature. ``` % echo $INDYLAMBDA -Ydelambdafy:method -Ybackend:GenBCode -target:jvm-1.8 -classpath .:scala-java8-compat_2.11-0.5.0-SNAPSHOT.jar % qscala $INDYLAMBDA -e "println((() => 42).getClass)" class Main$$anon$1$$Lambda$1/1183231938 % qscala $INDYLAMBDA -e "assert(classOf[scala.Serializable].isInstance(() => 42))" % qscalac $INDYLAMBDA test/files/run/lambda-serialization.scala && qscala $INDYLAMBDA Test ``` This commit contains a few minor refactorings to the code that generates the invokedynamic instruction to use more meaningful names and to reuse the Java signature generation code in ASM rather than the DIY approach. [1] https://github.com/scala/scala-java8-compat/pull/37 --- .../tools/nsc/backend/jvm/BCodeBodyBuilder.scala | 43 ++++++++++++---------- .../scala/tools/nsc/backend/jvm/BCodeHelpers.scala | 27 ++++++++++++++ .../tools/nsc/backend/jvm/BCodeSkelBuilder.scala | 12 ++++++ .../scala/tools/nsc/transform/Delambdafy.scala | 3 ++ .../scala/reflect/internal/Definitions.scala | 1 + src/reflect/scala/reflect/internal/StdNames.scala | 2 + .../reflect/internal/transform/PostErasure.scala | 3 +- .../scala/reflect/runtime/JavaUniverseForce.scala | 1 + test/files/run/lambda-serialization.scala | 35 ++++++++++++++++++ 9 files changed, 105 insertions(+), 22 deletions(-) create mode 100644 test/files/run/lambda-serialization.scala diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala index 8ebe27e61b..40ba0c010b 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala @@ -33,7 +33,6 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { * Functionality to build the body of ASM MethodNode, except for `synchronized` and `try` expressions.
*/ abstract class PlainBodyBuilder(cunit: CompilationUnit) extends PlainSkelBuilder(cunit) { - import icodes.TestOp import icodes.opcodes.InvokeStyle @@ -1287,38 +1286,42 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { def genInvokeDynamicLambda(lambdaTarget: Symbol, arity: Int, functionalInterface: Symbol) { val isStaticMethod = lambdaTarget.hasFlag(Flags.STATIC) + def asmType(sym: Symbol) = classBTypeFromSymbol(sym).toASMType - val targetHandle = + val implMethodHandle = new asm.Handle(if (lambdaTarget.hasFlag(Flags.STATIC)) asm.Opcodes.H_INVOKESTATIC else asm.Opcodes.H_INVOKEVIRTUAL, classBTypeFromSymbol(lambdaTarget.owner).internalName, lambdaTarget.name.toString, asmMethodType(lambdaTarget).descriptor) - val receiver = if (isStaticMethod) None else Some(lambdaTarget.owner) + val receiver = if (isStaticMethod) Nil else lambdaTarget.owner :: Nil val (capturedParams, lambdaParams) = lambdaTarget.paramss.head.splitAt(lambdaTarget.paramss.head.length - arity) // Requires https://github.com/scala/scala-java8-compat on the runtime classpath - val returnUnit = lambdaTarget.info.resultType.typeSymbol == UnitClass - val functionalInterfaceDesc: String = classBTypeFromSymbol(functionalInterface).descriptor - val desc = (receiver.toList ::: capturedParams).map(sym => toTypeKind(sym.info)).mkString(("("), "", ")") + functionalInterfaceDesc + val invokedType = asm.Type.getMethodDescriptor(asmType(functionalInterface), (receiver ::: capturedParams).map(sym => toTypeKind(sym.info).toASMType): _*) - // TODO specialization val constrainedType = new MethodBType(lambdaParams.map(p => toTypeKind(p.tpe)), toTypeKind(lambdaTarget.tpe.resultType)).toASMType - val abstractMethod = functionalInterface.info.decls.find(_.isDeferred).getOrElse(functionalInterface.info.member(nme.apply)) - val methodName = abstractMethod.name.toString - val applyN = { - val mt = asmMethodType(abstractMethod) - mt.toASMType - } - - bc.jmethod.visitInvokeDynamicInsn(methodName, desc, lambdaMetaFactoryBootstrapHandle, - // boostrap args - applyN, targetHandle, constrainedType + val sam = functionalInterface.info.decls.find(_.isDeferred).getOrElse(functionalInterface.info.member(nme.apply)) + val samName = sam.name.toString + val samMethodType = asmMethodType(sam).toASMType + + val flags = 3 // TODO 2.12.x Replace with LambdaMetafactory.FLAG_SERIALIZABLE | LambdaMetafactory.FLAG_MARKERS + + val ScalaSerializable = classBTypeFromSymbol(definitions.SerializableClass).toASMType + bc.jmethod.visitInvokeDynamicInsn(samName, invokedType, lambdaMetaFactoryBootstrapHandle, + /* samMethodType = */ samMethodType, + /* implMethod = */ implMethodHandle, + /* instantiatedMethodType = */ constrainedType, + /* flags = */ flags.asInstanceOf[AnyRef], + /* markerInterfaceCount = */ 1.asInstanceOf[AnyRef], + /* markerInterfaces[0] = */ ScalaSerializable, + /* bridgeCount = */ 0.asInstanceOf[AnyRef] ) + indyLambdaHosts += this.claszSymbol } } - val lambdaMetaFactoryBootstrapHandle = + lazy val lambdaMetaFactoryBootstrapHandle = new asm.Handle(asm.Opcodes.H_INVOKESTATIC, - "java/lang/invoke/LambdaMetafactory", "metafactory", - "(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodHandle;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite;") + definitions.LambdaMetaFactory.fullName('/'), sn.AltMetafactory.toString, + "(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;[Ljava/lang/Object;)Ljava/lang/invoke/CallSite;") 
} diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala index 18468f5ae3..783c89584e 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala @@ -682,6 +682,33 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters { new java.lang.Long(id) ).visitEnd() } + + /** + * Add: + * + * private static Object $deserializeLambda$(SerializedLambda l) { + * return scala.compat.java8.runtime.LambdaDeserializer.deserializeLambda(MethodHandles.lookup(), null, l); + * } + * @param jclass + */ + // TODO add a static cache field to the class, and pass that as the second argument to `deserializeLambda`. + // This will make the test at run/lambda-serialization.scala:15 work + def addLambdaDeserialize(jclass: asm.ClassVisitor): Unit = { + val cw = jclass + import scala.tools.asm.Opcodes._ + cw.visitInnerClass("java/lang/invoke/MethodHandles$Lookup", "java/lang/invoke/MethodHandles", "Lookup", ACC_PUBLIC + ACC_FINAL + ACC_STATIC) + + { + val mv = cw.visitMethod(ACC_PRIVATE + ACC_STATIC + ACC_SYNTHETIC, "$deserializeLambda$", "(Ljava/lang/invoke/SerializedLambda;)Ljava/lang/Object;", null, null) + mv.visitCode() + mv.visitMethodInsn(INVOKESTATIC, "java/lang/invoke/MethodHandles", "lookup", "()Ljava/lang/invoke/MethodHandles$Lookup;", false) + mv.visitInsn(asm.Opcodes.ACONST_NULL) + mv.visitVarInsn(ALOAD, 0) + mv.visitMethodInsn(INVOKESTATIC, "scala/compat/java8/runtime/LambdaDeserializer", "deserializeLambda", "(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/util/Map;Ljava/lang/invoke/SerializedLambda;)Ljava/lang/Object;", false) + mv.visitInsn(ARETURN) + mv.visitEnd() + } + } } // end of trait BCClassGen /* functionality for building plain and mirror classes */ diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala index 2a06c62e37..b2011f8e0c 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala @@ -68,6 +68,8 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { var isCZStaticModule = false var isCZRemote = false + protected val indyLambdaHosts = collection.mutable.Set[Symbol]() + /* ---------------- idiomatic way to ask questions to typer ---------------- */ def paramTKs(app: Apply): List[BType] = { @@ -121,6 +123,16 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { innerClassBufferASM ++= classBType.info.get.nestedClasses gen(cd.impl) + + + val shouldAddLambdaDeserialize = ( + settings.target.value == "jvm-1.8" + && settings.Ydelambdafy.value == "method" + && indyLambdaHosts.contains(claszSymbol)) + + if (shouldAddLambdaDeserialize) + addLambdaDeserialize(cnode) + addInnerClassesASM(cnode, innerClassBufferASM.toList) cnode.visitAttribute(classBType.inlineInfoAttribute.get) diff --git a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala index 17fad78972..55ab73028e 100644 --- a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala +++ b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala @@ -146,6 +146,9 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre val isStatic = target.hasFlag(STATIC) def createBoxingBridgeMethod(functionParamTypes: List[Type], functionResultType: Type): Tree = { + // Note: we bail out of this method and return 
EmptyTree if we find there is no adaptation required. + // If we need to improve performance, we could check the types first before creating the + // method and parameter symbols. val methSym = oldClass.newMethod(target.name.append("$adapted").toTermName, target.pos, target.flags | FINAL | ARTIFACT) var neededAdaptation = false def boxedType(tpe: Type): Type = { diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index 73ffb267a9..806fc37617 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -514,6 +514,7 @@ trait Definitions extends api.StandardDefinitions { lazy val ScalaSignatureAnnotation = requiredClass[scala.reflect.ScalaSignature] lazy val ScalaLongSignatureAnnotation = requiredClass[scala.reflect.ScalaLongSignature] + lazy val LambdaMetaFactory = getClassIfDefined("java.lang.invoke.LambdaMetafactory") lazy val MethodHandle = getClassIfDefined("java.lang.invoke.MethodHandle") // Option classes diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala index c0562b0679..63e2ca0dbe 100644 --- a/src/reflect/scala/reflect/internal/StdNames.scala +++ b/src/reflect/scala/reflect/internal/StdNames.scala @@ -1167,6 +1167,8 @@ trait StdNames { final val Invoke: TermName = newTermName("invoke") final val InvokeExact: TermName = newTermName("invokeExact") + final val AltMetafactory: TermName = newTermName("altMetafactory") + val Boxed = immutable.Map[TypeName, TypeName]( tpnme.Boolean -> BoxedBoolean, tpnme.Byte -> BoxedByte, diff --git a/src/reflect/scala/reflect/internal/transform/PostErasure.scala b/src/reflect/scala/reflect/internal/transform/PostErasure.scala index 466c6133b2..dd4f044818 100644 --- a/src/reflect/scala/reflect/internal/transform/PostErasure.scala +++ b/src/reflect/scala/reflect/internal/transform/PostErasure.scala @@ -9,8 +9,7 @@ trait PostErasure { object elimErasedValueType extends TypeMap { def apply(tp: Type) = tp match { case ConstantType(Constant(tp: Type)) => ConstantType(Constant(apply(tp))) - case ErasedValueType(_, underlying) => - underlying + case ErasedValueType(_, underlying) => underlying case _ => mapOver(tp) } } diff --git a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala index 1c0aa7cf6d..8c03ee7ca3 100644 --- a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala +++ b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala @@ -310,6 +310,7 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => definitions.QuasiquoteClass_api_unapply definitions.ScalaSignatureAnnotation definitions.ScalaLongSignatureAnnotation + definitions.LambdaMetaFactory definitions.MethodHandle definitions.OptionClass definitions.OptionModule diff --git a/test/files/run/lambda-serialization.scala b/test/files/run/lambda-serialization.scala new file mode 100644 index 0000000000..46b26d7c5e --- /dev/null +++ b/test/files/run/lambda-serialization.scala @@ -0,0 +1,35 @@ +import java.io.{ByteArrayInputStream, ObjectInputStream, ObjectOutputStream, ByteArrayOutputStream} + +object Test { + def main(args: Array[String]): Unit = { + roundTrip + } + + def roundTrip(): Unit = { + val c = new Capture("Capture") + val lambda = (p: Param) => ("a", p, c) + val reconstituted1 = serializeDeserialize(lambda).asInstanceOf[Object => Any] + val p = new Param + assert(reconstituted1.apply(p) == ("a", p, c)) + val 
reconstituted2 = serializeDeserialize(lambda).asInstanceOf[Object => Any] + assert(reconstituted1.getClass == reconstituted2.getClass) + + val reconstituted3 = serializeDeserialize(reconstituted1) + assert(reconstituted3.apply(p) == ("a", p, c)) + + val specializedLambda = (p: Int) => List(p, c).length + assert(serializeDeserialize(specializedLambda).apply(42) == 2) + assert(serializeDeserialize(serializeDeserialize(specializedLambda)).apply(42) == 2) + } + + def serializeDeserialize[T <: AnyRef](obj: T) = { + val buffer = new ByteArrayOutputStream + val out = new ObjectOutputStream(buffer) + out.writeObject(obj) + val in = new ObjectInputStream(new ByteArrayInputStream(buffer.toByteArray)) + in.readObject.asInstanceOf[T] + } +} + +case class Capture(s: String) extends Serializable +class Param -- cgit v1.2.3 From 1d8c63277e97c57e12fa9864a2d238d4f54c10f0 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Sat, 16 May 2015 21:02:56 +1000 Subject: [indylambda] Enable caching for lambda deserialization We add a static field to each class that defines lambdas; it holds a `ju.Map[String, MethodHandle]` that caches references to the constructors of the classes originally created by `LambdaMetafactory`. The cache is initially null, and created on the first deserialization. In case of a race between two threads deserializing the first lambda hosted by a class, the last one to finish will clobber the one-element cache of the first. This lack of strong guarantees mirrors the current policy in `LambdaDeserializer`. We should consider whether to strengthen the combined guarantee here. A useful benchmark would be the guarantees of the invokedynamic instruction, which allows multiple threads to call the bootstrap method in parallel, but guarantees that if that happens, the results of all but one will be discarded: > If several threads simultaneously execute the bootstrap method for > the same dynamic call site, the Java Virtual Machine must choose > one returned call site object and install it visibly to all threads. We could meet this guarantee easily, albeit excessively, by synchronizing `$deserializeLambda$`. But a more fine-grained approach is possible and desirable. A test is included that shows we are able to garbage collect classloaders of classes that have hosted lambda deserialization.
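As a source-level sketch, the generated field and method added by this commit behave roughly like the following (the real code is emitted directly as bytecode, see the `BCodeHelpers` change below; the member names mirror the generated ones):

```scala
object LambdaHostWithCacheSketch {
  import java.lang.invoke.{MethodHandle, MethodHandles, SerializedLambda}

  // Mirrors the generated static field $deserializeLambdaCache$.
  private var $deserializeLambdaCache$ : java.util.Map[String, MethodHandle] = null

  // Mirrors the generated $deserializeLambda$ method.
  def $deserializeLambda$(l: SerializedLambda): AnyRef = {
    var cache = $deserializeLambdaCache$
    if (cache eq null) {
      // Two racing threads may both get here; the later write simply clobbers
      // the earlier one-element cache -- the benign race discussed above.
      cache = new java.util.HashMap[String, MethodHandle]
      $deserializeLambdaCache$ = cache
    }
    scala.compat.java8.runtime.LambdaDeserializer.deserializeLambda(MethodHandles.lookup(), cache, l)
  }
}
```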
--- .../scala/tools/nsc/backend/jvm/BCodeHelpers.scala | 40 ++++++++++++++++++---- .../tools/nsc/backend/jvm/BCodeSkelBuilder.scala | 2 +- .../scala/tools/nsc/backend/jvm/CoreBTypes.scala | 4 +++ .../scala/reflect/internal/Definitions.scala | 2 ++ .../scala/reflect/runtime/JavaUniverseForce.scala | 2 ++ test/files/run/lambda-serialization-gc.scala | 40 ++++++++++++++++++++++ 6 files changed, 82 insertions(+), 8 deletions(-) create mode 100644 test/files/run/lambda-serialization-gc.scala diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala index 783c89584e..6aa3a62295 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala @@ -685,24 +685,50 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters { /** * Add: - * + * private static java.util.Map $deserializeLambdaCache$ = null * private static Object $deserializeLambda$(SerializedLambda l) { - * return scala.compat.java8.runtime.LambdaDeserializer.deserializeLambda(MethodHandles.lookup(), null, l); + * var cache = $deserializeLambdaCache$ + * if (cache eq null) { + * cache = new java.util.HashMap() + * $deserializeLambdaCache$ = cache + * } + * return scala.compat.java8.runtime.LambdaDeserializer.deserializeLambda(MethodHandles.lookup(), cache, l); * } - * @param jclass */ - // TODO add a static cache field to the class, and pass that as the second argument to `deserializeLambda`. - // This will make the test at run/lambda-serialization.scala:15 work - def addLambdaDeserialize(jclass: asm.ClassVisitor): Unit = { + def addLambdaDeserialize(clazz: Symbol, jclass: asm.ClassVisitor): Unit = { val cw = jclass import scala.tools.asm.Opcodes._ + + // Need to force creation of BTypes for these as `getCommonSuperClass` is called on + // automatically computing the max stack size (`visitMaxs`) during method writing. 
+ javaUtilHashMapReference + javaUtilMapReference + cw.visitInnerClass("java/lang/invoke/MethodHandles$Lookup", "java/lang/invoke/MethodHandles", "Lookup", ACC_PUBLIC + ACC_FINAL + ACC_STATIC) + { + val fv = cw.visitField(ACC_PRIVATE + ACC_STATIC + ACC_SYNTHETIC, "$deserializeLambdaCache$", "Ljava/util/Map;", null, null) + fv.visitEnd() + } + { val mv = cw.visitMethod(ACC_PRIVATE + ACC_STATIC + ACC_SYNTHETIC, "$deserializeLambda$", "(Ljava/lang/invoke/SerializedLambda;)Ljava/lang/Object;", null, null) mv.visitCode() + mv.visitFieldInsn(GETSTATIC, clazz.javaBinaryName.encoded, "$deserializeLambdaCache$", "Ljava/util/Map;") + mv.visitVarInsn(ASTORE, 1) + mv.visitVarInsn(ALOAD, 1) + val l0 = new asm.Label() + mv.visitJumpInsn(IFNONNULL, l0) + mv.visitTypeInsn(NEW, "java/util/HashMap") + mv.visitInsn(DUP) + mv.visitMethodInsn(INVOKESPECIAL, "java/util/HashMap", "", "()V", false) + mv.visitVarInsn(ASTORE, 1) + mv.visitVarInsn(ALOAD, 1) + mv.visitFieldInsn(PUTSTATIC, clazz.javaBinaryName.encoded, "$deserializeLambdaCache$", "Ljava/util/Map;") + mv.visitLabel(l0) + mv.visitFrame(asm.Opcodes.F_APPEND,1, Array("java/util/Map"), 0, null) mv.visitMethodInsn(INVOKESTATIC, "java/lang/invoke/MethodHandles", "lookup", "()Ljava/lang/invoke/MethodHandles$Lookup;", false) - mv.visitInsn(asm.Opcodes.ACONST_NULL) + mv.visitVarInsn(ALOAD, 1) mv.visitVarInsn(ALOAD, 0) mv.visitMethodInsn(INVOKESTATIC, "scala/compat/java8/runtime/LambdaDeserializer", "deserializeLambda", "(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/util/Map;Ljava/lang/invoke/SerializedLambda;)Ljava/lang/Object;", false) mv.visitInsn(ARETURN) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala index b2011f8e0c..a2fd22d24c 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala @@ -131,7 +131,7 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { && indyLambdaHosts.contains(claszSymbol)) if (shouldAddLambdaDeserialize) - addLambdaDeserialize(cnode) + addLambdaDeserialize(claszSymbol, cnode) addInnerClassesASM(cnode, innerClassBufferASM.toList) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala index 492fe3ae79..00ca096e59 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala @@ -114,6 +114,8 @@ class CoreBTypes[BTFS <: BTypesFromSymbols[_ <: Global]](val bTypes: BTFS) { lazy val jioSerializableReference : ClassBType = classBTypeFromSymbol(JavaSerializableClass) // java/io/Serializable lazy val scalaSerializableReference : ClassBType = classBTypeFromSymbol(SerializableClass) // scala/Serializable lazy val classCastExceptionReference : ClassBType = classBTypeFromSymbol(ClassCastExceptionClass) // java/lang/ClassCastException + lazy val javaUtilMapReference : ClassBType = classBTypeFromSymbol(JavaUtilMap) // java/util/Map + lazy val javaUtilHashMapReference : ClassBType = classBTypeFromSymbol(JavaUtilHashMap) // java/util/HashMap lazy val srBooleanRef : ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.BooleanRef]) lazy val srByteRef : ClassBType = classBTypeFromSymbol(requiredClass[scala.runtime.ByteRef]) @@ -258,6 +260,8 @@ final class CoreBTypesProxy[BTFS <: BTypesFromSymbols[_ <: Global]](val bTypes: def jioSerializableReference : ClassBType = _coreBTypes.jioSerializableReference def 
scalaSerializableReference : ClassBType = _coreBTypes.scalaSerializableReference def classCastExceptionReference : ClassBType = _coreBTypes.classCastExceptionReference + def javaUtilMapReference : ClassBType = _coreBTypes.javaUtilMapReference + def javaUtilHashMapReference : ClassBType = _coreBTypes.javaUtilHashMapReference def srBooleanRef : ClassBType = _coreBTypes.srBooleanRef def srByteRef : ClassBType = _coreBTypes.srByteRef diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index 806fc37617..f3dd6a3280 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -369,6 +369,8 @@ trait Definitions extends api.StandardDefinitions { lazy val JavaEnumClass = requiredClass[java.lang.Enum[_]] lazy val RemoteInterfaceClass = requiredClass[java.rmi.Remote] lazy val RemoteExceptionClass = requiredClass[java.rmi.RemoteException] + lazy val JavaUtilMap = requiredClass[java.util.Map[_, _]] + lazy val JavaUtilHashMap = requiredClass[java.util.HashMap[_, _]] lazy val ByNameParamClass = specialPolyClass(tpnme.BYNAME_PARAM_CLASS_NAME, COVARIANT)(_ => AnyTpe) lazy val JavaRepeatedParamClass = specialPolyClass(tpnme.JAVA_REPEATED_PARAM_CLASS_NAME, COVARIANT)(tparam => arrayType(tparam.tpe)) diff --git a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala index 8c03ee7ca3..ea213cadd9 100644 --- a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala +++ b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala @@ -255,6 +255,8 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => definitions.JavaEnumClass definitions.RemoteInterfaceClass definitions.RemoteExceptionClass + definitions.JavaUtilMap + definitions.JavaUtilHashMap definitions.ByNameParamClass definitions.JavaRepeatedParamClass definitions.RepeatedParamClass diff --git a/test/files/run/lambda-serialization-gc.scala b/test/files/run/lambda-serialization-gc.scala new file mode 100644 index 0000000000..8fa0b4b402 --- /dev/null +++ b/test/files/run/lambda-serialization-gc.scala @@ -0,0 +1,40 @@ +import java.io._ + +import java.net.URLClassLoader + +class C { + def serializeDeserialize[T <: AnyRef](obj: T) = { + val buffer = new ByteArrayOutputStream + val out = new ObjectOutputStream(buffer) + out.writeObject(obj) + val in = new ObjectInputStream(new ByteArrayInputStream(buffer.toByteArray)) + in.readObject.asInstanceOf[T] + } + + serializeDeserialize((c: String) => c.length) +} + +object Test { + def main(args: Array[String]): Unit = { + test() + } + + def test(): Unit = { + val loader = getClass.getClassLoader.asInstanceOf[URLClassLoader] + val loaderCClass = classOf[C] + def deserializedInThrowawayClassloader = { + val throwawayLoader: java.net.URLClassLoader = new java.net.URLClassLoader(loader.getURLs, ClassLoader.getSystemClassLoader) { + val maxMemory = Runtime.getRuntime.maxMemory() + val junk = new Array[Byte]((maxMemory / 2).toInt) + } + val clazz = throwawayLoader.loadClass("C") + assert(clazz != loaderCClass) + clazz.newInstance() + } + (1 to 4) foreach { i => + // This would OOM by the third iteration if we leaked `throwawayLoader` during + // deserialization. 
+ deserializedInThrowawayClassloader + } + } +} -- cgit v1.2.3 From aca8f96d1cf5f645c24eac11f695dadae08ea381 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 1 May 2015 12:34:51 +1000 Subject: SI-9286 Check subclass privates for "same type after erasure" The overriding pairs cursor used to detect erased signature clashes was turning a blind eye to any pair that contained a private method. However, this could lead to a `VerifyError` or `IllegalAccessError`. Checking against javac's behaviour in both directions: ``` % cat sandbox/Test.java public abstract class Test { class C { int foo() { return 0; } } class D extends C { private int foo() { return 1; } } } % javac sandbox/Test.java sandbox/Test.java:3: error: name clash: foo() in Test.D and foo() in Test.C have the same erasure, yet neither overrides the other class D extends C { private int foo() { return 1; } } ^ where A is a type-variable: A extends Object declared in method foo() 1 error ``` ``` % cat sandbox/Test.java public abstract class Test { class C { private int foo() { return 0; } } class D extends C { int foo() { return 1; } } } % javac sandbox/Test.java % ``` This commit excludes only the private symbols of the superclass from the checks, by moving the test from `excludes` to `matches`. --- src/compiler/scala/tools/nsc/transform/Erasure.scala | 3 +-- test/files/neg/t9286a.check | 7 +++++++ test/files/neg/t9286a.scala | 13 +++++++++++++ test/files/neg/t9286b.check | 7 +++++++ test/files/neg/t9286b.scala | 5 +++++ test/files/neg/t9286c.check | 7 +++++++ test/files/neg/t9286c.scala | 14 ++++++++++++++ 7 files changed, 54 insertions(+), 2 deletions(-) create mode 100644 test/files/neg/t9286a.check create mode 100644 test/files/neg/t9286a.scala create mode 100644 test/files/neg/t9286b.check create mode 100644 test/files/neg/t9286b.scala create mode 100644 test/files/neg/t9286c.check create mode 100644 test/files/neg/t9286c.scala diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index 9fdc3a9d72..a04625c9c5 100644 --- a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -814,11 +814,10 @@ abstract class Erasure extends AddInterfaces // specialized members have no type history before 'specialize', causing double def errors for curried defs override def exclude(sym: Symbol): Boolean = ( sym.isType - || sym.isPrivate || super.exclude(sym) || !sym.hasTypeAt(currentRun.refchecksPhase.id) ) - override def matches(lo: Symbol, high: Symbol) = true + override def matches(lo: Symbol, high: Symbol) = !high.isPrivate } def isErasureDoubleDef(pair: SymbolPair) = { import pair._ diff --git a/test/files/neg/t9286a.check b/test/files/neg/t9286a.check new file mode 100644 index 0000000000..2bc7c0cf15 --- /dev/null +++ b/test/files/neg/t9286a.check @@ -0,0 +1,7 @@ +t9286a.scala:6: error: name clash between defined and inherited member: +def foo(o: (String,)): Unit in class T and +private def foo(o: (Any,)): Unit at line 6 +have same type after erasure: (o: Tuple1)Unit + private def foo(o: Tuple1[Any]) = () + ^ +one error found diff --git a/test/files/neg/t9286a.scala b/test/files/neg/t9286a.scala new file mode 100644 index 0000000000..0375ac591f --- /dev/null +++ b/test/files/neg/t9286a.scala @@ -0,0 +1,13 @@ +class T { + def foo(o: Tuple1[String]) = () +} + +class U extends T { + private def foo(o: Tuple1[Any]) = () +} + +object Test { + def main(args: Array[String]): Unit = { + new U().foo(null) //
IllegalAccessError: tried to access method U.foo(Lscala/Tuple1;)V from class Test$ + } +} diff --git a/test/files/neg/t9286b.check b/test/files/neg/t9286b.check new file mode 100644 index 0000000000..89a191bfee --- /dev/null +++ b/test/files/neg/t9286b.check @@ -0,0 +1,7 @@ +t9286b.scala:2: error: name clash between defined and inherited member: +def foo: Int in class C and +private def foo[A]: Int at line 2 +have same type after erasure: ()Int +class D extends C { private def foo[A] = 0 } + ^ +one error found diff --git a/test/files/neg/t9286b.scala b/test/files/neg/t9286b.scala new file mode 100644 index 0000000000..5c23075426 --- /dev/null +++ b/test/files/neg/t9286b.scala @@ -0,0 +1,5 @@ +class C { def foo = 0 } +class D extends C { private def foo[A] = 0 } + +class E { private def foo = 0 } +class F extends E { def foo[A] = 0 } // okay diff --git a/test/files/neg/t9286c.check b/test/files/neg/t9286c.check new file mode 100644 index 0000000000..785cb3f937 --- /dev/null +++ b/test/files/neg/t9286c.check @@ -0,0 +1,7 @@ +t9286c.scala:8: error: name clash between defined and inherited member: +def foo(m: M[_ >: String]): Int in trait T and +private def foo(m: M[_ >: Any]): Int at line 8 +have same type after erasure: (m: M)Int + def foo(m: M[_ >: Any]) = 0 // Expected: "same type after erasure" + ^ +one error found diff --git a/test/files/neg/t9286c.scala b/test/files/neg/t9286c.scala new file mode 100644 index 0000000000..3df08dcfe6 --- /dev/null +++ b/test/files/neg/t9286c.scala @@ -0,0 +1,14 @@ +class M[_] +trait T { + def foo(m: M[_ >: String]) = 42 +} + +object Test { + def t: T = new T { + def foo(m: M[_ >: Any]) = 0 // Expected: "same type after erasure" + } + def main(args: Array[String]): Unit = { + val m: M[String] = null + t.foo(m) // VeriyError: Duplicate method name&signature + } +} -- cgit v1.2.3 From e07a77f79a064d983703d90c965ccfbbc14daa3d Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 19 May 2015 10:34:13 +1000 Subject: Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index ba269dd71f..5bcbbb6229 100644 --- a/README.md +++ b/README.md @@ -66,7 +66,7 @@ You'll need a Java SDK (6 or newer), Apache Ant (version 1.8.0 or above), and cu ## Git Hygiene -As git history is forever, we take great pride in the quality of the commits we merge into the repository. The title of your commit will be read hundreds (of thousands? :-)) of times, so it pays off to spend just a little bit more time to polish it, making it descriptive and concise. Please take a minute to read the advice [most projects agree on](https://github.com/erlang/otp/wiki/Writing-good-commit-messages), and stick to 50-60 characters for the first line, wrapping subsequent ones at 80 (at most). +As git history is forever, we take great pride in the quality of the commits we merge into the repository. The title of your commit will be read hundreds (of thousands? :-)) of times, so it pays off to spend just a little bit more time to polish it, making it descriptive and concise. Please take a minute to read the advice [most projects agree on](https://github.com/erlang/otp/wiki/Writing-good-commit-messages), and stick to 72 or fewer characters for the first line, wrapping subsequent ones at 80 (at most). When not sure how to formulate your commit message, imagine you're writing a bullet item for the next release notes, or describing what the commit does to the code base (use active verbs in the present tense). 
When your commit title is featured in the next release notes, it will be read by a lot of curious Scala users, looking for the latest improvements. Satisfy their thirst for information with as few words as possible! Also, a commit should convey clearly to your (future) fellow contributors what it does to the code base. -- cgit v1.2.3 From 99fcdf758e5b52f77a138bc777692dd2461e0a9c Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 22 May 2015 09:27:07 +1000 Subject: SI-9321 Clarify spec for inheritance of qualified private I checked the intent with Martin, who said: > [...]
qualified private members are inherited like other members, > it’s just that their access is restricted. I've locked this in with a test as well. --- spec/05-classes-and-objects.md | 7 +++---- test/files/pos/t9321.scala | 10 ++++++++++ 2 files changed, 13 insertions(+), 4 deletions(-) create mode 100644 test/files/pos/t9321.scala diff --git a/spec/05-classes-and-objects.md b/spec/05-classes-and-objects.md index a6908ba39f..28abe6c3bc 100644 --- a/spec/05-classes-and-objects.md +++ b/spec/05-classes-and-objects.md @@ -498,9 +498,7 @@ the validity and meaning of a modifier are as follows. The `private` modifier can be used with any definition or declaration in a template. Such members can be accessed only from within the directly enclosing template and its companion module or [companion class](#object-definitions). They are not inherited by subclasses and they may not override definitions in parent classes. The modifier can be _qualified_ with an identifier $C$ (e.g. `private[$C$]`) that must denote a class or package @@ -524,7 +522,8 @@ either class-private or object-private, but not if it is marked case the member is called _qualified private_. Class-private or object-private members may not be abstract, and may not have `protected` or `override` modifiers. They are not inherited by subclasses and they may not override definitions in parent classes. ### `protected` The `protected` modifier applies to class member definitions. diff --git a/test/files/pos/t9321.scala b/test/files/pos/t9321.scala new file mode 100644 index 0000000000..ed3a816656 --- /dev/null +++ b/test/files/pos/t9321.scala @@ -0,0 +1,10 @@ +object p { + trait A { + private[p] val qualifiedPrivateMember = 1 + } + + def useQualifiedPrivate(b: Mix) = + b.qualifiedPrivateMember // allowed +} + +trait Mix extends p.A -- cgit v1.2.3 From 5690aca93fe3ce48a2fb95744c1dba7667d689dc Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 22 May 2015 12:09:36 +1000 Subject: Update to scala-java8-compat 0.5.0 This contains LambdaDeserializer, which we refer to in our synthetic `$deserializeLambda$` method under -target:jvm-1.8. Note: this library is only referenced in the Ant build under a non-standard build flag that includes that library into scala-library.jar. This is only for our internal use in running partest for indylambda. The SBT build doesn't have the same option at the moment.
--- build.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.xml b/build.xml index 6ee891661f..421646a2b0 100755 --- a/build.xml +++ b/build.xml @@ -313,7 +313,7 @@ TODO: - + -- cgit v1.2.3 From 187b866e73d2801f4309a13e1091dd35f34db7ce Mon Sep 17 00:00:00 2001 From: Simon Schäfer Date: Fri, 22 May 2015 13:10:49 +0200 Subject: Fix small grammar error in `Warnings` --- src/compiler/scala/tools/nsc/settings/Warnings.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/settings/Warnings.scala b/src/compiler/scala/tools/nsc/settings/Warnings.scala index 41ce0837cb..59cc13c64e 100644 --- a/src/compiler/scala/tools/nsc/settings/Warnings.scala +++ b/src/compiler/scala/tools/nsc/settings/Warnings.scala @@ -23,7 +23,7 @@ trait Warnings { val warnValueDiscard = BooleanSetting("-Ywarn-value-discard", "Warn when non-Unit expression results are unused.") val warnNumericWiden = BooleanSetting("-Ywarn-numeric-widen", "Warn when numerics are widened.") // SI-7712, SI-7707 warnUnused not quite ready for prime-time - val warnUnused = BooleanSetting("-Ywarn-unused", "Warn when local and private vals, vars, defs, and types are are unused.") + val warnUnused = BooleanSetting("-Ywarn-unused", "Warn when local and private vals, vars, defs, and types are unused.") // currently considered too noisy for general use val warnUnusedImport = BooleanSetting("-Ywarn-unused-import", "Warn when imports are unused.") -- cgit v1.2.3 From 57be8a33ebbc8e7a7d64404fe5db74ef895c5891 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Thu, 30 Apr 2015 14:11:57 +0200 Subject: Nullness Analysis Tracks nullness of values using an ASM analyzer. Tracking nullness requires alias tracking for local variables and stack values. For example, after an instance call, local variables that point to the same object as the receiver are treated not-null. 
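
To illustrate the kind of fact this analysis is meant to establish, a small sketch in hypothetical user code (`NullnessExample` is not part of the patch):

```
object NullnessExample {
  def f(s: String): Int = {
    val t = s      // t and s are aliases of the same object
    s.trim         // instance call: if it returns normally, s cannot have been null
    t.length       // via alias tracking, t is therefore also known to be not-null here
  }
}
```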
--- .../nsc/backend/jvm/analysis/AliasingFrame.scala | 247 +++++++++++++++++++ .../jvm/analysis/InstructionStackEffect.scala | 248 +++++++++++++++++++ .../backend/jvm/analysis/NullnessAnalyzer.scala | 262 +++++++++++++++++++++ .../tools/nsc/backend/jvm/opt/BytecodeUtils.scala | 37 ++- .../scala/tools/nsc/backend/jvm/opt/Inliner.scala | 4 +- .../backend/jvm/opt/InstructionResultSize.scala | 240 +++++++++++++++++++ .../jvm/analysis/NullnessAnalyzerTest.scala | 205 ++++++++++++++++ versions.properties | 2 +- 8 files changed, 1237 insertions(+), 8 deletions(-) create mode 100644 src/compiler/scala/tools/nsc/backend/jvm/analysis/AliasingFrame.scala create mode 100644 src/compiler/scala/tools/nsc/backend/jvm/analysis/InstructionStackEffect.scala create mode 100644 src/compiler/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzer.scala create mode 100644 src/compiler/scala/tools/nsc/backend/jvm/opt/InstructionResultSize.scala create mode 100644 test/junit/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzerTest.scala diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/AliasingFrame.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/AliasingFrame.scala new file mode 100644 index 0000000000..9494553ce1 --- /dev/null +++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/AliasingFrame.scala @@ -0,0 +1,247 @@ +package scala.tools.nsc +package backend.jvm +package analysis + +import scala.annotation.switch +import scala.collection.{mutable, immutable} +import scala.tools.asm.Opcodes +import scala.tools.asm.tree._ +import scala.tools.asm.tree.analysis.{Analyzer, Value, Frame, Interpreter} +import opt.BytecodeUtils._ + +object AliasingFrame { + private var _idCounter: Long = 0l + private def nextId = { _idCounter += 1; _idCounter } +} + +class AliasingFrame[V <: Value](nLocals: Int, nStack: Int) extends Frame[V](nLocals, nStack) { + import Opcodes._ + + // Auxiliary constructor required for implementing `AliasingAnalyzer.newFrame` + def this(src: Frame[_ <: V]) { + this(src.getLocals, src.getMaxStackSize) + init(src) + } + + /** + * For each slot (entry in the `values` array of the frame), an id that uniquely represents + * the object stored in it. If two values have the same id, they are aliases of the same + * object. + */ + private val aliasIds: Array[Long] = Array.fill(nLocals + nStack)(AliasingFrame.nextId) + + /** + * The object alias id of for a value index. + */ + def aliasId(entry: Int) = aliasIds(entry) + + /** + * Returns the indices of the values array which are aliases of the object `id`. + */ + def valuesWithAliasId(id: Long): Set[Int] = immutable.BitSet.empty ++ aliasIds.indices.filter(i => aliasId(i) == id) + + /** + * The set of aliased values for a given entry in the `values` array. + */ + def aliasesOf(entry: Int): Set[Int] = valuesWithAliasId(aliasIds(entry)) + + /** + * Define a new alias. For example, given + * var a = this // this, a have the same aliasId + * then an assignment + * b = a + * will set the same the aliasId for `b`. + */ + private def newAlias(assignee: Int, source: Int): Unit = { + aliasIds(assignee) = aliasIds(source) + } + + /** + * An assignment + * a = someUnknownValue() + * sets a fresh alias id for `a`. + * A stack value is also removed from its alias set when being consumed. 
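+   * (Removal simply assigns a fresh id from `AliasingFrame.nextId`, so the slot no longer
+   * shares an id with any other slot.)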
+ */ + private def removeAlias(assignee: Int): Unit = { + aliasIds(assignee) = AliasingFrame.nextId + } + + override def execute(insn: AbstractInsnNode, interpreter: Interpreter[V]): Unit = { + // Make the extendsion methods easier to use (otherwise we have to repeat `this`.stackTop) + def stackTop: Int = this.stackTop + def peekStack(n: Int): V = this.peekStack(n) + + val (consumed, produced) = InstructionStackEffect(insn, this) // needs to be called before super.execute, see its doc + super.execute(insn, interpreter) + + (insn.getOpcode: @switch) match { + case ALOAD => + newAlias(assignee = stackTop, source = insn.asInstanceOf[VarInsnNode].`var`) + + case DUP => + val top = stackTop + newAlias(assignee = top, source = top - 1) + + case DUP_X1 => + val top = stackTop + newAlias(assignee = top, source = top - 1) + newAlias(assignee = top - 1, source = top - 2) + newAlias(assignee = top - 2, source = top) + + case DUP_X2 => + // Check if the second element on the stack is size 2 + // https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-6.html#jvms-6.5.dup_x2 + val isSize2 = peekStack(1).getSize == 2 + val top = stackTop + newAlias(assignee = top, source = top - 1) + newAlias(assignee = top - 1, source = top - 2) + if (isSize2) { + // Size 2 values on the stack only take one slot in the `values` array + newAlias(assignee = top - 2, source = top) + } else { + newAlias(assignee = top - 2, source = top - 3) + newAlias(assignee = top - 3, source = top) + } + + case DUP2 => + val isSize2 = peekStack(0).getSize == 2 + val top = stackTop + if (isSize2) { + newAlias(assignee = top, source = top - 1) + } else { + newAlias(assignee = top - 1, source = top - 3) + newAlias(assignee = top, source = top - 2) + } + + case DUP2_X1 => + val isSize2 = peekStack(0).getSize == 2 + val top = stackTop + if (isSize2) { + newAlias(assignee = top, source = top - 1) + newAlias(assignee = top - 1, source = top - 2) + newAlias(assignee = top - 2, source = top) + } else { + newAlias(assignee = top, source = top - 2) + newAlias(assignee = top - 1, source = top - 3) + newAlias(assignee = top - 2, source = top - 4) + newAlias(assignee = top - 4, source = top) + newAlias(assignee = top - 5, source = top - 1) + } + + case DUP2_X2 => + val top = stackTop + // https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-6.html#jvms-6.5.dup2_x2 + val v1isSize2 = peekStack(0).getSize == 2 + if (v1isSize2) { + newAlias(assignee = top, source = top - 1) + newAlias(assignee = top - 1, source = top - 2) + val v2isSize2 = peekStack(1).getSize == 2 + if (v2isSize2) { + // Form 4 + newAlias(assignee = top - 2, source = top) + } else { + // Form 2 + newAlias(assignee = top - 2, source = top - 3) + newAlias(assignee = top - 3, source = top) + } + } else { + newAlias(assignee = top, source = top - 2) + newAlias(assignee = top - 1, source = top - 3) + newAlias(assignee = top - 2, source = top - 4) + val v3isSize2 = peekStack(2).getSize == 2 + if (v3isSize2) { + // Form 3 + newAlias(assignee = top - 3, source = top) + newAlias(assignee = top - 4, source = top - 1) + } else { + // Form 1 + newAlias(assignee = top - 3, source = top - 5) + newAlias(assignee = top - 4, source = top) + newAlias(assignee = top - 5, source = top - 1) + } + } + + case SWAP => + val top = stackTop + val idTop = aliasIds(top) + aliasIds(top) = aliasIds(top - 1) + aliasIds(top - 1) = idTop + + case opcode => + if (opcode == ASTORE) { + // Not a separate case because we need to remove the consumed stack value from alias sets after. 
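+          // super.execute has already adjusted the stack, so to find the slot that held the
+          // stored value we undo the instruction's stack effect: current top - produced + consumed.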
+ val stackTopBefore = stackTop - produced + consumed + val local = insn.asInstanceOf[VarInsnNode].`var` + newAlias(assignee = local, source = stackTopBefore) + // if the value written is size 2, it overwrites the subsequent slot, which is then no + // longer an alias of anything. see the corresponding case in `Frame.execute`. + if (getLocal(local).getSize == 2) + removeAlias(local + 1) + + // if the value at the preceding index is size 2, it is no longer valid, so we remove its + // aliasing. see corresponding case in `Frame.execute` + if (local > 0) { + val precedingValue = getLocal(local - 1) + if (precedingValue != null && precedingValue.getSize == 2) + removeAlias(local - 1) + } + } + + // Remove consumed stack values from aliasing sets. + // Example: iadd + // - before: local1, local2, stack1, consumed1, consumed2 + // - after: local1, local2, stack1, produced1 // stackTop = 3 + val firstConsumed = stackTop - produced + 1 // firstConsumed = 3 + for (i <- 0 until consumed) + removeAlias(firstConsumed + i) // remove aliases for 3 and 4 + + // We don't need to set the aliases ids for the produced values: the aliasIds array already + // contains fresh ids for non-used stack values (ensured by removeAlias). + } + } + + /** + * Merge the AliasingFrame `other` into this AliasingFrame. + * + * Aliases that are common in both frames are kept. Example: + * + * var x, y = null + * if (...) { + * x = a + * y = a // (x, y, a) are aliases + * } else { + * x = a + * y = b // (x, a) and (y, b) + * } + * [...] // (x, a) + */ + override def merge(other: Frame[_ <: V], interpreter: Interpreter[V]): Boolean = { + val valuesChanged = super.merge(other, interpreter) + var aliasesChanged = false + val aliasingOther = other.asInstanceOf[AliasingFrame[_]] + for (i <- aliasIds.indices) { + val thisAliases = aliasesOf(i) + val thisNotOther = thisAliases diff (thisAliases intersect aliasingOther.aliasesOf(i)) + if (thisNotOther.nonEmpty) { + aliasesChanged = true + thisNotOther foreach removeAlias + } + } + valuesChanged || aliasesChanged + } + + override def init(src: Frame[_ <: V]): Frame[V] = { + super.init(src) + compat.Platform.arraycopy(src.asInstanceOf[AliasingFrame[_]].aliasIds, 0, aliasIds, 0, aliasIds.length) + this + } +} + +/** + * An analyzer that uses AliasingFrames instead of bare Frames. This can be used when an analysis + * needs to track aliases, but doesn't require a more specific Frame subclass. 
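+ * (The NullnessAnalyzer in this package, for example, builds on it via NullnessFrame,
+ * a subclass of AliasingFrame.)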
+ */ +class AliasingAnalyzer[V <: Value](interpreter: Interpreter[V]) extends Analyzer[V](interpreter) { + override def newFrame(nLocals: Int, nStack: Int): AliasingFrame[V] = new AliasingFrame(nLocals, nStack) + override def newFrame(src: Frame[_ <: V]): AliasingFrame[V] = new AliasingFrame(src) +} diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/InstructionStackEffect.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/InstructionStackEffect.scala new file mode 100644 index 0000000000..3d6c53765e --- /dev/null +++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/InstructionStackEffect.scala @@ -0,0 +1,248 @@ +package scala.tools.nsc +package backend.jvm +package analysis + +import scala.annotation.switch +import scala.tools.asm.Opcodes._ +import scala.tools.asm.Type +import scala.tools.asm.tree.{MultiANewArrayInsnNode, InvokeDynamicInsnNode, MethodInsnNode, AbstractInsnNode} +import scala.tools.asm.tree.analysis.{Frame, Value} +import opt.BytecodeUtils._ + +object InstructionStackEffect { + /** + * Returns a pair with the number of stack values consumed and produced by `insn`. + * This method requires the `frame` to be in the state **before** executing / interpreting + * the `insn`. + */ + def apply[V <: Value](insn: AbstractInsnNode, frame: Frame[V]): (Int, Int) = { + def peekStack(n: Int): V = frame.peekStack(n) + + (insn.getOpcode: @switch) match { + // The order of opcodes is the same as in Frame.execute. + case NOP => (0, 0) + + case ACONST_NULL | + ICONST_M1 | + ICONST_0 | + ICONST_1 | + ICONST_2 | + ICONST_3 | + ICONST_4 | + ICONST_5 | + LCONST_0 | + LCONST_1 | + FCONST_0 | + FCONST_1 | + FCONST_2 | + DCONST_0 | + DCONST_1 | + BIPUSH | + SIPUSH | + LDC | + ILOAD | + LLOAD | + FLOAD | + DLOAD | + ALOAD => (0, 1) + + case IALOAD | + LALOAD | + FALOAD | + DALOAD | + AALOAD | + BALOAD | + CALOAD | + SALOAD => (2, 1) + + case ISTORE | + LSTORE | + FSTORE | + DSTORE | + ASTORE => (1, 0) + + case IASTORE | + LASTORE | + FASTORE | + DASTORE | + AASTORE | + BASTORE | + CASTORE | + SASTORE => (3, 0) + + case POP => (1, 0) + + case POP2 => + val isSize2 = peekStack(0).getSize == 2 + if (isSize2) (1, 0) else (2, 0) + + case DUP => (0, 1) + + case DUP_X1 => (2, 3) + + case DUP_X2 => + val isSize2 = peekStack(1).getSize == 2 + if (isSize2) (2, 3) else (3, 4) + + case DUP2 => + val isSize2 = peekStack(0).getSize == 2 + if (isSize2) (0, 1) else (0, 2) + + case DUP2_X1 => + val isSize2 = peekStack(0).getSize == 2 + if (isSize2) (2, 3) else (3, 4) + + case DUP2_X2 => + val v1isSize2 = peekStack(0).getSize == 2 + if (v1isSize2) { + val v2isSize2 = peekStack(1).getSize == 2 + if (v2isSize2) (2, 3) else (3, 4) + } else { + val v3isSize2 = peekStack(2).getSize == 2 + if (v3isSize2) (3, 5) else (4, 6) + } + + case SWAP => (2, 2) + + case IADD | + LADD | + FADD | + DADD | + ISUB | + LSUB | + FSUB | + DSUB | + IMUL | + LMUL | + FMUL | + DMUL | + IDIV | + LDIV | + FDIV | + DDIV | + IREM | + LREM | + FREM | + DREM => (2, 1) + + case INEG | + LNEG | + FNEG | + DNEG => (1, 1) + + case ISHL | + LSHL | + ISHR | + LSHR | + IUSHR | + LUSHR | + IAND | + LAND | + IOR | + LOR | + IXOR | + LXOR => (2, 1) + + case IINC => (0, 0) + + case I2L | + I2F | + I2D | + L2I | + L2F | + L2D | + F2I | + F2L | + F2D | + D2I | + D2L | + D2F | + I2B | + I2C | + I2S => (1, 1) + + case LCMP | + FCMPL | + FCMPG | + DCMPL | + DCMPG => (2, 1) + + case IFEQ | + IFNE | + IFLT | + IFGE | + IFGT | + IFLE => (1, 0) + + case IF_ICMPEQ | + IF_ICMPNE | + IF_ICMPLT | + IF_ICMPGE | + IF_ICMPGT | + IF_ICMPLE | + IF_ACMPEQ | 
+ IF_ACMPNE => (2, 0) + + case GOTO => (0, 0) + + case JSR => (0, 1) + + case RET => (0, 0) + + case TABLESWITCH | + LOOKUPSWITCH => (1, 0) + + case IRETURN | + LRETURN | + FRETURN | + DRETURN | + ARETURN => (frame.getStackSize, 0) + + case RETURN => (frame.getStackSize, 0) + + case GETSTATIC => (0, 1) + + case PUTSTATIC => (1, 0) + + case GETFIELD => (1, 1) + + case PUTFIELD => (2, 0) + + case INVOKEVIRTUAL | + INVOKESPECIAL | + INVOKESTATIC | + INVOKEINTERFACE => + val desc = insn.asInstanceOf[MethodInsnNode].desc + val cons = Type.getArgumentTypes(desc).length + (if (insn.getOpcode == INVOKESTATIC) 0 else 1) + val prod = if (Type.getReturnType(desc) == Type.VOID_TYPE) 0 else 1 + (cons, prod) + + case INVOKEDYNAMIC => + val desc = insn.asInstanceOf[InvokeDynamicInsnNode].desc + val cons = Type.getArgumentTypes(desc).length + val prod = if (Type.getReturnType(desc) == Type.VOID_TYPE) 0 else 1 + (cons, prod) + + case NEW => (0, 1) + + case NEWARRAY | + ANEWARRAY | + ARRAYLENGTH => (1, 1) + + case ATHROW => (frame.getStackSize, 0) + + case CHECKCAST => (0, 0) + + case INSTANCEOF => (1, 1) + + case MONITORENTER | + MONITOREXIT => (1, 0) + + case MULTIANEWARRAY => (insn.asInstanceOf[MultiANewArrayInsnNode].dims, 1) + + case IFNULL | + IFNONNULL => (1, 0) + } + } + +} diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzer.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzer.scala new file mode 100644 index 0000000000..18c17bc992 --- /dev/null +++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzer.scala @@ -0,0 +1,262 @@ +package scala.tools.nsc +package backend.jvm +package analysis + +import java.util + +import scala.annotation.switch +import scala.tools.asm.{Type, Opcodes} +import scala.tools.asm.tree.{MethodInsnNode, LdcInsnNode, AbstractInsnNode} +import scala.tools.asm.tree.analysis.{Frame, Analyzer, Interpreter, Value} +import scala.tools.nsc.backend.jvm.opt.BytecodeUtils +import BytecodeUtils._ + +/** + * Some notes on the ASM ananlyzer framework. + * + * Value + * - Abstract, needs to be implemented for each analysis. + * - Represents the desired information about local variables and stack values, for example: + * - Is this value known to be null / not null? + * - What are the instructions that could potentially have produced this value? + * + * Interpreter + * - Abstract, needs to be implemented for each analysis. Sometimes one can subclass an existing + * interpreter, e.g., SourceInterpreter or BasicInterpreter. + * - Multiple abstract methods that receive an instruction and the instruction's input values, and + * return a value representing the result of that instruction. + * - Note: due to control flow, the interpreter can be invoked multiple times for the same + * instruction, until reaching a fixed point. + * - Abstract `merge` function that computes the least upper bound of two values. Used by + * Frame.merge (see below). + * + * Frame + * - Can be used directly for many analyses, no subclass required. + * - Every frame has an array of values: one for each local variable and for each stack slot. + * - A `top` index stores the index of the current stack top + * - NOTE: for a size-2 local variable at index i, the local variable at i+1 is set to an empty + * value. However, for a size-2 value at index i on the stack, the value at i+1 holds the next + * stack value. + * - Defines the `execute(instruction)` method. 
+ * - executing mutates the state of the frame according to the effect of the instruction + * - pop consumed values from the stack + * - pass them to the interpreter together with the instruction + * - if applicable, push the resulting value on the stack + * - Defines the `merge(otherFrame)` method + * - called by the analyzer when multiple control flow paths lead to an instruction + * - the frame at the branching instruction is merged into the current frame of the + * instruction (held by the analyzer) + * - mutates the values of the current frame, merges all values using interpreter.merge. + * + * Analyzer + * - Stores a frame for each instruction + * - `merge` function takes an instruction and a frame, merges the existing frame for that instr + * (from the frames array) with the new frame passed as argument. + * if the frame changed, puts the instruction on the work queue (fixpiont). + * - initial frame: initialized for first instr by calling interpreter.new[...]Value + * for each slot (locals and params), stored in frames[firstInstr] by calling `merge` + * - work queue of instructions (`queue` array, `top` index for next instruction to analyze) + * - analyze(method): simulate control flow. while work queue non-empty: + * - copy the state of `frames[instr]` into a local frame `current` + * - call `current.execute(instr, interpreter)`, mutating the `current` frame + * - if it's a branching instruction + * - for all potential destination instructions + * - merge the destination instruction frame with the `current` frame + * (this enqueues the destination instr if its frame changed) + * - invoke `newControlFlowEdge` (see below) + * - the analyzer also tracks active exception handlers at each instruction + * - the empty method `newControlFlowEdge` can be overridden to track control flow if required + * + * + * Some notes on nullness analysis. + * + * For an instance method, `this` is non-null at entry. So we have to return a NotNull value when + * the analyzer is initializing the first frame of a method (see above). This required a change of + * the analyzer: before it would simply call `interpreter.newValue`, where we don't have the + * required context. See https://github.com/scala/scala-asm/commit/8133d75032. + * + * After some operations we know that a certain value is not null (e.g. the receiver of an instance + * call). However, the receiver is an value on the stack and consumed while interpreting the + * instruction - so we can only gain some knowledge if we know that the receiver was an alias of + * some other local variable or stack slot. Therefore we use the AliasingFrame class. + * + * TODO: + * Finally, we'd also like to exploit the knowledge gained from `if (x == null)` tests: x is known + * to be null in one branch, not null in the other. This will make use of alias tracking as well. + * We still have to figure out how to do this exactly in the analyzer framework. + */ + +/** + * Type to represent nullness of values. + */ +sealed trait Nullness { + final def merge(other: Nullness) = if (this == other) this else Unknown +} +case object NotNull extends Nullness +case object Unknown extends Nullness +case object Null extends Nullness + +/** + * Represents the nullness state for a local variable or stack value. + * + * Note that nullness of primitive values is not tracked, it will be always [[Unknown]]. + * + * @param nullness The nullness of this value. + * @param longOrDouble True if this value is a long or double. 
The Analyzer framework needs to know + * the size of each value when interpreting instructions, see `Frame.execute`. + */ +final case class NullnessValue(nullness: Nullness, longOrDouble: Boolean) extends Value { + def this(nullness: Nullness, insn: AbstractInsnNode) = this(nullness, longOrDouble = BytecodeUtils.instructionResultSize(insn) == 2) + + /** + * The size of the slot described by this value. Cannot be 0 because no values are allocated + * for void-typed slots, see NullnessInterpreter.newValue. + **/ + def getSize: Int = if (longOrDouble) 2 else 1 + + def merge(other: NullnessValue) = NullnessValue(nullness merge other.nullness, longOrDouble) +} + +object NullnessValue { + def apply(nullness: Nullness, insn: AbstractInsnNode) = new NullnessValue(nullness, insn) +} + +final class NullnessInterpreter extends Interpreter[NullnessValue](Opcodes.ASM5) { + def newValue(tp: Type): NullnessValue = { + // ASM loves giving semantics to null. The behavior here is the same as in SourceInterpreter, + // which is provided by the framework. + // + // (1) For the void type, the ASM framework expects newValue to return `null`. + // Also, the Frame.returnValue field is `null` for methods with return type void. + // Example callsite passing VOID_TYPE: in Analyzer, `newValue(Type.getReturnType(m.desc))`. + // + // (2) `tp` may also be `null`. When creating the initial frame, the analyzer invokes + // `newValue(null)` for each local variable. We have to return a value of size 1. + if (tp == Type.VOID_TYPE) null // (1) + else NullnessValue(Unknown, longOrDouble = tp != null /*(2)*/ && tp.getSize == 2 ) + } + + override def newParameterValue(isInstanceMethod: Boolean, local: Int, tp: Type): NullnessValue = { + // For instance methods, the `this` parameter is known to be not null. + if (isInstanceMethod && local == 0) NullnessValue(NotNull, longOrDouble = false) + else super.newParameterValue(isInstanceMethod, local, tp) + } + + def newOperation(insn: AbstractInsnNode): NullnessValue = { + val nullness = (insn.getOpcode: @switch) match { + case Opcodes.ACONST_NULL => Null + + case Opcodes.LDC => insn.asInstanceOf[LdcInsnNode].cst match { + case _: String | _: Type => NotNull + case _ => Unknown + } + + case _ => Unknown + } + + // for Opcodes.NEW, we use Unknown. The value will become NotNull after the constructor call. 
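+    // (That transition is implemented in NullnessFrame.execute, which marks the receiver of the
+    //  INVOKESPECIAL <init> call, together with all of its aliases, as NotNull.)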
+ NullnessValue(nullness, insn) + } + + def copyOperation(insn: AbstractInsnNode, value: NullnessValue): NullnessValue = value + + def unaryOperation(insn: AbstractInsnNode, value: NullnessValue): NullnessValue = (insn.getOpcode: @switch) match { + case Opcodes.NEWARRAY | + Opcodes.ANEWARRAY => NullnessValue(NotNull, longOrDouble = false) + + case _ => NullnessValue(Unknown, insn) + } + + def binaryOperation(insn: AbstractInsnNode, value1: NullnessValue, value2: NullnessValue): NullnessValue = { + NullnessValue(Unknown, insn) + } + + def ternaryOperation(insn: AbstractInsnNode, value1: NullnessValue, value2: NullnessValue, value3: NullnessValue): NullnessValue = { + NullnessValue(Unknown, longOrDouble = false) + } + + def naryOperation(insn: AbstractInsnNode, values: util.List[_ <: NullnessValue]): NullnessValue = (insn.getOpcode: @switch) match { + case Opcodes.MULTIANEWARRAY => + NullnessValue(NotNull, longOrDouble = false) + + case _ => + // TODO: use a list of methods that are known to return non-null values + NullnessValue(Unknown, insn) + } + + def returnOperation(insn: AbstractInsnNode, value: NullnessValue, expected: NullnessValue): Unit = () + + def merge(a: NullnessValue, b: NullnessValue): NullnessValue = a merge b +} + +class NullnessFrame(nLocals: Int, nStack: Int) extends AliasingFrame[NullnessValue](nLocals, nStack) { + // Auxiliary constructor required for implementing `NullnessAnalyzer.newFrame` + def this(src: Frame[_ <: NullnessValue]) { + this(src.getLocals, src.getMaxStackSize) + init(src) + } + + override def execute(insn: AbstractInsnNode, interpreter: Interpreter[NullnessValue]): Unit = { + import Opcodes._ + + // get the object id of the object that is known to be not-null after this operation + val nullCheckedAliasId: Long = (insn.getOpcode: @switch) match { + case IALOAD | + LALOAD | + FALOAD | + DALOAD | + AALOAD | + BALOAD | + CALOAD | + SALOAD => + aliasId(this.stackTop - 1) + + case IASTORE | + FASTORE | + AASTORE | + BASTORE | + CASTORE | + SASTORE | + LASTORE | + DASTORE => + aliasId(this.stackTop - 2) + + case GETFIELD => + aliasId(this.stackTop) + + case PUTFIELD => + aliasId(this.stackTop - 1) + + case INVOKEVIRTUAL | + INVOKESPECIAL | + INVOKEINTERFACE => + val desc = insn.asInstanceOf[MethodInsnNode].desc + val numArgs = Type.getArgumentTypes(desc).length + aliasId(this.stackTop - numArgs) + + case ARRAYLENGTH | + MONITORENTER | + MONITOREXIT => + aliasId(this.stackTop) + + case _ => + -1 + } + + super.execute(insn, interpreter) + + if (nullCheckedAliasId != -1) { + for (i <- valuesWithAliasId(nullCheckedAliasId)) + this.setValue(i, this.getValue(i).copy(nullness = NotNull)) + } + } +} + +/** + * This class is required to override the `newFrame` methods, which makes makes sure the analyzer + * uses NullnessFrames. 
+ */ +class NullnessAnalyzer extends Analyzer[NullnessValue](new NullnessInterpreter) { + override def newFrame(nLocals: Int, nStack: Int): NullnessFrame = new NullnessFrame(nLocals, nStack) + override def newFrame(src: Frame[_ <: NullnessValue]): NullnessFrame = new NullnessFrame(src) +} diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala index 201ab15177..314105da44 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala @@ -170,6 +170,8 @@ object BytecodeUtils { new InsnNode(op) } + def instructionResultSize(instruction: AbstractInsnNode) = InstructionResultSize(instruction) + def labelReferences(method: MethodNode): Map[LabelNode, Set[AnyRef]] = { val res = mutable.Map.empty[LabelNode, Set[AnyRef]] def add(l: LabelNode, ref: AnyRef) = if (res contains l) res(l) = res(l) + ref else res(l) = Set(ref) @@ -328,13 +330,38 @@ object BytecodeUtils { class AsmAnalyzer[V <: Value](methodNode: MethodNode, classInternalName: InternalName, interpreter: Interpreter[V] = new BasicInterpreter) { val analyzer = new Analyzer(interpreter) analyzer.analyze(classInternalName, methodNode) - def frameAt(instruction: AbstractInsnNode): Frame[V] = analyzer.getFrames()(methodNode.instructions.indexOf(instruction)) + def frameAt(instruction: AbstractInsnNode): Frame[V] = analyzer.frameAt(instruction, methodNode) + } + + implicit class AnalyzerExtendsions[V <: Value](val analyzer: Analyzer[V]) extends AnyVal { + def frameAt(instruction: AbstractInsnNode, methodNode: MethodNode): Frame[V] = analyzer.getFrames()(methodNode.instructions.indexOf(instruction)) } - implicit class `frame extensions`[V <: Value](val frame: Frame[V]) extends AnyVal { - def peekDown(n: Int): V = { - val topIndex = frame.getStackSize - 1 - frame.getStack(topIndex - n) + implicit class FrameExtensions[V <: Value](val frame: Frame[V]) extends AnyVal { + /** + * The value `n` positions down the stack. + */ + def peekStack(n: Int): V = frame.getStack(frame.getMaxStackSize - 1 - n) + + /** + * The index of the current stack top. + */ + def stackTop = frame.getLocals + frame.getStackSize - 1 + + /** + * Gets the value at slot i, where i may be a local or a stack index. + */ + def getValue(i: Int): V = { + if (i < frame.getLocals) frame.getLocal(i) + else frame.getStack(i - frame.getLocals) + } + + /** + * Sets the value at slot i, where i may be a local or a stack index. + */ + def setValue(i: Int, value: V): Unit = { + if (i < frame.getLocals) frame.setLocal(i, value) + else frame.setStack(i - frame.getLocals, value) } } } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala index ac5c9ce2e6..3aca15da69 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala @@ -189,7 +189,7 @@ class Inliner[BT <: BTypes](val btypes: BT) { // there's no need to run eliminateUnreachableCode here. building the call graph does that // already, no code can become unreachable in the meantime. 
val analyzer = new AsmAnalyzer(callsite.callsiteMethod, callsite.callsiteClass.internalName, new SourceInterpreter) - val receiverValue = analyzer.frameAt(callsite.callsiteInstruction).peekDown(traitMethodArgumentTypes.length) + val receiverValue = analyzer.frameAt(callsite.callsiteInstruction).peekStack(traitMethodArgumentTypes.length) for (i <- receiverValue.insns.asScala) { val cast = new TypeInsnNode(CHECKCAST, selfParamType.internalName) callsite.callsiteMethod.instructions.insert(i, cast) @@ -400,7 +400,7 @@ class Inliner[BT <: BTypes](val btypes: BT) { val inlinedReturn = instructionMap(originalReturn) val returnReplacement = new InsnList - def drop(slot: Int) = returnReplacement add getPop(frame.peekDown(slot).getSize) + def drop(slot: Int) = returnReplacement add getPop(frame.peekStack(slot).getSize) // for non-void methods, store the stack top into the return local variable if (hasReturnValue) { diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/InstructionResultSize.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/InstructionResultSize.scala new file mode 100644 index 0000000000..8d744f6d13 --- /dev/null +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/InstructionResultSize.scala @@ -0,0 +1,240 @@ +package scala.tools.nsc.backend.jvm.opt + +import scala.annotation.switch +import scala.tools.asm.{Handle, Type, Opcodes} +import scala.tools.asm.tree._ + +object InstructionResultSize { + import Opcodes._ + def apply(instruction: AbstractInsnNode): Int = (instruction.getOpcode: @switch) match { + // The order of opcodes is (almost) the same as in Opcodes.java + case ACONST_NULL => 1 + + case ICONST_M1 | + ICONST_0 | + ICONST_1 | + ICONST_2 | + ICONST_3 | + ICONST_4 | + ICONST_5 => 1 + + case LCONST_0 | + LCONST_1 => 2 + + case FCONST_0 | + FCONST_1 | + FCONST_2 => 1 + + case DCONST_0 | + DCONST_1 => 2 + + case BIPUSH | + SIPUSH => 1 + + case LDC => + instruction.asInstanceOf[LdcInsnNode].cst match { + case _: java.lang.Integer | + _: java.lang.Float | + _: String | + _: Type | + _: Handle => 1 + + case _: java.lang.Long | + _: java.lang.Double => 2 + } + + case ILOAD | + FLOAD | + ALOAD => 1 + + case LLOAD | + DLOAD => 2 + + case IALOAD | + FALOAD | + AALOAD | + BALOAD | + CALOAD | + SALOAD => 1 + + case LALOAD | + DALOAD => 2 + + case ISTORE | + LSTORE | + FSTORE | + DSTORE | + ASTORE => 0 + + case IASTORE | + LASTORE | + FASTORE | + DASTORE | + AASTORE | + BASTORE | + CASTORE | + SASTORE => 0 + + case POP | + POP2 => 0 + + case DUP | + DUP_X1 | + DUP_X2 | + DUP2 | + DUP2_X1 | + DUP2_X2 | + SWAP => throw new IllegalArgumentException("Can't compute the size of DUP/SWAP without knowing what's on stack top") + + case IADD | + FADD => 1 + + case LADD | + DADD => 2 + + case ISUB | + FSUB => 1 + + case LSUB | + DSUB => 2 + + case IMUL | + FMUL => 1 + + case LMUL | + DMUL => 2 + + case IDIV | + FDIV => 1 + + case LDIV | + DDIV => 2 + + case IREM | + FREM => 1 + + case LREM | + DREM => 2 + + case INEG | + FNEG => 1 + + case LNEG | + DNEG => 2 + + case ISHL | + ISHR => 1 + + case LSHL | + LSHR => 2 + + case IUSHR => 1 + + case LUSHR => 2 + + case IAND | + IOR | + IXOR => 1 + + case LAND | + LOR | + LXOR => 2 + + case IINC => 1 + + case I2F | + L2I | + L2F | + F2I | + D2I | + D2F | + I2B | + I2C | + I2S => 1 + + case I2L | + I2D | + L2D | + F2L | + F2D | + D2L => 2 + + case LCMP | + FCMPL | + FCMPG | + DCMPL | + DCMPG => 1 + + case IFEQ | + IFNE | + IFLT | + IFGE | + IFGT | + IFLE => 0 + + case IF_ICMPEQ | + IF_ICMPNE | + IF_ICMPLT | + IF_ICMPGE | + IF_ICMPGT | + IF_ICMPLE | + 
IF_ACMPEQ | + IF_ACMPNE => 0 + + case GOTO => 0 + + case JSR => throw new IllegalArgumentException("Subroutines are not supported.") + + case RET => 0 + + case TABLESWITCH | + LOOKUPSWITCH => 0 + + case IRETURN | + FRETURN | + ARETURN => 1 + + case LRETURN | + DRETURN => 2 + + case RETURN => 0 + + case GETSTATIC => Type.getType(instruction.asInstanceOf[FieldInsnNode].desc).getSize + + case PUTSTATIC => 0 + + case GETFIELD => Type.getType(instruction.asInstanceOf[FieldInsnNode].desc).getSize + + case PUTFIELD => 0 + + case INVOKEVIRTUAL | + INVOKESPECIAL | + INVOKESTATIC | + INVOKEINTERFACE => + val desc = instruction.asInstanceOf[MethodInsnNode].desc + Type.getReturnType(desc).getSize + + case INVOKEDYNAMIC => + val desc = instruction.asInstanceOf[InvokeDynamicInsnNode].desc + Type.getReturnType(desc).getSize + + case NEW => 1 + + case NEWARRAY | + ANEWARRAY | + ARRAYLENGTH => 1 + + case ATHROW => 0 + + case CHECKCAST | + INSTANCEOF => 1 + + case MONITORENTER | + MONITOREXIT => 0 + + case MULTIANEWARRAY => 1 + + case IFNULL | + IFNONNULL => 0 + } +} diff --git a/test/junit/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzerTest.scala new file mode 100644 index 0000000000..92574329db --- /dev/null +++ b/test/junit/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzerTest.scala @@ -0,0 +1,205 @@ +package scala.tools.nsc +package backend.jvm +package analysis + +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 +import org.junit.Test +import scala.tools.asm.Opcodes._ +import org.junit.Assert._ + +import CodeGenTools._ +import scala.tools.asm.tree.{AbstractInsnNode, MethodNode} +import scala.tools.nsc.backend.jvm.BTypes._ +import scala.tools.partest.ASMConverters +import ASMConverters._ +import scala.tools.testing.ClearAfterClass +import scala.tools.nsc.backend.jvm.opt.BytecodeUtils._ +import AsmUtils._ + +import scala.collection.convert.decorateAsScala._ + +object NullnessAnalyzerTest extends ClearAfterClass.Clearable { + var noOptCompiler = newCompiler(extraArgs = "-Ybackend:GenBCode -Yopt:l:none") + + def clear(): Unit = { + noOptCompiler = null + } +} + +@RunWith(classOf[JUnit4]) +class NullnessAnalyzerTest extends ClearAfterClass { + ClearAfterClass.stateToClear = NullnessAnalyzerTest + val noOptCompiler = NullnessAnalyzerTest.noOptCompiler + + def newNullnessAnalyzer(methodNode: MethodNode, classInternalName: InternalName = "C"): NullnessAnalyzer = { + val nullnessAnalyzer = new NullnessAnalyzer + nullnessAnalyzer.analyze(classInternalName, methodNode) + nullnessAnalyzer + } + + /** + * Instructions that match `query` when textified. + * If `query` starts with a `+`, the next instruction is returned. + */ + def findInstr(method: MethodNode, query: String): List[AbstractInsnNode] = { + val useNext = query(0) == '+' + val instrPart = if (useNext) query.drop(1) else query + val insns = method.instructions.iterator.asScala.find(i => textify(i) contains instrPart).toList + if (useNext) insns.map(_.getNext) else insns + } + + def testNullness(analyzer: NullnessAnalyzer, method: MethodNode, query: String, index: Int, nullness: Nullness): Unit = { + for (i <- findInstr(method, query)) { + val r = analyzer.frameAt(i, method).getValue(index).nullness + assertTrue(s"Expected: $nullness, found: $r. 
At instr ${textify(i)}", nullness == r) + } + } + + // debug / helper for writing tests + def showAllNullnessFrames(analyzer: NullnessAnalyzer, method: MethodNode): String = { + val instrLength = method.instructions.iterator.asScala.map(textify(_).length).max + val lines = for (i <- method.instructions.iterator.asScala) yield { + val f = analyzer.frameAt(i, method) + val frameString = { + if (f == null) "null" + else f.toString.split("NullnessValue").iterator + .map(_.trim).filter(_.nonEmpty) + .map(s => "%7s".format(s.replaceAll("""\((.*),false\)""", "$1"))) + .zipWithIndex.map({case (s, i) => s"$i: $s"}) + .mkString(", ") + } + ("%"+ instrLength +"s: %s").format(textify(i), frameString) + } + lines.mkString("\n") + } + + @Test + def showNullnessFramesTest(): Unit = { + val List(m) = compileMethods(noOptCompiler)("def f = this.toString") + + // NOTE: the frame for an instruction represents the state *before* executing that instr. + // So in the frame for `ALOAD 0`, the stack is still empty. + + val res = + """ L0: 0: NotNull + | LINENUMBER 1 L0: 0: NotNull + | ALOAD 0: 0: NotNull + |INVOKEVIRTUAL java/lang/Object.toString ()Ljava/lang/String;: 0: NotNull, 1: NotNull + | ARETURN: 0: NotNull, 1: Unknown + | L0: null""".stripMargin + assertTrue(showAllNullnessFrames(newNullnessAnalyzer(m), m) == res) + } + + @Test + def thisNonNull(): Unit = { + val List(m) = compileMethods(noOptCompiler)("def f = this.toString") + val a = newNullnessAnalyzer(m) + testNullness(a, m, "ALOAD 0", 0, NotNull) + } + + @Test + def instanceMethodCall(): Unit = { + val List(m) = compileMethods(noOptCompiler)("def f(a: String) = a.trim") + val a = newNullnessAnalyzer(m) + testNullness(a, m, "INVOKEVIRTUAL java/lang/String.trim", 1, Unknown) + testNullness(a, m, "ARETURN", 1, NotNull) + } + + @Test + def constructorCall(): Unit = { + val List(m) = compileMethods(noOptCompiler)("def f = { val a = new Object; a.toString }") + val a = newNullnessAnalyzer(m) + + // for reference, the output of showAllNullnessFrames(a, m) - note that the frame represents the state *before* executing the instr. 
+ // NEW java/lang/Object: 0: NotNull, 1: Unknown + // DUP: 0: NotNull, 1: Unknown, 2: Unknown + // INVOKESPECIAL java/lang/Object.: 0: NotNull, 1: Unknown, 2: Unknown, 3: Unknown + // ASTORE 1: 0: NotNull, 1: Unknown, 2: NotNull + // ALOAD 1: 0: NotNull, 1: NotNull + // INVOKEVIRTUAL java/lang/Object.toString: 0: NotNull, 1: NotNull, 2: NotNull + // ARETURN: 0: NotNull, 1: NotNull, 2: Unknown + + for ((insn, index, nullness) <- List( + ("+NEW", 2, Unknown), // new value at slot 2 on the stack + ("+DUP", 3, Unknown), + ("+INVOKESPECIAL java/lang/Object", 2, NotNull), // after calling the initializer on 3, the value at 2 becomes NotNull + ("ASTORE 1", 1, Unknown), // before the ASTORE 1, nullness of the value in local 1 is Unknown + ("+ASTORE 1", 1, NotNull), // after storing the value at 2 in local 1, the local 1 is NotNull + ("+ALOAD 1", 2, NotNull), // loading the value 1 puts a NotNull value on the stack (at 2) + ("+INVOKEVIRTUAL java/lang/Object.toString", 2, Unknown) // nullness of value returned by `toString` is Unknown + )) testNullness(a, m, insn, index, nullness) + } + + @Test + def explicitNull(): Unit = { + val List(m) = compileMethods(noOptCompiler)("def f = { var a: Object = null; a }") + val a = newNullnessAnalyzer(m) + for ((insn, index, nullness) <- List( + ("+ACONST_NULL", 2, Null), + ("+ASTORE 1", 1, Null), + ("+ALOAD 1", 2, Null) + )) testNullness(a, m, insn, index, nullness) + } + + @Test + def stringLiteralsNotNull(): Unit = { + val List(m) = compileMethods(noOptCompiler)("""def f = { val a = "hi"; a.trim }""") + val a = newNullnessAnalyzer(m) + testNullness(a, m, "+ASTORE 1", 1, NotNull) + } + + @Test + def newArraynotNull() { + val List(m) = compileMethods(noOptCompiler)("def f = { val a = new Array[Int](2); a(0) }") + val a = newNullnessAnalyzer(m) + testNullness(a, m, "+NEWARRAY T_INT", 2, NotNull) // new array on stack + testNullness(a, m, "+ASTORE 1", 1, NotNull) // local var (a) + } + + @Test + def aliasBranching(): Unit = { + val code = + """def f(o: Object) = { + | var a: Object = o // a and o are aliases + | var b: Object = null + | var c: Object = null + | var d: Object = o + | if ("".trim == "") { + | b = o + | c = o // a, o, b, aliases + | d = null + | } else { + | b = a // a, o, b aliases + | d = null + | } + | b.toString // a, o, b aliases (so they become NotNull), but not c + | // d is null here, assinged in both branches. 
+ |} + """.stripMargin + val List(m) = compileMethods(noOptCompiler)(code) + val a = newNullnessAnalyzer(m) + + val trim = "INVOKEVIRTUAL java/lang/String.trim" + val toSt = "INVOKEVIRTUAL java/lang/Object.toString" + val end = s"+$toSt" + for ((insn, index, nullness) <- List( + (trim, 0, NotNull), // this + (trim, 1, Unknown), // parameter o + (trim, 2, Unknown), // a + (trim, 3, Null), // b + (trim, 4, Null), // c + (trim, 5, Unknown), // d + + (toSt, 2, Unknown), // a, still the same + (toSt, 3, Unknown), // b, was re-assinged in both branches to Unknown + (toSt, 4, Unknown), // c, was re-assigned in one branch to Unknown + (toSt, 5, Null), // d, was assigned to null in both branches + + (end, 2, NotNull), // a, NotNull (alias of b) + (end, 3, NotNull), // b, receiver of toString + (end, 4, Unknown), // c, no change (not an alias of b) + (end, 5, Null) // d, no change + )) testNullness(a, m, insn, index, nullness) + } +} diff --git a/versions.properties b/versions.properties index 406690861e..a7ec8caedc 100644 --- a/versions.properties +++ b/versions.properties @@ -33,7 +33,7 @@ scala-swing.version.number=1.0.2 akka-actor.version.number=2.3.10 actors-migration.version.number=1.1.0 jline.version=2.12.1 -scala-asm.version=5.0.3-scala-3 +scala-asm.version=5.0.4-scala-1 # external modules, used internally (not shipped) partest.version.number=1.0.7 -- cgit v1.2.3 From 4f1ac5ee06a1e4218bc517a4328d5460b6da1815 Mon Sep 17 00:00:00 2001 From: kenji yoshida <6b656e6a69@gmail.com> Date: Mon, 25 May 2015 13:25:07 +0900 Subject: fix typo --- spec/09-top-level-definitions.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/spec/09-top-level-definitions.md b/spec/09-top-level-definitions.md index e3185d8b7d..b8a8dc7e0a 100644 --- a/spec/09-top-level-definitions.md +++ b/spec/09-top-level-definitions.md @@ -151,7 +151,7 @@ contain a class `B`, a compiler-time error would result. A _program_ is a top-level object that has a member method _main_ of type `(Array[String])Unit`. Programs can be -executed from a command shell. The program's command arguments are are +executed from a command shell. The program's command arguments are passed to the `main` method as a parameter of type `Array[String]`. -- cgit v1.2.3 From f4381866a8560ed65ce411c2f28ffd9b4df945e2 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Fri, 22 May 2015 17:34:06 +0200 Subject: Enable nullness analysis in the inliner When inlining an instance call, the inliner has to ensure that a NPE is still thrown if the receiver object is null. By using the nullness analysis, we can avoid emitting this code in case the receiver object is known to be not-null. 
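
A source-level sketch of the guarantee being preserved, similar in spirit to the test added below (`C` here is a hypothetical example class):

```
class C {
  @inline final def f = 1
  def t(c: C) = c.f   // after inlining the body of f, t(null) must still throw a
                      // NullPointerException, so a null check on c is emitted;
                      // it can only be dropped when the analysis proves c is not null
}
```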
--- .../tools/nsc/backend/jvm/opt/CallGraph.scala | 26 +++++++++++++++------- .../scala/tools/nsc/backend/jvm/opt/Inliner.scala | 10 +++++---- test/files/neg/inlineMaxSize.check | 4 ++-- test/files/neg/inlineMaxSize.scala | 2 +- .../tools/nsc/backend/jvm/opt/InlinerTest.scala | 16 +++++++++++++ 5 files changed, 43 insertions(+), 15 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala index 028f0f8fa6..c6df86b297 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala @@ -8,12 +8,14 @@ package backend.jvm package opt import scala.reflect.internal.util.{NoPosition, Position} +import scala.tools.asm.{Opcodes, Type} import scala.tools.asm.tree._ import scala.collection.convert.decorateAsScala._ -import scala.tools.nsc.backend.jvm.BTypes.{MethodInlineInfo, InternalName} +import scala.tools.nsc.backend.jvm.BTypes.InternalName import scala.tools.nsc.backend.jvm.BackendReporting._ -import scala.tools.nsc.backend.jvm.opt.BytecodeUtils.AsmAnalyzer +import scala.tools.nsc.backend.jvm.analysis.{NotNull, NullnessAnalyzer} import ByteCodeRepository.{Source, CompilationUnit} +import BytecodeUtils._ class CallGraph[BT <: BTypes](val btypes: BT) { import btypes._ @@ -93,12 +95,13 @@ class CallGraph[BT <: BTypes](val btypes: BT) { // TODO: run dataflow analyses to make the call graph more precise // - producers to get forwarded parameters (ForwardedParam) // - typeAnalysis for more precise argument types, more precise callee - // - nullAnalysis to skip emitting the receiver-null-check when inlining - // TODO: for now we run a basic analyzer to get the stack height at the call site. - // once we run a more elaborate analyzer (types, nullness), we can get the stack height out of there. + // For now we run a NullnessAnalyzer. It is used to determine if the receiver of an instance + // call is known to be not-null, in which case we don't have to emit a null check when inlining. + // It is also used to get the stack height at the call site. 
localOpt.minimalRemoveUnreachableCode(methodNode, definingClass.internalName) - val analyzer = new AsmAnalyzer(methodNode, definingClass.internalName) + val analyzer = new NullnessAnalyzer + analyzer.analyze(definingClass.internalName, methodNode) methodNode.instructions.iterator.asScala.collect({ case call: MethodInsnNode => @@ -126,13 +129,20 @@ class CallGraph[BT <: BTypes](val btypes: BT) { Nil } + val receiverNotNull = call.getOpcode == Opcodes.INVOKESTATIC || { + val numArgs = Type.getArgumentTypes(call.desc).length + val frame = analyzer.frameAt(call, methodNode) + frame.getStack(frame.getStackSize - 1 - numArgs).nullness == NotNull + } + Callsite( callsiteInstruction = call, callsiteMethod = methodNode, callsiteClass = definingClass, callee = callee, argInfos = argInfos, - callsiteStackHeight = analyzer.frameAt(call).getStackSize, + callsiteStackHeight = analyzer.frameAt(call, methodNode).getStackSize, + receiverKnownNotNull = receiverNotNull, callsitePosition = callsitePositions.getOrElse(call, NoPosition) ) }).toList @@ -154,7 +164,7 @@ class CallGraph[BT <: BTypes](val btypes: BT) { */ final case class Callsite(callsiteInstruction: MethodInsnNode, callsiteMethod: MethodNode, callsiteClass: ClassBType, callee: Either[OptimizerWarning, Callee], argInfos: List[ArgInfo], - callsiteStackHeight: Int, callsitePosition: Position) { + callsiteStackHeight: Int, receiverKnownNotNull: Boolean, callsitePosition: Position) { override def toString = "Invocation of" + s" ${callee.map(_.calleeDeclarationClass.internalName).getOrElse("?")}.${callsiteInstruction.name + callsiteInstruction.desc}" + diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala index 3aca15da69..814c78b69c 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala @@ -49,7 +49,7 @@ class Inliner[BT <: BTypes](val btypes: BT) { if (callGraph.callsites contains request.callsiteInstruction) { val r = inline(request.callsiteInstruction, request.callsiteStackHeight, request.callsiteMethod, request.callsiteClass, callee.callee, callee.calleeDeclarationClass, - receiverKnownNotNull = false, keepLineNumbers = false) + request.receiverKnownNotNull, keepLineNumbers = false) for (warning <- r) { if ((callee.annotatedInline && btypes.compilerSettings.YoptWarningEmitAtInlineFailed) || warning.emitWarning(compilerSettings)) { @@ -89,7 +89,7 @@ class Inliner[BT <: BTypes](val btypes: BT) { */ def selectCallsitesForInlining: List[Callsite] = { callsites.valuesIterator.filter({ - case callsite @ Callsite(_, _, _, Right(Callee(callee, calleeDeclClass, safeToInline, _, annotatedInline, _, warning)), _, _, pos) => + case callsite @ Callsite(_, _, _, Right(Callee(callee, calleeDeclClass, safeToInline, _, annotatedInline, _, warning)), _, _, _, pos) => val res = doInlineCallsite(callsite) if (!res) { @@ -112,7 +112,7 @@ class Inliner[BT <: BTypes](val btypes: BT) { res - case Callsite(ins, _, _, Left(warning), _, _, pos) => + case Callsite(ins, _, _, Left(warning), _, _, _, pos) => if (warning.emitWarning(compilerSettings)) backendReporting.inlinerWarning(pos, s"failed to determine if ${ins.name} should be inlined:\n$warning") false @@ -123,7 +123,7 @@ class Inliner[BT <: BTypes](val btypes: BT) { * The current inlining heuristics are simple: inline calls to methods annotated @inline. 
*/ def doInlineCallsite(callsite: Callsite): Boolean = callsite match { - case Callsite(_, _, _, Right(Callee(callee, calleeDeclClass, safeToInline, _, annotatedInline, _, warning)), _, _, pos) => + case Callsite(_, _, _, Right(Callee(callee, calleeDeclClass, safeToInline, _, annotatedInline, _, warning)), _, _, _, pos) => if (compilerSettings.YoptInlineHeuristics.value == "everything") safeToInline else annotatedInline && safeToInline @@ -215,6 +215,7 @@ class Inliner[BT <: BTypes](val btypes: BT) { calleeInfoWarning = infoWarning)), argInfos = Nil, callsiteStackHeight = callsite.callsiteStackHeight, + receiverKnownNotNull = callsite.receiverKnownNotNull, callsitePosition = callsite.callsitePosition ) callGraph.callsites(newCallsiteInstruction) = staticCallsite @@ -444,6 +445,7 @@ class Inliner[BT <: BTypes](val btypes: BT) { callee = originalCallsite.callee, argInfos = Nil, // TODO: re-compute argInfos for new destination (once we actually compute them) callsiteStackHeight = callsiteStackHeight + originalCallsite.callsiteStackHeight, + receiverKnownNotNull = originalCallsite.receiverKnownNotNull, callsitePosition = originalCallsite.callsitePosition ) diff --git a/test/files/neg/inlineMaxSize.check b/test/files/neg/inlineMaxSize.check index d218a8b6e2..9d790e154c 100644 --- a/test/files/neg/inlineMaxSize.check +++ b/test/files/neg/inlineMaxSize.check @@ -2,8 +2,8 @@ inlineMaxSize.scala:7: warning: C::i()I is annotated @inline but could not be in The size of the callsite method C::j()I would exceed the JVM method size limit after inlining C::i()I. - @inline final def j = i + i - ^ + @inline final def j = i + i + i + ^ error: No warnings can be incurred under -Xfatal-warnings. one warning found one error found diff --git a/test/files/neg/inlineMaxSize.scala b/test/files/neg/inlineMaxSize.scala index 16dc0d9538..9d2db1a357 100644 --- a/test/files/neg/inlineMaxSize.scala +++ b/test/files/neg/inlineMaxSize.scala @@ -4,5 +4,5 @@ class C { @inline final def g = f + f + f + f + f + f + f + f + f + f @inline final def h = g + g + g + g + g + g + g + g + g + g @inline final def i = h + h + h + h + h + h + h + h + h + h - @inline final def j = i + i + @inline final def j = i + i + i } diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala index 0fc3601603..b8c5f85c49 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala @@ -975,4 +975,20 @@ class InlinerTest extends ClearAfterClass { val List(c) = compile(code) assertInvoke(getSingleMethod(c, "t"), "java/lang/Error", "") } + + @Test + def noRedunantNullChecks(): Unit = { + val code = + """class C { + | @inline final def f: String = "hai!" 
+ | def t(c: C) = {c.f; c.f} // null check on the first, but not the second + |} + """.stripMargin + + val List(c) = compile(code) + val t = getSingleMethod(c, "t").instructions + assertNoInvoke(t) + assert(2 == t.collect({case Ldc(_, "hai!") => }).size) // twice the body of f + assert(1 == t.collect({case Jump(IFNONNULL, _) => }).size) // one single null check + } } -- cgit v1.2.3 From 6372c71c7c6b3661b51c8dca85d7a241f0fa4813 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Sat, 23 May 2015 10:35:52 +0200 Subject: Fix wrong indexing in FrameExtensions.peekStack --- src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala index 314105da44..911bf3d189 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala @@ -341,7 +341,7 @@ object BytecodeUtils { /** * The value `n` positions down the stack. */ - def peekStack(n: Int): V = frame.getStack(frame.getMaxStackSize - 1 - n) + def peekStack(n: Int): V = frame.getStack(frame.getStackSize - 1 - n) /** * The index of the current stack top. -- cgit v1.2.3 From 53a274e3f1258bd7d26a72d4394108b2f4d04579 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Sun, 24 May 2015 09:04:52 +0200 Subject: Fix wrong result in InstructionStackEffect for ATHROW, RETURN --- .../tools/nsc/backend/jvm/analysis/InstructionStackEffect.scala | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/InstructionStackEffect.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/InstructionStackEffect.scala index 3d6c53765e..56c8c2e4e3 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/analysis/InstructionStackEffect.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/InstructionStackEffect.scala @@ -196,9 +196,9 @@ object InstructionStackEffect { LRETURN | FRETURN | DRETURN | - ARETURN => (frame.getStackSize, 0) + ARETURN => (1, 0) // Frame.execute consumes one stack value - case RETURN => (frame.getStackSize, 0) + case RETURN => (0, 0) // Frame.execute does not change the stack case GETSTATIC => (0, 1) @@ -229,7 +229,7 @@ object InstructionStackEffect { ANEWARRAY | ARRAYLENGTH => (1, 1) - case ATHROW => (frame.getStackSize, 0) + case ATHROW => (1, 0) // Frame.execute consumes one stack value case CHECKCAST => (0, 0) -- cgit v1.2.3 From 460e10cdb2fdfb9becaed5590ec77c7d5324a4db Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Sun, 24 May 2015 14:05:24 +0200 Subject: Address review feedback Address feedback in #4516 / 57b8da4cd8. Save allocations of NullnessValue - there's only 4 possible instances. Also save tuple allocations in InstructionStackEffect. 
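
The caching idea in a standalone, hypothetical form (`PairInterning` is illustrative only; it uses a precomputed table, whereas the patch below uses a lazily populated `IntMap` keyed by `(x << 8) + y`):

```
object PairInterning {
  // Only a small, closed set of (consumed, produced) pairs ever occurs, so interning
  // them avoids allocating a fresh Tuple2 for every instruction that is interpreted.
  private val cached: Array[(Int, Int)] =
    Array.tabulate(11 * 7)(k => (k / 7, k % 7))   // x in 0..10, y in 0..6

  def t(x: Int, y: Int): (Int, Int) =
    if (x <= 10 && y <= 6) cached(x * 7 + y)
    else (x, y)                                   // rare larger values: allocate as before
}
```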
--- .../nsc/backend/jvm/analysis/AliasingFrame.scala | 8 +- .../jvm/analysis/InstructionStackEffect.scala | 104 ++++++++++++--------- .../backend/jvm/analysis/NullnessAnalyzer.scala | 48 +++++++--- .../jvm/analysis/NullnessAnalyzerTest.scala | 18 ++-- 4 files changed, 109 insertions(+), 69 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/AliasingFrame.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/AliasingFrame.scala index 9494553ce1..7bbe1e2a49 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/analysis/AliasingFrame.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/AliasingFrame.scala @@ -38,7 +38,7 @@ class AliasingFrame[V <: Value](nLocals: Int, nStack: Int) extends Frame[V](nLoc /** * Returns the indices of the values array which are aliases of the object `id`. */ - def valuesWithAliasId(id: Long): Set[Int] = immutable.BitSet.empty ++ aliasIds.indices.filter(i => aliasId(i) == id) + def valuesWithAliasId(id: Long): Set[Int] = immutable.BitSet.empty ++ aliasIds.indices.iterator.filter(i => aliasId(i) == id) /** * The set of aliased values for a given entry in the `values` array. @@ -71,7 +71,11 @@ class AliasingFrame[V <: Value](nLocals: Int, nStack: Int) extends Frame[V](nLoc def stackTop: Int = this.stackTop def peekStack(n: Int): V = this.peekStack(n) - val (consumed, produced) = InstructionStackEffect(insn, this) // needs to be called before super.execute, see its doc + // the val pattern `val (p, c) = f` still allocates a tuple (https://github.com/scala-opt/scala/issues/28) + val prodCons = InstructionStackEffect(insn, this) // needs to be called before super.execute, see its doc + val consumed = prodCons._1 + val produced = prodCons._2 + super.execute(insn, interpreter) (insn.getOpcode: @switch) match { diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/InstructionStackEffect.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/InstructionStackEffect.scala index 56c8c2e4e3..a7d6f74557 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/analysis/InstructionStackEffect.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/InstructionStackEffect.scala @@ -8,8 +8,26 @@ import scala.tools.asm.Type import scala.tools.asm.tree.{MultiANewArrayInsnNode, InvokeDynamicInsnNode, MethodInsnNode, AbstractInsnNode} import scala.tools.asm.tree.analysis.{Frame, Value} import opt.BytecodeUtils._ +import collection.immutable object InstructionStackEffect { + private var cache: immutable.IntMap[(Int, Int)] = immutable.IntMap.empty + private def t(x: Int, y: Int): (Int, Int) = { + // x can go up to 255 (number of parameters of a method, dimensions in multianewarray) we cache + // x up to 10, which covers most cases and limits the cache. y doesn't go above 6 (see cases). + if (x > 10 || y > 6) (x, y) + else { + val key = (x << 8) + y // this would work for any x < 256 + if (cache contains key) { + cache(key) + } else { + val r = (x, y) + cache += key -> r + r + } + } + } + /** * Returns a pair with the number of stack values consumed and produced by `insn`. * This method requires the `frame` to be in the state **before** executing / interpreting @@ -20,7 +38,7 @@ object InstructionStackEffect { (insn.getOpcode: @switch) match { // The order of opcodes is the same as in Frame.execute. 
- case NOP => (0, 0) + case NOP => t(0, 0) case ACONST_NULL | ICONST_M1 | @@ -44,7 +62,7 @@ object InstructionStackEffect { LLOAD | FLOAD | DLOAD | - ALOAD => (0, 1) + ALOAD => t(0, 1) case IALOAD | LALOAD | @@ -53,13 +71,13 @@ object InstructionStackEffect { AALOAD | BALOAD | CALOAD | - SALOAD => (2, 1) + SALOAD => t(2, 1) case ISTORE | LSTORE | FSTORE | DSTORE | - ASTORE => (1, 0) + ASTORE => t(1, 0) case IASTORE | LASTORE | @@ -68,41 +86,41 @@ object InstructionStackEffect { AASTORE | BASTORE | CASTORE | - SASTORE => (3, 0) + SASTORE => t(3, 0) - case POP => (1, 0) + case POP => t(1, 0) case POP2 => val isSize2 = peekStack(0).getSize == 2 - if (isSize2) (1, 0) else (2, 0) + if (isSize2) t(1, 0) else t(2, 0) - case DUP => (0, 1) + case DUP => t(0, 1) - case DUP_X1 => (2, 3) + case DUP_X1 => t(2, 3) case DUP_X2 => val isSize2 = peekStack(1).getSize == 2 - if (isSize2) (2, 3) else (3, 4) + if (isSize2) t(2, 3) else t(3, 4) case DUP2 => val isSize2 = peekStack(0).getSize == 2 - if (isSize2) (0, 1) else (0, 2) + if (isSize2) t(0, 1) else t(0, 2) case DUP2_X1 => val isSize2 = peekStack(0).getSize == 2 - if (isSize2) (2, 3) else (3, 4) + if (isSize2) t(2, 3) else t(3, 4) case DUP2_X2 => val v1isSize2 = peekStack(0).getSize == 2 if (v1isSize2) { val v2isSize2 = peekStack(1).getSize == 2 - if (v2isSize2) (2, 3) else (3, 4) + if (v2isSize2) t(2, 3) else t(3, 4) } else { val v3isSize2 = peekStack(2).getSize == 2 - if (v3isSize2) (3, 5) else (4, 6) + if (v3isSize2) t(3, 5) else t(4, 6) } - case SWAP => (2, 2) + case SWAP => t(2, 2) case IADD | LADD | @@ -123,12 +141,12 @@ object InstructionStackEffect { IREM | LREM | FREM | - DREM => (2, 1) + DREM => t(2, 1) case INEG | LNEG | FNEG | - DNEG => (1, 1) + DNEG => t(1, 1) case ISHL | LSHL | @@ -141,9 +159,9 @@ object InstructionStackEffect { IOR | LOR | IXOR | - LXOR => (2, 1) + LXOR => t(2, 1) - case IINC => (0, 0) + case IINC => t(0, 0) case I2L | I2F | @@ -159,20 +177,20 @@ object InstructionStackEffect { D2F | I2B | I2C | - I2S => (1, 1) + I2S => t(1, 1) case LCMP | FCMPL | FCMPG | DCMPL | - DCMPG => (2, 1) + DCMPG => t(2, 1) case IFEQ | IFNE | IFLT | IFGE | IFGT | - IFLE => (1, 0) + IFLE => t(1, 0) case IF_ICMPEQ | IF_ICMPNE | @@ -181,32 +199,32 @@ object InstructionStackEffect { IF_ICMPGT | IF_ICMPLE | IF_ACMPEQ | - IF_ACMPNE => (2, 0) + IF_ACMPNE => t(2, 0) - case GOTO => (0, 0) + case GOTO => t(0, 0) - case JSR => (0, 1) + case JSR => t(0, 1) - case RET => (0, 0) + case RET => t(0, 0) case TABLESWITCH | - LOOKUPSWITCH => (1, 0) + LOOKUPSWITCH => t(1, 0) case IRETURN | LRETURN | FRETURN | DRETURN | - ARETURN => (1, 0) // Frame.execute consumes one stack value + ARETURN => t(1, 0) // Frame.execute consumes one stack value - case RETURN => (0, 0) // Frame.execute does not change the stack + case RETURN => t(0, 0) // Frame.execute does not change the stack - case GETSTATIC => (0, 1) + case GETSTATIC => t(0, 1) - case PUTSTATIC => (1, 0) + case PUTSTATIC => t(1, 0) - case GETFIELD => (1, 1) + case GETFIELD => t(1, 1) - case PUTFIELD => (2, 0) + case PUTFIELD => t(2, 0) case INVOKEVIRTUAL | INVOKESPECIAL | @@ -215,33 +233,33 @@ object InstructionStackEffect { val desc = insn.asInstanceOf[MethodInsnNode].desc val cons = Type.getArgumentTypes(desc).length + (if (insn.getOpcode == INVOKESTATIC) 0 else 1) val prod = if (Type.getReturnType(desc) == Type.VOID_TYPE) 0 else 1 - (cons, prod) + t(cons, prod) case INVOKEDYNAMIC => val desc = insn.asInstanceOf[InvokeDynamicInsnNode].desc val cons = Type.getArgumentTypes(desc).length val prod = if 
(Type.getReturnType(desc) == Type.VOID_TYPE) 0 else 1 - (cons, prod) + t(cons, prod) - case NEW => (0, 1) + case NEW => t(0, 1) case NEWARRAY | ANEWARRAY | - ARRAYLENGTH => (1, 1) + ARRAYLENGTH => t(1, 1) - case ATHROW => (1, 0) // Frame.execute consumes one stack value + case ATHROW => t(1, 0) // Frame.execute consumes one stack value - case CHECKCAST => (0, 0) + case CHECKCAST => t(0, 0) - case INSTANCEOF => (1, 1) + case INSTANCEOF => t(1, 1) case MONITORENTER | - MONITOREXIT => (1, 0) + MONITOREXIT => t(1, 0) - case MULTIANEWARRAY => (insn.asInstanceOf[MultiANewArrayInsnNode].dims, 1) + case MULTIANEWARRAY => t(insn.asInstanceOf[MultiANewArrayInsnNode].dims, 1) case IFNULL | - IFNONNULL => (1, 0) + IFNONNULL => t(1, 0) } } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzer.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzer.scala index 18c17bc992..4c81b85d0a 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzer.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzer.scala @@ -100,25 +100,43 @@ case object Null extends Nullness * Represents the nullness state for a local variable or stack value. * * Note that nullness of primitive values is not tracked, it will be always [[Unknown]]. - * - * @param nullness The nullness of this value. - * @param longOrDouble True if this value is a long or double. The Analyzer framework needs to know - * the size of each value when interpreting instructions, see `Frame.execute`. */ -final case class NullnessValue(nullness: Nullness, longOrDouble: Boolean) extends Value { - def this(nullness: Nullness, insn: AbstractInsnNode) = this(nullness, longOrDouble = BytecodeUtils.instructionResultSize(insn) == 2) +sealed trait NullnessValue extends Value { + /** + * The nullness of this value. + */ + def nullness: Nullness + /** + * True if this value is a long or double. The Analyzer framework needs to know + * the size of each value when interpreting instructions, see `Frame.execute`. + */ + def isSize2: Boolean /** * The size of the slot described by this value. Cannot be 0 because no values are allocated * for void-typed slots, see NullnessInterpreter.newValue. 
**/ - def getSize: Int = if (longOrDouble) 2 else 1 + def getSize: Int = if (isSize2) 2 else 1 - def merge(other: NullnessValue) = NullnessValue(nullness merge other.nullness, longOrDouble) + def merge(other: NullnessValue) = NullnessValue(nullness merge other.nullness, isSize2) } +object NullValue extends NullnessValue { def nullness = Null; def isSize2 = false; override def toString = "Null" } +object UnknownValue1 extends NullnessValue { def nullness = Unknown; def isSize2 = false; override def toString = "Unknown1" } +object UnknownValue2 extends NullnessValue { def nullness = Unknown; def isSize2 = true; override def toString = "Unknown2" } +object NotNullValue extends NullnessValue { def nullness = NotNull; def isSize2 = false; override def toString = "NotNull" } + object NullnessValue { - def apply(nullness: Nullness, insn: AbstractInsnNode) = new NullnessValue(nullness, insn) + def apply(nullness: Nullness, isSize2: Boolean): NullnessValue = { + if (nullness == Null) NullValue + else if (nullness == NotNull) NotNullValue + else if (isSize2) UnknownValue2 + else UnknownValue1 + } + + def apply(nullness: Nullness, insn: AbstractInsnNode): NullnessValue = { + apply(nullness, isSize2 = BytecodeUtils.instructionResultSize(insn) == 2) + } } final class NullnessInterpreter extends Interpreter[NullnessValue](Opcodes.ASM5) { @@ -133,12 +151,12 @@ final class NullnessInterpreter extends Interpreter[NullnessValue](Opcodes.ASM5) // (2) `tp` may also be `null`. When creating the initial frame, the analyzer invokes // `newValue(null)` for each local variable. We have to return a value of size 1. if (tp == Type.VOID_TYPE) null // (1) - else NullnessValue(Unknown, longOrDouble = tp != null /*(2)*/ && tp.getSize == 2 ) + else NullnessValue(Unknown, isSize2 = tp != null /*(2)*/ && tp.getSize == 2 ) } override def newParameterValue(isInstanceMethod: Boolean, local: Int, tp: Type): NullnessValue = { // For instance methods, the `this` parameter is known to be not null. 
- if (isInstanceMethod && local == 0) NullnessValue(NotNull, longOrDouble = false) + if (isInstanceMethod && local == 0) NullnessValue(NotNull, isSize2 = false) else super.newParameterValue(isInstanceMethod, local, tp) } @@ -162,7 +180,7 @@ final class NullnessInterpreter extends Interpreter[NullnessValue](Opcodes.ASM5) def unaryOperation(insn: AbstractInsnNode, value: NullnessValue): NullnessValue = (insn.getOpcode: @switch) match { case Opcodes.NEWARRAY | - Opcodes.ANEWARRAY => NullnessValue(NotNull, longOrDouble = false) + Opcodes.ANEWARRAY => NullnessValue(NotNull, isSize2 = false) case _ => NullnessValue(Unknown, insn) } @@ -172,12 +190,12 @@ final class NullnessInterpreter extends Interpreter[NullnessValue](Opcodes.ASM5) } def ternaryOperation(insn: AbstractInsnNode, value1: NullnessValue, value2: NullnessValue, value3: NullnessValue): NullnessValue = { - NullnessValue(Unknown, longOrDouble = false) + NullnessValue(Unknown, isSize2 = false) } def naryOperation(insn: AbstractInsnNode, values: util.List[_ <: NullnessValue]): NullnessValue = (insn.getOpcode: @switch) match { case Opcodes.MULTIANEWARRAY => - NullnessValue(NotNull, longOrDouble = false) + NullnessValue(NotNull, isSize2 = false) case _ => // TODO: use a list of methods that are known to return non-null values @@ -247,7 +265,7 @@ class NullnessFrame(nLocals: Int, nStack: Int) extends AliasingFrame[NullnessVal if (nullCheckedAliasId != -1) { for (i <- valuesWithAliasId(nullCheckedAliasId)) - this.setValue(i, this.getValue(i).copy(nullness = NotNull)) + this.setValue(i, NotNullValue) } } } diff --git a/test/junit/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzerTest.scala index 92574329db..3d5343e395 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzerTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzerTest.scala @@ -63,9 +63,9 @@ class NullnessAnalyzerTest extends ClearAfterClass { val f = analyzer.frameAt(i, method) val frameString = { if (f == null) "null" - else f.toString.split("NullnessValue").iterator - .map(_.trim).filter(_.nonEmpty) - .map(s => "%7s".format(s.replaceAll("""\((.*),false\)""", "$1"))) + else (0 until (f.getLocals + f.getStackSize)).iterator + .map(f.getValue(_).toString) + .map(s => "%8s".format(s)) .zipWithIndex.map({case (s, i) => s"$i: $s"}) .mkString(", ") } @@ -82,13 +82,13 @@ class NullnessAnalyzerTest extends ClearAfterClass { // So in the frame for `ALOAD 0`, the stack is still empty. val res = - """ L0: 0: NotNull - | LINENUMBER 1 L0: 0: NotNull - | ALOAD 0: 0: NotNull - |INVOKEVIRTUAL java/lang/Object.toString ()Ljava/lang/String;: 0: NotNull, 1: NotNull - | ARETURN: 0: NotNull, 1: Unknown + """ L0: 0: NotNull + | LINENUMBER 1 L0: 0: NotNull + | ALOAD 0: 0: NotNull + |INVOKEVIRTUAL java/lang/Object.toString ()Ljava/lang/String;: 0: NotNull, 1: NotNull + | ARETURN: 0: NotNull, 1: Unknown1 | L0: null""".stripMargin - assertTrue(showAllNullnessFrames(newNullnessAnalyzer(m), m) == res) + assertEquals(showAllNullnessFrames(newNullnessAnalyzer(m), m), res) } @Test -- cgit v1.2.3 From a3bb887e0200cf47a1fa2382a18948b3c553cf26 Mon Sep 17 00:00:00 2001 From: Prashant Sharma Date: Wed, 6 May 2015 16:26:03 +0530 Subject: SI-7747 Make REPL wrappers serialization friendly Spark has been shipping a forked version of our REPL for sometime. We have been trying to fold the patches back into the mainline so they can defork. This is the last outstanding issue. 
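In rough terms (hypothetical names, not the REPL's actual generated code), the difference between the two wrapping strategies looks like this:

```
// Object-based wrapping: a line's vals live in a top-level object, so a closure
// reaches them through a static path, and a fresh classloader re-runs the
// initializer when that object is first touched.
object LineAsObject { val x = { println(" evaluating x"); 0 } }

// Class-based wrapping (-Yrepl-class-based): the vals are fields of a
// Serializable instance that can be captured and shipped with the closure,
// so the already-computed value travels instead of being recomputed.
class LineAsClass extends Serializable { val x = { println(" evaluating x"); 0 } }
object LineAsClass { val INSTANCE = new LineAsClass }
```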
Consider this REPL session: ``` scala> val x = StdIn.readInt scala> class A(a: Int) scala> serializedAndExecuteRemotely { () => new A(x) } ``` As shown by the enclosed test, the REPL, even with the Spark friendly option `-Yrepl-class-based`, will re-initialize `x` on the remote system. This test simulates this by running a REPL session, and then deserializing the resulting closure into a fresh classloader based on the class files generated by that session. Before this patch, it printed "evaluating x" twice. This is based on the Spark change described: https://github.com/mesos/spark/pull/535#discussion_r3541925 A followup commit will avoid the `val lineN$read = ` part if we import classes or type aliases only. [Original commit from Prashant Sharma, test case from Jason Zaugg] --- src/repl/scala/tools/nsc/interpreter/IMain.scala | 48 +++++++++++----- src/repl/scala/tools/nsc/interpreter/Imports.scala | 19 ++++++- test/files/run/repl-serialization.check | 21 +++++++ test/files/run/repl-serialization.scala | 65 ++++++++++++++++++++++ test/files/run/t7747-repl.check | 32 ++++++++--- test/files/run/t7747-repl.scala | 2 + 6 files changed, 162 insertions(+), 25 deletions(-) create mode 100644 test/files/run/repl-serialization.check create mode 100644 test/files/run/repl-serialization.scala diff --git a/src/repl/scala/tools/nsc/interpreter/IMain.scala b/src/repl/scala/tools/nsc/interpreter/IMain.scala index c281126d5f..e355d9f864 100644 --- a/src/repl/scala/tools/nsc/interpreter/IMain.scala +++ b/src/repl/scala/tools/nsc/interpreter/IMain.scala @@ -69,6 +69,8 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set // Used in a test case. def showDirectory() = replOutput.show(out) + lazy val isClassBased: Boolean = settings.Yreplclassbased.value + private[nsc] var printResults = true // whether to print result lines private[nsc] var totalSilence = false // whether to print anything private var _initializeComplete = false // compiler is initialized @@ -310,8 +312,14 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set } def originalPath(name: String): String = originalPath(TermName(name)) - def originalPath(name: Name): String = typerOp path name - def originalPath(sym: Symbol): String = typerOp path sym + def originalPath(name: Name): String = translateOriginalPath(typerOp path name) + def originalPath(sym: Symbol): String = translateOriginalPath(typerOp path sym) + /** For class based repl mode we use an .INSTANCE accessor. */ + val readInstanceName = if(isClassBased) ".INSTANCE" else "" + def translateOriginalPath(p: String): String = { + val readName = java.util.regex.Matcher.quoteReplacement(sessionNames.read) + p.replaceFirst(readName, readName + readInstanceName) + } def flatPath(sym: Symbol): String = flatOp shift sym.javaClassName def translatePath(path: String) = { @@ -758,11 +766,13 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set // object and we can do that much less wrapping. def packageDecl = "package " + packageName + def pathToInstance(name: String) = packageName + "." + name + readInstanceName def pathTo(name: String) = packageName + "." 
+ name def packaged(code: String) = packageDecl + "\n\n" + code - def readPath = pathTo(readName) - def evalPath = pathTo(evalName) + def readPathInstance = pathToInstance(readName) + def readPath = pathTo(readName) + def evalPath = pathTo(evalName) def call(name: String, args: Any*): AnyRef = { val m = evalMethod(name) @@ -802,7 +812,8 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set /** The innermost object inside the wrapper, found by * following accessPath into the outer one. */ - def resolvePathToSymbol(accessPath: String): Symbol = { + def resolvePathToSymbol(fullAccessPath: String): Symbol = { + val accessPath = fullAccessPath.stripPrefix(readPath) val readRoot = readRootPath(readPath) // the outermost wrapper (accessPath split '.').foldLeft(readRoot: Symbol) { case (sym, "") => sym @@ -849,7 +860,6 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set def defines = defHandlers flatMap (_.definedSymbols) def imports = importedSymbols def value = Some(handlers.last) filter (h => h.definesValue) map (h => definedSymbols(h.definesTerm.get)) getOrElse NoSymbol - val lineRep = new ReadEvalPrint() private var _originalLine: String = null @@ -858,6 +868,11 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set /** handlers for each tree in this request */ val handlers: List[MemberHandler] = trees map (memberHandlers chooseHandler _) + val definesClass = handlers.exists { + case _: ClassHandler => true + case _ => false + } + def defHandlers = handlers collect { case x: MemberDefHandler => x } /** list of names used by this expression */ @@ -875,13 +890,13 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set * append to objectName to access anything bound by request. */ lazy val ComputedImports(importsPreamble, importsTrailer, accessPath) = - exitingTyper(importsCode(referencedNames.toSet, ObjectSourceCode)) + exitingTyper(importsCode(referencedNames.toSet, ObjectSourceCode, definesClass)) /** the line of code to compute */ def toCompute = line /** The path of the value that contains the user code. */ - def fullAccessPath = s"${lineRep.readPath}$accessPath" + def fullAccessPath = s"${lineRep.readPathInstance}$accessPath" /** The path of the given member of the wrapping instance. */ def fullPath(vname: String) = s"$fullAccessPath.`$vname`" @@ -911,7 +926,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set def postwrap: String } - private class ObjectBasedWrapper extends Wrapper { + class ObjectBasedWrapper extends Wrapper { def preambleHeader = "object %s {" def postamble = importsTrailer + "\n}" @@ -919,13 +934,16 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set def postwrap = "}\n" } - private class ClassBasedWrapper extends Wrapper { - def preambleHeader = "class %s extends Serializable {" + class ClassBasedWrapper extends Wrapper { + def preambleHeader = "class %s extends Serializable { " /** Adds an object that instantiates the outer wrapping class. 
*/ - def postamble = s"""$importsTrailer + def postamble = s""" + |$importsTrailer + |} + |object ${lineRep.readName} { + | val INSTANCE = new ${lineRep.readName}(); |} - |object ${lineRep.readName} extends ${lineRep.readName} |""".stripMargin import nme.{ INTERPRETER_IMPORT_WRAPPER => iw } @@ -935,7 +953,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set } private lazy val ObjectSourceCode: Wrapper = - if (settings.Yreplclassbased) new ClassBasedWrapper else new ObjectBasedWrapper + if (isClassBased) new ClassBasedWrapper else new ObjectBasedWrapper private object ResultObjectSourceCode extends IMain.CodeAssembler[MemberHandler] { /** We only want to generate this code when the result @@ -994,7 +1012,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set } } - lazy val resultSymbol = lineRep.resolvePathToSymbol(accessPath) + lazy val resultSymbol = lineRep.resolvePathToSymbol(fullAccessPath) def applyToResultMember[T](name: Name, f: Symbol => T) = exitingTyper(f(resultSymbol.info.nonPrivateDecl(name))) /* typeOf lookup with encoding */ diff --git a/src/repl/scala/tools/nsc/interpreter/Imports.scala b/src/repl/scala/tools/nsc/interpreter/Imports.scala index 5244858a62..97798cd017 100644 --- a/src/repl/scala/tools/nsc/interpreter/Imports.scala +++ b/src/repl/scala/tools/nsc/interpreter/Imports.scala @@ -92,7 +92,7 @@ trait Imports { * last one imported is actually usable. */ case class ComputedImports(prepend: String, append: String, access: String) - protected def importsCode(wanted: Set[Name], wrapper: Request#Wrapper): ComputedImports = { + protected def importsCode(wanted: Set[Name], wrapper: Request#Wrapper, definesClass: Boolean): ComputedImports = { /** Narrow down the list of requests from which imports * should be taken. Removes requests which cannot contribute * useful imports for the specified set of wanted names. @@ -107,6 +107,8 @@ trait Imports { // Single symbol imports might be implicits! See bug #1752. Rather than // try to finesse this, we will mimic all imports for now. def keepHandler(handler: MemberHandler) = handler match { + /* While defining classes in class based mode - implicits are not needed. */ + case h: ImportHandler if isClassBased && definesClass => h.importedNames.exists(x => wanted.contains(x)) case _: ImportHandler => true case x => x.definesImplicit || (x.definedNames exists wanted) } @@ -146,7 +148,10 @@ trait Imports { // loop through previous requests, adding imports for each one wrapBeforeAndAfter { + // Reusing a single temporary value when import from a line with multiple definitions. + val tempValLines = mutable.Set[Int]() for (ReqAndHandler(req, handler) <- reqsToUse) { + val objName = req.lineRep.readPathInstance handler match { // If the user entered an import, then just use it; add an import wrapping // level if the import might conflict with some other import @@ -157,6 +162,18 @@ trait Imports { code append (x.member + "\n") currentImps ++= x.importedNames + case x if isClassBased => + for (imv <- x.definedNames) { + if (!currentImps.contains(imv)) { + val valName = req.lineRep.packageName + req.lineRep.readName + if (!tempValLines.contains(req.lineRep.lineId)) { + code.append(s"val $valName = $objName\n") + tempValLines += req.lineRep.lineId + } + code.append(s"import $valName ${req.accessPath}.`$imv`;\n") + currentImps += imv + } + } // For other requests, import each defined name. // import them explicitly instead of with _, so that // ambiguity errors will not be generated. 
Also, quote diff --git a/test/files/run/repl-serialization.check b/test/files/run/repl-serialization.check new file mode 100644 index 0000000000..c8439ea023 --- /dev/null +++ b/test/files/run/repl-serialization.check @@ -0,0 +1,21 @@ +== evaluating lines +extract: AnyRef => Unit = + evaluating x +x: Int = 0 +y: Int = + evaluating z + evaluating zz +defined class D +z: Int = 0 +zz: Int = 0 +defined object O +defined class A +defined type alias AA +== evaluating lambda + evaluating y + evaluating O + constructing A +== reconstituting into a fresh classloader + evaluating O +== evaluating reconstituted lambda + constructing A diff --git a/test/files/run/repl-serialization.scala b/test/files/run/repl-serialization.scala new file mode 100644 index 0000000000..64915ce51e --- /dev/null +++ b/test/files/run/repl-serialization.scala @@ -0,0 +1,65 @@ +import java.io._ + +import scala.reflect.io.AbstractFile +import scala.tools.nsc.Settings +import scala.tools.nsc.interpreter.IMain +import scala.tools.nsc.util._ +import scala.reflect.internal.util.AbstractFileClassLoader + +object Test { + def main(args: Array[String]) { + run() + } + + def run(): Unit = { + val settings = new Settings() + settings.Yreplclassbased.value = true + settings.usejavacp.value = true + + var imain: IMain = null + object extract extends ((AnyRef) => Unit) with Serializable { + var value: AnyRef = null + + def apply(a: AnyRef) = value = a + } + + val code = + """val x = {println(" evaluating x"); 0 } + |lazy val y = {println(" evaluating y"); 0 } + |class D; val z = {println(" evaluating z"); 0}; val zz = {println(" evaluating zz"); 0} + |object O extends Serializable { val apply = {println(" evaluating O"); 0} } + |class A(i: Int) { println(" constructing A") } + |type AA = A + |extract(() => new AA(x + y + z + zz + O.apply)) + """.stripMargin + + imain = new IMain(settings) + println("== evaluating lines") + imain.directBind("extract", "(AnyRef => Unit)", extract) + code.lines.foreach(imain.interpret) + + val virtualFile: AbstractFile = extract.value.getClass.getClassLoader.asInstanceOf[AbstractFileClassLoader].root + val newLoader = new AbstractFileClassLoader(virtualFile, getClass.getClassLoader) + + def deserializeInNewLoader(string: Array[Byte]): AnyRef = { + val bis = new ByteArrayInputStream(string) + val in = new ObjectInputStream(bis) { + override def resolveClass(desc: ObjectStreamClass) = Class.forName(desc.getName, false, newLoader) + } + in.readObject() + } + def serialize(o: AnyRef): Array[Byte] = { + val bos = new ByteArrayOutputStream() + val out = new ObjectOutputStream(bos) + out.writeObject(o) + out.close() + bos.toByteArray + } + println("== evaluating lambda") + extract.value.asInstanceOf[() => Any].apply() + println("== reconstituting into a fresh classloader") + val reconstituted = deserializeInNewLoader(serialize(extract.value)).asInstanceOf[() => Any] + println("== evaluating reconstituted lambda") + reconstituted.apply() // should not print("evaluating x") a second time + } +} diff --git a/test/files/run/t7747-repl.check b/test/files/run/t7747-repl.check index 105b238d01..6eb8c8cd99 100644 --- a/test/files/run/t7747-repl.check +++ b/test/files/run/t7747-repl.check @@ -112,7 +112,7 @@ scala> 55 ; ((2 + 2)) ; (1, 2, 3) res15: (Int, Int, Int) = (1,2,3) scala> 55 ; (x: Int) => x + 1 ; () => ((5)) -:8: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:9: warning: a pure expression does nothing in statement position; you may be omitting necessary 
parentheses 55 ; (x: Int) => x + 1 ;; ^ res16: () => Int = @@ -258,12 +258,15 @@ class $read extends Serializable { super.; () }; - import $line44.$read.$iw.$iw.BippyBups; - import $line44.$read.$iw.$iw.BippyBups; - import $line45.$read.$iw.$iw.PuppyPups; - import $line45.$read.$iw.$iw.PuppyPups; - import $line46.$read.$iw.$iw.Bingo; - import $line46.$read.$iw.$iw.Bingo; + val $line44$read = $line44.$read.INSTANCE; + import $line44$read.$iw.$iw.BippyBups; + import $line44$read.$iw.$iw.BippyBups; + val $line45$read = $line45.$read.INSTANCE; + import $line45$read.$iw.$iw.PuppyPups; + import $line45$read.$iw.$iw.PuppyPups; + val $line46$read = $line46.$read.INSTANCE; + import $line46$read.$iw.$iw.Bingo; + import $line46$read.$iw.$iw.Bingo; class $iw extends Serializable { def () = { super.; @@ -275,12 +278,23 @@ class $read extends Serializable { }; val $iw = new $iw. } -object $read extends $read { +object $read extends scala.AnyRef { def () = { super.; () - } + }; + val INSTANCE = new $read. } res3: List[Product with Serializable] = List(BippyBups(), PuppyPups(), Bingo()) +scala> :power +** Power User mode enabled - BEEP WHIR GYVE ** +** :phase has been set to 'typer'. ** +** scala.tools.nsc._ has been imported ** +** global._, definitions._ also imported ** +** Try :help, :vals, power. ** + +scala> intp.lastRequest +res4: $r.intp.Request = Request(line=def $ires3 = intp.global, 1 trees) + scala> :quit diff --git a/test/files/run/t7747-repl.scala b/test/files/run/t7747-repl.scala index 0e64210460..a681eb8851 100644 --- a/test/files/run/t7747-repl.scala +++ b/test/files/run/t7747-repl.scala @@ -65,5 +65,7 @@ object Test extends ReplTest { |case class PuppyPups() |case class Bingo() |List(BippyBups(), PuppyPups(), Bingo()) // show + |:power + |intp.lastRequest |""".stripMargin } -- cgit v1.2.3 From db114aa44efe63763a883dcf07d667fe7d744ff6 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 26 May 2015 16:42:01 +1000 Subject: SI-7747 Limit previous change to imports of REPL vals We only need to introduce the temporary val in the imports wrapper when we are importing a val or module defined in the REPL. The test case from the previous commit still passes, but we are generating slightly simpler code. 
Compared to 2.11.6, these two commits result in the following diff: https://gist.github.com/retronym/aa4bd3aeef1ab1b85fe9 --- src/repl/scala/tools/nsc/interpreter/Imports.scala | 15 ++++++++++----- test/files/run/t7747-repl.check | 15 ++++++--------- 2 files changed, 16 insertions(+), 14 deletions(-) diff --git a/src/repl/scala/tools/nsc/interpreter/Imports.scala b/src/repl/scala/tools/nsc/interpreter/Imports.scala index 97798cd017..c52869e208 100644 --- a/src/repl/scala/tools/nsc/interpreter/Imports.scala +++ b/src/repl/scala/tools/nsc/interpreter/Imports.scala @@ -165,12 +165,17 @@ trait Imports { case x if isClassBased => for (imv <- x.definedNames) { if (!currentImps.contains(imv)) { - val valName = req.lineRep.packageName + req.lineRep.readName - if (!tempValLines.contains(req.lineRep.lineId)) { - code.append(s"val $valName = $objName\n") - tempValLines += req.lineRep.lineId + x match { + case _: ValHandler | _: ModuleHandler => + val valName = req.lineRep.packageName + req.lineRep.readName + if (!tempValLines.contains(req.lineRep.lineId)) { + code.append(s"val $valName = $objName\n") + tempValLines += req.lineRep.lineId + } + code.append(s"import $valName ${req.accessPath}.`$imv`;\n") + case _ => + code.append("import " + objName + req.accessPath + ".`" + imv + "`\n") } - code.append(s"import $valName ${req.accessPath}.`$imv`;\n") currentImps += imv } } diff --git a/test/files/run/t7747-repl.check b/test/files/run/t7747-repl.check index 6eb8c8cd99..af47f23c62 100644 --- a/test/files/run/t7747-repl.check +++ b/test/files/run/t7747-repl.check @@ -258,15 +258,12 @@ class $read extends Serializable { super.; () }; - val $line44$read = $line44.$read.INSTANCE; - import $line44$read.$iw.$iw.BippyBups; - import $line44$read.$iw.$iw.BippyBups; - val $line45$read = $line45.$read.INSTANCE; - import $line45$read.$iw.$iw.PuppyPups; - import $line45$read.$iw.$iw.PuppyPups; - val $line46$read = $line46.$read.INSTANCE; - import $line46$read.$iw.$iw.Bingo; - import $line46$read.$iw.$iw.Bingo; + import $line44.$read.INSTANCE.$iw.$iw.BippyBups; + import $line44.$read.INSTANCE.$iw.$iw.BippyBups; + import $line45.$read.INSTANCE.$iw.$iw.PuppyPups; + import $line45.$read.INSTANCE.$iw.$iw.PuppyPups; + import $line46.$read.INSTANCE.$iw.$iw.Bingo; + import $line46.$read.INSTANCE.$iw.$iw.Bingo; class $iw extends Serializable { def () = { super.; -- cgit v1.2.3 From 1f5c3f8ba84a8d85502f7f54926b1e220c1b4129 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Tue, 26 May 2015 14:33:38 +0200 Subject: Fix several tests under GenBCode - private-inline, t8601-closure-elim, inline-in-constructors - test closure inlining / elimination, which is not yet implemented in GenBCode. noted in https://github.com/scala-opt/scala/issues/14. - constant-optimization, t7006 - no constant folding in GenBCode yet. noted in https://github.com/scala-opt/scala/issues/29. - patmat_opt_ignore_underscore, patmat_opt_no_nullcheck, patmat_opt_primitive_typetest - not all optimizations in GenBCode yet. noted in https://github.com/scala-opt/scala/issues/30. 
- t3234 - tests a warning of trait inlining - trait inlining works in GenBCode - synchronized - ignore inliner warnings (they changed a bit) - t6102 - account for the changed outputo of -Ydebug has under GenBCode --- test/files/instrumented/inline-in-constructors.flags | 2 +- test/files/jvm/constant-optimization/Foo_1.flags | 2 +- test/files/jvm/patmat_opt_ignore_underscore.flags | 2 +- test/files/jvm/patmat_opt_no_nullcheck.flags | 2 +- test/files/jvm/patmat_opt_primitive_typetest.flags | 2 +- test/files/jvm/t7006/Foo_1.flags | 2 +- test/files/neg/t3234.flags | 2 +- test/files/run/private-inline.flags | 2 +- test/files/run/synchronized.check | 5 ----- test/files/run/synchronized.scala | 3 +++ test/files/run/t6102.check | 5 +++++ test/files/run/t8601-closure-elim.flags | 2 +- 12 files changed, 17 insertions(+), 14 deletions(-) diff --git a/test/files/instrumented/inline-in-constructors.flags b/test/files/instrumented/inline-in-constructors.flags index 068318e8ac..d1ebc4c940 100644 --- a/test/files/instrumented/inline-in-constructors.flags +++ b/test/files/instrumented/inline-in-constructors.flags @@ -1 +1 @@ --optimise -Ydelambdafy:inline +-optimise -Ydelambdafy:inline -Ybackend:GenASM diff --git a/test/files/jvm/constant-optimization/Foo_1.flags b/test/files/jvm/constant-optimization/Foo_1.flags index 86f52af447..67a1dbe8da 100644 --- a/test/files/jvm/constant-optimization/Foo_1.flags +++ b/test/files/jvm/constant-optimization/Foo_1.flags @@ -1 +1 @@ --Ynooptimise -Yconst-opt \ No newline at end of file +-Ynooptimise -Yconst-opt -Ybackend:GenASM \ No newline at end of file diff --git a/test/files/jvm/patmat_opt_ignore_underscore.flags b/test/files/jvm/patmat_opt_ignore_underscore.flags index 1182725e86..2cd4b38726 100644 --- a/test/files/jvm/patmat_opt_ignore_underscore.flags +++ b/test/files/jvm/patmat_opt_ignore_underscore.flags @@ -1 +1 @@ --optimize \ No newline at end of file +-optimize -Ybackend:GenASM \ No newline at end of file diff --git a/test/files/jvm/patmat_opt_no_nullcheck.flags b/test/files/jvm/patmat_opt_no_nullcheck.flags index 1182725e86..2cd4b38726 100644 --- a/test/files/jvm/patmat_opt_no_nullcheck.flags +++ b/test/files/jvm/patmat_opt_no_nullcheck.flags @@ -1 +1 @@ --optimize \ No newline at end of file +-optimize -Ybackend:GenASM \ No newline at end of file diff --git a/test/files/jvm/patmat_opt_primitive_typetest.flags b/test/files/jvm/patmat_opt_primitive_typetest.flags index 49d036a887..b9bb09167e 100644 --- a/test/files/jvm/patmat_opt_primitive_typetest.flags +++ b/test/files/jvm/patmat_opt_primitive_typetest.flags @@ -1 +1 @@ --optimize +-optimize -Ybackend:GenASM diff --git a/test/files/jvm/t7006/Foo_1.flags b/test/files/jvm/t7006/Foo_1.flags index 37b2116413..29a9d424f0 100644 --- a/test/files/jvm/t7006/Foo_1.flags +++ b/test/files/jvm/t7006/Foo_1.flags @@ -1 +1 @@ --optimise -Ydebug -Xfatal-warnings +-optimise -Ydebug -Xfatal-warnings -Ybackend:GenASM diff --git a/test/files/neg/t3234.flags b/test/files/neg/t3234.flags index cc3d9fb6f0..406231bd96 100644 --- a/test/files/neg/t3234.flags +++ b/test/files/neg/t3234.flags @@ -1 +1 @@ --Yinline -Yinline-warnings -Xfatal-warnings +-Yinline -Yinline-warnings -Xfatal-warnings -Ybackend:GenASM diff --git a/test/files/run/private-inline.flags b/test/files/run/private-inline.flags index 00d3643fd4..c550fdce16 100644 --- a/test/files/run/private-inline.flags +++ b/test/files/run/private-inline.flags @@ -1 +1 @@ --optimise -Yinline-warnings +-optimise -Yinline-warnings -Ybackend:GenASM diff --git 
a/test/files/run/synchronized.check b/test/files/run/synchronized.check index 9add05ea0c..dd9f4ef424 100644 --- a/test/files/run/synchronized.check +++ b/test/files/run/synchronized.check @@ -1,8 +1,3 @@ -#partest !-Ybackend:GenBCode -warning: there were 14 inliner warnings; re-run with -Yinline-warnings for details -#partest -Ybackend:GenBCode -warning: there were 14 inliner warnings; re-run with -Yopt-warnings for details -#partest .|. c1.f1: OK .|. c1.fi: OK .|... c1.fv: OK diff --git a/test/files/run/synchronized.scala b/test/files/run/synchronized.scala index 1f0e32992b..077e9d02e8 100644 --- a/test/files/run/synchronized.scala +++ b/test/files/run/synchronized.scala @@ -1,3 +1,6 @@ +/* + * filter: inliner warnings; re-run with + */ import java.lang.Thread.holdsLock import scala.collection.mutable.StringBuilder diff --git a/test/files/run/t6102.check b/test/files/run/t6102.check index aa3e6cc9e2..07378f5ed4 100644 --- a/test/files/run/t6102.check +++ b/test/files/run/t6102.check @@ -28,5 +28,10 @@ [running phase constopt on t6102.scala] #partest [running phase dce on t6102.scala] +#partest !-Ybackend:GenBCode [running phase jvm on icode] +#partest -Ybackend:GenBCode +[running phase jvm on t6102.scala] +[running phase jvm on t6102.scala] +#partest hello diff --git a/test/files/run/t8601-closure-elim.flags b/test/files/run/t8601-closure-elim.flags index 2b5fd8a7b2..9158076b71 100644 --- a/test/files/run/t8601-closure-elim.flags +++ b/test/files/run/t8601-closure-elim.flags @@ -1 +1 @@ --optimize -Ydelambdafy:inline +-optimize -Ydelambdafy:inline -Ybackend:GenASM -- cgit v1.2.3 From 2be2c0d9866352286470e74a9bd878cbe3b37222 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 20 May 2015 16:21:40 +1000 Subject: Make two tests work under -Ydelambdafy:method Recently, in 029cce7, I changed uncurry to selectively fallback to the old method of emitting lambdas when we detect that `-Ydelambdafy:method`. The change in classfile names breaks the expectations of the test `innerClassAttribute`. This commit changes that test to avoid using specialized functions, so that under -Ydelambdafy:method all functions are uniform. This changes a few fresh suffixes for anonymous class names under both `-Ydelambdafy:{inline,method}`, so the expectations have been duly updated. Similarly, I have changed `javaReflection` in the same manner. Its checkfiles remained unchanged. 
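For illustration (a standalone sketch, not part of this patch): a function literal over primitive types can be compiled against a specialized function class, while one over reference types always takes the generic shape, which is why the test sources below switch to String-typed lambdas and so get uniform anonymous class names under both strategies.

```
class Example {
  val overPrimitives = () => 1           // may pick up a specialized Function0 variant
  val overReferences = (s: String) => s  // always the plain, unspecialized Function1 shape
}
```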
--- test/files/jvm/innerClassAttribute.check | 30 ++++++------- test/files/jvm/innerClassAttribute/Classes_1.scala | 30 ++++++------- test/files/jvm/innerClassAttribute/Test.scala | 49 ++++++++++++---------- test/files/jvm/javaReflection/Classes_1.scala | 18 ++++---- 4 files changed, 66 insertions(+), 61 deletions(-) diff --git a/test/files/jvm/innerClassAttribute.check b/test/files/jvm/innerClassAttribute.check index bb532e4f36..41448f359b 100644 --- a/test/files/jvm/innerClassAttribute.check +++ b/test/files/jvm/innerClassAttribute.check @@ -14,27 +14,27 @@ A19 / null / null A19 / null / null A19 / null / null -- A20 -- -A20$$anonfun$6 / null / null / 17 +A20$$anonfun$4 / null / null / 17 fun1: attribute for itself and the two child closures `() => ()` and `() => () => 1` -A20$$anonfun$6 / null / null / 17 -A20$$anonfun$6$$anonfun$apply$1 / null / null / 17 -A20$$anonfun$6$$anonfun$apply$3 / null / null / 17 +A20$$anonfun$4 / null / null / 17 +A20$$anonfun$4$$anonfun$apply$1 / null / null / 17 +A20$$anonfun$4$$anonfun$apply$2 / null / null / 17 fun2 () => (): itself and the outer closure -A20$$anonfun$6 / null / null / 17 -A20$$anonfun$6$$anonfun$apply$1 / null / null / 17 +A20$$anonfun$4 / null / null / 17 +A20$$anonfun$4$$anonfun$apply$1 / null / null / 17 fun3 () => () => (): itself, the outer closure and its child closure -A20$$anonfun$6 / null / null / 17 -A20$$anonfun$6$$anonfun$apply$3 / null / null / 17 -A20$$anonfun$6$$anonfun$apply$3$$anonfun$apply$2 / null / null / 17 +A20$$anonfun$4 / null / null / 17 +A20$$anonfun$4$$anonfun$apply$2 / null / null / 17 +A20$$anonfun$4$$anonfun$apply$2$$anonfun$apply$3 / null / null / 17 fun4: () => 1: itself and the two outer closures -A20$$anonfun$6 / null / null / 17 -A20$$anonfun$6$$anonfun$apply$3 / null / null / 17 -A20$$anonfun$6$$anonfun$apply$3$$anonfun$apply$2 / null / null / 17 +A20$$anonfun$4 / null / null / 17 +A20$$anonfun$4$$anonfun$apply$2 / null / null / 17 +A20$$anonfun$4$$anonfun$apply$2$$anonfun$apply$3 / null / null / 17 enclosing: nested closures have outer class defined, but no outer method A20 / null / null -A20$$anonfun$6 / null / null -A20$$anonfun$6 / null / null -A20$$anonfun$6$$anonfun$apply$3 / null / null +A20$$anonfun$4 / null / null +A20$$anonfun$4 / null / null +A20$$anonfun$4$$anonfun$apply$2 / null / null #partest -Ydelambdafy:method -- A4 -- null / null / null diff --git a/test/files/jvm/innerClassAttribute/Classes_1.scala b/test/files/jvm/innerClassAttribute/Classes_1.scala index fb1f32aa3d..62c7d94d90 100644 --- a/test/files/jvm/innerClassAttribute/Classes_1.scala +++ b/test/files/jvm/innerClassAttribute/Classes_1.scala @@ -12,8 +12,8 @@ object A3 { } class A4 { - def f(l: List[Int]): List[Int] = { - l map (_ + 1) + def f(l: List[String]): List[String] = { + l map (_ + "1") } } @@ -114,21 +114,21 @@ class A18 { } class A19 { - ((x: Int) => x + 3) + ((x: String) => x + "3") val x = { - ((x: Int) => x + 1) + ((x: String) => x + "1") } { - ((x: Int) => x + 2) + ((x: String) => x + "2") } } class A20 { - () => { - {() => ()} - {() => () => 1} + (s: String) => { + {(s: String) => ()} + {(s: String) => (s: String) => 1} } } @@ -189,13 +189,13 @@ trait A24 extends A24Base { class SI_9105 { // the EnclosingMethod attributes depend on the delambdafy strategy (inline vs method) - // outerClass-inline enclMeth-inline outerClass-method enclMeth-method - val fun = () => { + // outerClass-inline enclMeth-inline outerClass-method enclMeth-method + val fun = (s: String) => { class A // closure null (*) SI_9105 null def 
m: Object = { class B; new B } // closure m$1 SI_9105 m$1 val f: Object = { class C; new C } // closure null (*) SI_9105 null } - def met = () => { + def met = (s: String) => { class D // closure null (*) SI_9105 met def m: Object = { class E; new E } // closure m$1 SI_9105 m$1 val f: Object = { class F; new F } // closure null (*) SI_9105 met @@ -210,17 +210,19 @@ class SI_9105 { // So using `null` looks more like the situation in the source code: C / F are nested classes of the anon-fun, and // there's no method in between. - def byName[T](op: => T) = 0 + def byName(op: => Any) = 0 val bnV = byName { class G // closure null (*) SI_9105 null def m: Object = { class H; new H } // closure m$1 SI_9105 m$1 val f: Object = { class I; new I } // closure null (*) SI_9105 null + "" } def bnM = byName { class J // closure null (*) SI_9105 bnM def m: Object = { class K; new K } // closure m$1 SI_9105 m$1 val f: Object = { class L; new L } // closure null (*) SI_9105 bnM + "" } } @@ -283,8 +285,8 @@ object NestedInValueClass { class A(val arg: String) extends AnyVal { // A has InnerClass entries for the two closures (and for A and A$). not for B / C def f = { - def g = List().map(x => (() => x)) // outer class A, no outer method (g is moved to the companion, doesn't exist in A) - g.map(x => (() => x)) // outer class A, outer method f + def g = List().map(x => ((s: String) => x)) // outer class A, no outer method (g is moved to the companion, doesn't exist in A) + g.map(x => ((s: String) => x)) // outer class A, outer method f } // statements and field declarations are not allowed in value classes } diff --git a/test/files/jvm/innerClassAttribute/Test.scala b/test/files/jvm/innerClassAttribute/Test.scala index bc9aa2376a..376b3c895b 100644 --- a/test/files/jvm/innerClassAttribute/Test.scala +++ b/test/files/jvm/innerClassAttribute/Test.scala @@ -5,11 +5,14 @@ import asm.{Opcodes => Flags} import scala.collection.JavaConverters._ object Test extends BytecodeTest { + // Helpful for debugging the test: + // println(new java.io.File(classpath.asURLs.head.toURI).list().sorted.mkString("\n")) + def assertSame(a: Any, b: Any) = { assert(a == b, s"\na: $a\nb: $b") } - val publicStatic = Flags.ACC_PUBLIC | Flags.ACC_STATIC + val publicStatic = Flags.ACC_PUBLIC | Flags.ACC_STATIC val publicAbstractInterface = Flags.ACC_PUBLIC | Flags.ACC_ABSTRACT | Flags.ACC_INTERFACE def innerClassNodes(className: String): List[InnerClassNode] = { @@ -266,10 +269,10 @@ object Test extends BytecodeTest { printInnerClassNodes("A20") - val fun1 = lambdaClass("A20$$anonfun$6", "A20$lambda$1") - val fun2 = lambdaClass("A20$$anonfun$6$$anonfun$apply$1", "A20$lambda$$$nestedInAnonfun$5$1") - val fun3 = lambdaClass("A20$$anonfun$6$$anonfun$apply$3", "A20$lambda$$$nestedInAnonfun$5$2") - val fun4 = lambdaClass("A20$$anonfun$6$$anonfun$apply$3$$anonfun$apply$2", "A20$lambda$$$nestedInAnonfun$7$1") + val fun1 = lambdaClass("A20$$anonfun$4", "A20$lambda$1") + val fun2 = lambdaClass("A20$$anonfun$4$$anonfun$apply$1", "A20$lambda$$$nestedInAnonfun$5$1") + val fun3 = lambdaClass("A20$$anonfun$4$$anonfun$apply$2", "A20$lambda$$$nestedInAnonfun$5$2") + val fun4 = lambdaClass("A20$$anonfun$4$$anonfun$apply$2$$anonfun$apply$3", "A20$lambda$$$nestedInAnonfun$7$1") println("fun1: attribute for itself and the two child closures `() => ()` and `() => () => 1`") printInnerClassNodes(fun1) @@ -339,9 +342,9 @@ object Test extends BytecodeTest { assertEnclosingMethod ("SI_9105$A$3" , "SI_9105", null , null) assertEnclosingMethod ("SI_9105$B$5" , 
"SI_9105", "m$1", "()Ljava/lang/Object;") assertEnclosingMethod ("SI_9105$C$1" , "SI_9105", null , null) - assertEnclosingMethod ("SI_9105$D$1" , "SI_9105", "met", "()Lscala/Function0;") + assertEnclosingMethod ("SI_9105$D$1" , "SI_9105", "met", "()Lscala/Function1;") assertEnclosingMethod ("SI_9105$E$1" , "SI_9105", "m$3", "()Ljava/lang/Object;") - assertEnclosingMethod ("SI_9105$F$1" , "SI_9105", "met", "()Lscala/Function0;") + assertEnclosingMethod ("SI_9105$F$1" , "SI_9105", "met", "()Lscala/Function1;") assertNoEnclosingMethod("SI_9105$lambda$$met$1") assertNoEnclosingMethod("SI_9105$lambda$1") assertNoEnclosingMethod("SI_9105") @@ -366,35 +369,35 @@ object Test extends BytecodeTest { assert(innerClassNodes("SI_9105").length == 12) // the 12 local classes } else { // comment in innerClassAttribute/Classes_1.scala explains the difference between A / C and D / F. - assertEnclosingMethod ("SI_9105$$anonfun$4$A$3" , "SI_9105$$anonfun$4" , null , null) - assertEnclosingMethod ("SI_9105$$anonfun$4$B$5" , "SI_9105$$anonfun$4" , "m$1" , "()Ljava/lang/Object;") - assertEnclosingMethod ("SI_9105$$anonfun$4$C$1" , "SI_9105$$anonfun$4" , null , null) + assertEnclosingMethod ("SI_9105$$anonfun$5$A$3" , "SI_9105$$anonfun$5" , null , null) + assertEnclosingMethod ("SI_9105$$anonfun$5$B$5" , "SI_9105$$anonfun$5" , "m$1" , "()Ljava/lang/Object;") + assertEnclosingMethod ("SI_9105$$anonfun$5$C$1" , "SI_9105$$anonfun$5" , null , null) assertEnclosingMethod ("SI_9105$$anonfun$met$1$D$1", "SI_9105$$anonfun$met$1", null , null) assertEnclosingMethod ("SI_9105$$anonfun$met$1$E$1", "SI_9105$$anonfun$met$1", "m$3" , "()Ljava/lang/Object;") assertEnclosingMethod ("SI_9105$$anonfun$met$1$F$1", "SI_9105$$anonfun$met$1", null , null) - assertEnclosingMethod ("SI_9105$$anonfun$4" , "SI_9105" , null , null) - assertEnclosingMethod ("SI_9105$$anonfun$met$1" , "SI_9105" , "met" , "()Lscala/Function0;") + assertEnclosingMethod ("SI_9105$$anonfun$5" , "SI_9105" , null , null) + assertEnclosingMethod ("SI_9105$$anonfun$met$1" , "SI_9105" , "met" , "()Lscala/Function1;") assertNoEnclosingMethod("SI_9105") - assertLocal(ownInnerClassNode("SI_9105$$anonfun$4$A$3"), "SI_9105$$anonfun$4$A$3" , "A$3") - assertLocal(ownInnerClassNode("SI_9105$$anonfun$4$B$5"), "SI_9105$$anonfun$4$B$5" , "B$5") - assertLocal(ownInnerClassNode("SI_9105$$anonfun$4$C$1"), "SI_9105$$anonfun$4$C$1" , "C$1") + assertLocal(ownInnerClassNode("SI_9105$$anonfun$5$A$3"), "SI_9105$$anonfun$5$A$3" , "A$3") + assertLocal(ownInnerClassNode("SI_9105$$anonfun$5$B$5"), "SI_9105$$anonfun$5$B$5" , "B$5") + assertLocal(ownInnerClassNode("SI_9105$$anonfun$5$C$1"), "SI_9105$$anonfun$5$C$1" , "C$1") assertLocal(ownInnerClassNode("SI_9105$$anonfun$met$1$D$1"), "SI_9105$$anonfun$met$1$D$1", "D$1") assertLocal(ownInnerClassNode("SI_9105$$anonfun$met$1$E$1"), "SI_9105$$anonfun$met$1$E$1", "E$1") assertLocal(ownInnerClassNode("SI_9105$$anonfun$met$1$F$1"), "SI_9105$$anonfun$met$1$F$1", "F$1") // by-name - assertEnclosingMethod("SI_9105$$anonfun$5$G$1", "SI_9105$$anonfun$5", null, null) - assertEnclosingMethod("SI_9105$$anonfun$5$H$1", "SI_9105$$anonfun$5", "m$2", "()Ljava/lang/Object;") - assertEnclosingMethod("SI_9105$$anonfun$5$I$1", "SI_9105$$anonfun$5", null, null) + assertEnclosingMethod("SI_9105$$anonfun$6$G$1", "SI_9105$$anonfun$6", null, null) + assertEnclosingMethod("SI_9105$$anonfun$6$H$1", "SI_9105$$anonfun$6", "m$2", "()Ljava/lang/Object;") + assertEnclosingMethod("SI_9105$$anonfun$6$I$1", "SI_9105$$anonfun$6", null, null) 
assertEnclosingMethod("SI_9105$$anonfun$bnM$1$J$1", "SI_9105$$anonfun$bnM$1", null, null) assertEnclosingMethod("SI_9105$$anonfun$bnM$1$K$2", "SI_9105$$anonfun$bnM$1", "m$4", "()Ljava/lang/Object;") assertEnclosingMethod("SI_9105$$anonfun$bnM$1$L$1", "SI_9105$$anonfun$bnM$1", null, null) - assertAnonymous(ownInnerClassNode("SI_9105$$anonfun$4"), "SI_9105$$anonfun$4") + assertAnonymous(ownInnerClassNode("SI_9105$$anonfun$5"), "SI_9105$$anonfun$5") assertAnonymous(ownInnerClassNode("SI_9105$$anonfun$met$1"), "SI_9105$$anonfun$met$1") - assert(innerClassNodes("SI_9105$$anonfun$4").length == 4) // itself and three of the local classes + assert(innerClassNodes("SI_9105$$anonfun$5").length == 4) // itself and three of the local classes assert(innerClassNodes("SI_9105$$anonfun$met$1").length == 4) // itself and three of the local classes assert(innerClassNodes("SI_9105").length == 4) // the four anon funs } @@ -474,7 +477,7 @@ object Test extends BytecodeTest { testInner("ImplClassesAreTopLevel$B2$1$class", b2) testInner("ImplClassesAreTopLevel$B3$1$class", b3) testInner("ImplClassesAreTopLevel$B4$class", b4) - + testInner("ImplClassesAreTopLevel$B1", b1) testInner("ImplClassesAreTopLevel$B2$1", b2) testInner("ImplClassesAreTopLevel$B3$1", b3) @@ -533,13 +536,13 @@ object Test extends BytecodeTest { "NestedInValueClass$A$lambda$$g$2$1", "NestedInValueClass$A$lambda$$f$extension$1", "NestedInValueClass$A$lambda$$$nestedInAnonfun$13$1", - "NestedInValueClass$A$lambda$$$nestedInAnonfun$15$1").foreach(assertNoEnclosingMethod) + "NestedInValueClass$A$lambda$$NestedInValueClass$A$$$nestedInAnonfun$15$1").foreach(assertNoEnclosingMethod) testInner("NestedInValueClass$A", a, am) testInner("NestedInValueClass$A$", a, am, b, c) testInner("NestedInValueClass$A$lambda$$g$2$1", am) testInner("NestedInValueClass$A$lambda$$f$extension$1", am) testInner("NestedInValueClass$A$lambda$$$nestedInAnonfun$13$1", am) - testInner("NestedInValueClass$A$lambda$$$nestedInAnonfun$15$1", am) + testInner("NestedInValueClass$A$lambda$$NestedInValueClass$A$$$nestedInAnonfun$15$1", am) } else { assertEnclosingMethod("NestedInValueClass$A$$anonfun$g$2$1" , "NestedInValueClass$A" , null, null) assertEnclosingMethod("NestedInValueClass$A$$anonfun$g$2$1$$anonfun$apply$4" , "NestedInValueClass$A$$anonfun$g$2$1" , null, null) diff --git a/test/files/jvm/javaReflection/Classes_1.scala b/test/files/jvm/javaReflection/Classes_1.scala index 11963e2770..e9cd4f756a 100644 --- a/test/files/jvm/javaReflection/Classes_1.scala +++ b/test/files/jvm/javaReflection/Classes_1.scala @@ -13,7 +13,7 @@ class A { trait C object D new T { } - (() => -1) + (() => "-1") def f = { class KB } } @@ -21,21 +21,21 @@ class A { new T { } // anonymous function, not a member - (() => 1) + (() => "1") def f = { class E trait F object G new T { } - (() => 2) + (() => "2") if (new Object().hashCode == 1) { class H trait I object J new T { } - (() => 3) + (() => "3") } else { () } @@ -46,7 +46,7 @@ class A { trait L object M new T { } - (() => 4) + (() => "4") } val x = { @@ -54,7 +54,7 @@ class A { trait O object P new T { } - (() => 5) + (() => "5") } def this(x: Int) { @@ -63,7 +63,7 @@ class A { trait R object S new T { } - (() => () => 5) + (() => () => "5") } } @@ -72,7 +72,7 @@ object AO { trait C object D new T { } - (() => 1) + (() => "1") } trait AT { @@ -80,5 +80,5 @@ trait AT { trait C object D new T { } - (() => 1) + (() => "1") } -- cgit v1.2.3 From 96571a9688401865967e680c77109bd70f1cd7e9 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Tue, 26 May 
2015 10:27:41 -0700 Subject: Update README.md --- README.md | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 5bcbbb6229..e722c88e41 100644 --- a/README.md +++ b/README.md @@ -13,15 +13,15 @@ If you need some help with your PR at any time, please feel free to @-mention an | username | talk to me about... | --------------------------------------------------------------------------------------------------|----------------------------------------------------------------|---------------------------------------------------| - | [`@adriaanm`](https://github.com/adriaanm) | anything (type checker, pattern matcher, CI,...) | - | [`@gkossakowski`](https://github.com/gkossakowski) | infrastructure, incremental compilation, back-end | + | [`@adriaanm`](https://github.com/adriaanm) | how we can help // type checker, pattern matcher, infrastructure | + | [`@SethTisue`](https://github.com/SethTisue) | back-end, library, improving the *welcome to Scala* experience | | [`@retronym`](https://github.com/retronym) | Java 8 lambdas, tricky bug detective work | | [`@Ichoran`](https://github.com/Ichoran) | the collections library, performance | | [`@lrytz`](https://github.com/lrytz) | optimizer, named & default arguments | - | [`@dickwall`](https://github.com/dickwall) | process & documentation | | [`@VladUreche`](https://github.com/VladUreche) | specialization & the scaladoc tool | | [`@densh`](https://github.com/densh) | quasiquotes, parser, string interpolators, macros in standard library | | [`@xeno-by`](https://github.com/xeno-by) | macros and reflection | + | [`@dickwall`](https://github.com/dickwall) | process & community | PS: If you have some spare time to help out around here, we would be delighted to add your name to this list! @@ -29,8 +29,8 @@ PS: If you have some spare time to help out around here, we would be delighted t # Handy Links - [A wealth of documentation](http://docs.scala-lang.org) - [Scala CI](https://scala-ci.typesafe.com/) - - [Scala CI at EPFL](https://scala-webapps.epfl.ch/jenkins/) - [Download the latest nightly](http://www.scala-lang.org/files/archive/nightly/2.11.x/); + - [(Deprecated) Scala CI at EPFL](https://scala-webapps.epfl.ch/jenkins/) - Scala mailing lists: - [Compiler and standard library development](https://groups.google.com/group/scala-internals) - [Users of Scala](https://groups.google.com/group/scala-user) @@ -91,6 +91,12 @@ To help you plan your contributions, we communicate our plans on a regular basis Once you've gained some experience with the code base and the process, the logical next step is to offers reviews for others's contributions. The main goal of this whole process, in the end, is to ensure the health of the Scala project by improving the quality of the code base, the documentation, as well as this process itself. Thank you for doing your part! 
+## [Labels](https://github.com/scala/scala/labels) + - `reviewed` automatically added by scabot when a comment prefixed with LGTM is posted + - `welcome` reviewer / queue curator adds to welcome someone's first PR (for highlighting in the release notes) + - `release-notes` reviewer / queue curator adds to make sure this PR is highlighted in the release notes + - `on-hold` added when this PR should not yet be merged, even though CI is green + ### Tips & Tricks Once the `publish-core` task has completed on a commit, you can try it out in sbt as follows: -- cgit v1.2.3 From 3e159fa67afe8992a62ebdb00a324e4f133590cb Mon Sep 17 00:00:00 2001 From: Sean Riggin Date: Wed, 20 May 2015 14:03:55 -0600 Subject: SI-9322 Elapsed times in compiler calculated with System.currentTimeMillis and System.nanoTime Reverted elapsedTime calculation in compiler to use System.currentTimeMillis, consistent with the start time. --- src/compiler/scala/tools/nsc/Global.scala | 3 ++- src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala | 3 +-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index b233acf271..4430a84f06 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -10,7 +10,6 @@ package nsc import java.io.{ File, FileOutputStream, PrintWriter, IOException, FileNotFoundException } import java.net.URL import java.nio.charset.{ Charset, CharsetDecoder, IllegalCharsetNameException, UnsupportedCharsetException } -import scala.compat.Platform.currentTime import scala.collection.{ mutable, immutable } import io.{ SourceReader, AbstractFile, Path } import reporters.{ Reporter, ConsoleReporter } @@ -1487,6 +1486,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter) compileUnitsInternal(units, fromPhase) private def compileUnitsInternal(units: List[CompilationUnit], fromPhase: Phase) { + def currentTime = java.util.concurrent.TimeUnit.NANOSECONDS.toMillis(System.nanoTime()) + units foreach addUnit val startTime = currentTime diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala index 8fd2ea45e4..a22428075c 100644 --- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala +++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala @@ -8,7 +8,6 @@ package symtab import classfile.ClassfileParser import java.io.IOException -import scala.compat.Platform.currentTime import scala.reflect.internal.MissingRequirementError import scala.reflect.internal.util.Statistics import scala.reflect.io.{ AbstractFile, NoAbstractFile } @@ -207,7 +206,7 @@ abstract class SymbolLoaders { override def complete(root: Symbol) { try { - val start = currentTime + val start = java.util.concurrent.TimeUnit.NANOSECONDS.toMillis(System.nanoTime()) val currentphase = phase doComplete(root) phase = currentphase -- cgit v1.2.3 From 4d2a92c0dcf5f18bd7c77c2ae4b7a04c2e472a03 Mon Sep 17 00:00:00 2001 From: Zhong Sheng Date: Wed, 27 May 2015 17:47:21 +0800 Subject: fix BigDecimal loosing MathContext --- src/library/scala/math/BigDecimal.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/library/scala/math/BigDecimal.scala b/src/library/scala/math/BigDecimal.scala index d6e2963ad8..6bb35606a6 100644 --- a/src/library/scala/math/BigDecimal.scala +++ b/src/library/scala/math/BigDecimal.scala @@ -49,7 +49,7 @@ object BigDecimal { /** Constructs a `BigDecimal` using the decimal text 
representation of `Double` value `d`, rounding if necessary. */ def decimal(d: Double, mc: MathContext): BigDecimal = - new BigDecimal(new BigDec(java.lang.Double.toString(d), mc)) + new BigDecimal(new BigDec(java.lang.Double.toString(d), mc), mc) /** Constructs a `BigDecimal` using the decimal text representation of `Double` value `d`. */ def decimal(d: Double): BigDecimal = decimal(d, defaultMathContext) @@ -59,7 +59,7 @@ object BigDecimal { * `0.1 != 0.1f`. */ def decimal(f: Float, mc: MathContext): BigDecimal = - new BigDecimal(new BigDec(java.lang.Float.toString(f), mc)) + new BigDecimal(new BigDec(java.lang.Float.toString(f), mc), mc) /** Constructs a `BigDecimal` using the decimal text representation of `Float` value `f`. * Note that `BigDecimal.decimal(0.1f) != 0.1f` since equality agrees with the `Double` representation, and -- cgit v1.2.3 From ee28b3a917b619ed139c0b079122d6250f8b43a2 Mon Sep 17 00:00:00 2001 From: kenji yoshida <6b656e6a69@gmail.com> Date: Thu, 28 May 2015 00:27:39 +0900 Subject: fix typo --- spec/06-expressions.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/spec/06-expressions.md b/spec/06-expressions.md index da9e21f267..85e288bf5f 100644 --- a/spec/06-expressions.md +++ b/spec/06-expressions.md @@ -512,7 +512,7 @@ In the latter case, the concrete self type of the expression is the compound type `$T$ with $x$.type`. The expression is evaluated by creating a fresh -object of type $T$ which is is initialized by evaluating $c$. The +object of type $T$ which is initialized by evaluating $c$. The type of the expression is $T$. A general instance creation expression is of the form -- cgit v1.2.3 From 5b02bb4f8993f8e09ff1223fbdd4acf78eabd964 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Wed, 27 May 2015 12:07:10 -0700 Subject: SI-9332 Iterator.span exhausts leading iterator Since the leading and trailing iterators returned by span share the underlying iterator, the leading iterator must flag when it is exhausted (when the span predicate fails) since the trailing iterator will advance the underlying iterator. It would also be possible to leave the failing element in the leading lookahead buffer, where it would forever fail the predicate, but that entails evaluating the predicate twice, on both enqueue and dequeue. --- src/library/scala/collection/Iterator.scala | 29 +++++++++++--------------- test/junit/scala/collection/IteratorTest.scala | 10 +++++++++ 2 files changed, 22 insertions(+), 17 deletions(-) diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala index 0783beac0f..0f6ae47e89 100644 --- a/src/library/scala/collection/Iterator.scala +++ b/src/library/scala/collection/Iterator.scala @@ -10,7 +10,7 @@ package scala package collection import mutable.ArrayBuffer -import scala.annotation.migration +import scala.annotation.{ migration, tailrec } import immutable.Stream import scala.collection.generic.CanBuildFrom import scala.annotation.unchecked.{ uncheckedVariance => uV } @@ -580,29 +580,24 @@ trait Iterator[+A] extends TraversableOnce[A] { def span(p: A => Boolean): (Iterator[A], Iterator[A]) = { val self = buffered - /* - * Giving a name to following iterator (as opposed to trailing) because - * anonymous class is represented as a structural type that trailing - * iterator is referring (the finish() method) and thus triggering - * handling of structural calls. It's not what's intended here. 
- */ + // Must be a named class to avoid structural call to finish from trailing iterator class Leading extends AbstractIterator[A] { - val lookahead = new mutable.Queue[A] - def advance() = { - self.hasNext && p(self.head) && { + private val lookahead = new mutable.Queue[A] + private var finished = false + private def advance() = !finished && { + if (self.hasNext && p(self.head)) { lookahead += self.next true + } else { + finished = true + false } } - def finish() = { - while (advance()) () - } + @tailrec final def finish(): Unit = if (advance()) finish() def hasNext = lookahead.nonEmpty || advance() def next() = { - if (lookahead.isEmpty) - advance() - - lookahead.dequeue() + if (!hasNext) empty.next() + else lookahead.dequeue() } } val leading = new Leading diff --git a/test/junit/scala/collection/IteratorTest.scala b/test/junit/scala/collection/IteratorTest.scala index d5389afd0c..1c1e50aed9 100644 --- a/test/junit/scala/collection/IteratorTest.scala +++ b/test/junit/scala/collection/IteratorTest.scala @@ -154,4 +154,14 @@ class IteratorTest { results += (Stream from 1).toIterator.drop(10).toStream.drop(10).toIterator.next() assertSameElements(List(1,1,21), results) } + // SI-9332 + @Test def spanExhaustsLeadingIterator(): Unit = { + def it = Iterator.iterate(0)(_ + 1).take(6) + val (x, y) = it.span(_ != 1) + val z = x.toList + assertEquals(1, z.size) + assertFalse(x.hasNext) + assertEquals(1, y.next) + assertFalse(x.hasNext) // was true, after advancing underlying iterator + } } -- cgit v1.2.3 From 5ebb7bb3bfc0e5152a707e693379d5ac5b5d9355 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Wed, 27 May 2015 14:21:58 -0700 Subject: SI-9332 Iterator.span simplified The queue is only used when the prefix is drained by finish. Since a finished flag has been introduced, distinguish between the drained state and using the underlying (buffered) iterator. 
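
For context, a REPL-style sketch of the behaviour both SI-9332 commits guarantee (illustrative only, not part of the patch; it mirrors the `spanExhaustsLeadingIterator` test above):

```scala
// The two iterators returned by span share the underlying iterator, so once the
// predicate fails the leading side must report exhaustion even after the trailing
// side advances the shared iterator.
val (leading, trailing) = Iterator.iterate(0)(_ + 1).take(6).span(_ != 1)
leading.toList     // List(0)
leading.hasNext    // false
trailing.next()    // 1
leading.hasNext    // still false (was true before the fix)
```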
--- src/library/scala/collection/Iterator.scala | 27 ++++++++++++--------------- 1 file changed, 12 insertions(+), 15 deletions(-) diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala index 0f6ae47e89..c9037eb3e3 100644 --- a/src/library/scala/collection/Iterator.scala +++ b/src/library/scala/collection/Iterator.scala @@ -10,7 +10,7 @@ package scala package collection import mutable.ArrayBuffer -import scala.annotation.{ migration, tailrec } +import scala.annotation.migration import immutable.Stream import scala.collection.generic.CanBuildFrom import scala.annotation.unchecked.{ uncheckedVariance => uV } @@ -582,22 +582,19 @@ trait Iterator[+A] extends TraversableOnce[A] { // Must be a named class to avoid structural call to finish from trailing iterator class Leading extends AbstractIterator[A] { - private val lookahead = new mutable.Queue[A] - private var finished = false - private def advance() = !finished && { - if (self.hasNext && p(self.head)) { - lookahead += self.next - true - } else { - finished = true - false - } + private val drained = new mutable.Queue[A] + private var finished = false + def finish(): Unit = { + require(!finished) + finished = true + while (selfish) drained += self.next } - @tailrec final def finish(): Unit = if (advance()) finish() - def hasNext = lookahead.nonEmpty || advance() + private def selfish = self.hasNext && p(self.head) + def hasNext = if (finished) drained.nonEmpty else selfish def next() = { - if (!hasNext) empty.next() - else lookahead.dequeue() + if (finished) drained.dequeue() + else if (selfish) self.next() + else empty.next() } } val leading = new Leading -- cgit v1.2.3 From 58d1fad2711f261100d2f391e962b64f85b6d998 Mon Sep 17 00:00:00 2001 From: Kato Kazuyoshi Date: Tue, 24 Mar 2015 22:30:11 -0700 Subject: SI-9144 Scaladoc: Make generated HTML files POSIX-compatible text files According POSIX, every text file contains characters organized into zero or more lines [1], and every line must be terminated by "\n" [2]. This change makes Scaladoc's HTML files POSIX-compatible text files. [1] http://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap03.html#tag_03_397 [2] http://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap03.html#tag_03_206 --- src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala | 1 + 1 file changed, 1 insertion(+) diff --git a/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala b/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala index 86155845b0..6cdd99c9ee 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala +++ b/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala @@ -59,6 +59,7 @@ abstract class HtmlPage extends Page { thisPage => writeFile(site) { (w: Writer) => w.write(doctype.toString + "\n") w.write(xml.Xhtml.toXhtml(html)) + w.write('\n') } if (site.universe.settings.docRawOutput) -- cgit v1.2.3 From 23a3ac4ee21499a4d2fbffb5cecff88c7e03790c Mon Sep 17 00:00:00 2001 From: Kato Kazuyoshi Date: Fri, 27 Mar 2015 19:21:22 -0700 Subject: SI-8210 Scaladoc: Fix the false negative @inheritdoc warning on accessors This fix is just for the false negative warning. Probably we can skip setters entirely, but I'm not 100% sure. 
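
For reference, the shape that triggers the warning this commit silences for setters (a sketch mirroring the enclosed SI-8210 test, not new behaviour): the getter of the `var` can inherit the parent comment, but the compiler-generated setter `bar_=` has no parent comment to inherit from.

```scala
trait Config {
  /** The bar obviously. */
  def bar: Int
}
class ConfigBuilder extends Config {
  /** @inheritdoc
    *
    * The default value is 1234.
    */
  var bar: Int = 1234  // getter inherits the doc; the synthetic setter bar_= has nothing to inherit
}
```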
--- src/compiler/scala/tools/nsc/ast/DocComments.scala | 6 +++--- test/scaladoc/run/SI-8210.check | 1 + test/scaladoc/run/SI-8210.scala | 24 ++++++++++++++++++++++ 3 files changed, 28 insertions(+), 3 deletions(-) create mode 100644 test/scaladoc/run/SI-8210.check create mode 100644 test/scaladoc/run/SI-8210.scala diff --git a/src/compiler/scala/tools/nsc/ast/DocComments.scala b/src/compiler/scala/tools/nsc/ast/DocComments.scala index 02a199f7ac..6442ef2d54 100755 --- a/src/compiler/scala/tools/nsc/ast/DocComments.scala +++ b/src/compiler/scala/tools/nsc/ast/DocComments.scala @@ -76,9 +76,9 @@ trait DocComments { self: Global => superComment(sym) match { case None => - if (ownComment.indexOf("@inheritdoc") != -1) - reporter.warning(sym.pos, "The comment for " + sym + - " contains @inheritdoc, but no parent comment is available to inherit from.") + // SI-8210 - The warning would be false negative when this symbol is a setter + if (ownComment.indexOf("@inheritdoc") != -1 && ! sym.isSetter) + reporter.warning(sym.pos, s"The comment for ${sym} contains @inheritdoc, but no parent comment is available to inherit from.") ownComment.replaceAllLiterally("@inheritdoc", "") case Some(sc) => if (ownComment == "") sc diff --git a/test/scaladoc/run/SI-8210.check b/test/scaladoc/run/SI-8210.check new file mode 100644 index 0000000000..619c56180b --- /dev/null +++ b/test/scaladoc/run/SI-8210.check @@ -0,0 +1 @@ +Done. diff --git a/test/scaladoc/run/SI-8210.scala b/test/scaladoc/run/SI-8210.scala new file mode 100644 index 0000000000..3bd818473f --- /dev/null +++ b/test/scaladoc/run/SI-8210.scala @@ -0,0 +1,24 @@ +import scala.tools.nsc.doc.model._ +import scala.tools.partest.ScaladocModelTest + +object Test extends ScaladocModelTest { + override def code = """ +object Foo { + trait Config { + /** The bar obviously. */ + def bar: Int + } + class ConfigBuilder extends Config { + /** @inheritdoc + * + * The default value is 1234. + */ + var bar: Int = 1234 + } +} + """ + + def scaladocSettings = "" + + def testModel(root: Package) = () +} -- cgit v1.2.3 From 7db3a58872593526c2cc175df633161f2ce9cccb Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Wed, 27 May 2015 17:29:03 +0200 Subject: Fix illegal inlining of instructions accessing protected members There were two issues in the new inliner that would cause a VerifyError and an IllegalAccessError. First, an access to a public member of package protected class C can only be inlined if the destination class can access C. This is tested by t7582b. Second, an access to a protected member requires the receiver object to be a subtype of the class where the instruction is located. So when inlining such an access, we need to know the type of the receiver object - which we don't have. Therefore we don't inline in this case for now. This can be fixed once we have a type propagation analyis. https://github.com/scala-opt/scala/issues/13. This case is tested by t2106. Force kmpSliceSearch test to delambdafy:inline See discussion on https://github.com/scala/scala/pull/4505. The issue will go away when moving to indy-lambda. 
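
To make the protected-member case concrete, here is a sketch along the lines of the enclosed t2106 test (not the literal test source): `clone` is protected in `java.lang.Object`, so the JVM only permits calling it on a receiver whose type conforms to the class containing the instruction. Inlining `foo` into `Test$` would move that call, with a receiver of type `A`, into an unrelated class and cause the IllegalAccessError mentioned in the updated check file, so the inliner has to refuse.

```scala
class A extends Cloneable {
  @inline final def foo: AnyRef = clone() // INVOKEVIRTUAL java/lang/Object.clone on an A receiver
}

object Test {
  val x = new A
  def main(args: Array[String]): Unit = x.foo // must not inline foo here
}
```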
--- .../tools/nsc/backend/jvm/BTypesFromSymbols.scala | 34 ++++++++++++- .../scala/tools/nsc/backend/jvm/opt/Inliner.scala | 58 +++++++++++++++------- test/files/run/kmpSliceSearch.flags | 1 + test/files/run/t2106.check | 7 +++ test/files/run/t2106.flags | 2 +- .../backend/jvm/opt/InlinerIllegalAccessTest.scala | 16 +++--- 6 files changed, 93 insertions(+), 25 deletions(-) create mode 100644 test/files/run/kmpSliceSearch.flags diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala index 1b9fd5e298..fffb9286b8 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala @@ -213,6 +213,35 @@ class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { assert(!primitiveTypeMap.contains(sym) || isCompilingPrimitive, sym) } + /** + * Reconstruct the classfile flags from a Java defined class symbol. + * + * The implementation of this method is slightly different that [[javaFlags]]. The javaFlags + * method is primarily used to map Scala symbol flags to sensible classfile flags that are used + * in the generated classfiles. For example, all classes emitted by the Scala compiler have + * ACC_PUBLIC. + * + * When building a [[ClassBType]] from a Java class symbol, the flags in the type's `info` have + * to correspond exactly to the flags in the classfile. For example, if the class is package + * protected (i.e., it doesn't have the ACC_PUBLIC flag), this needs to be reflected in the + * ClassBType. For example, the inliner needs the correct flags for access checks. + * + * Class flags are listed here: + * https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-4.html#jvms-4.1-200-E.1 + */ + private def javaClassfileFlags(classSym: Symbol): Int = { + assert(classSym.isJava, s"Expected Java class symbol, got ${classSym.fullName}") + import asm.Opcodes._ + GenBCode.mkFlags( + if (classSym.isPublic) ACC_PUBLIC else 0, + if (classSym.isFinal) ACC_FINAL else 0, + if (classSym.isInterface) ACC_INTERFACE else ACC_SUPER, // see the link above. javac does the same: ACC_SUPER for all classes, but not interfaces. + if (classSym.hasAbstractFlag) ACC_ABSTRACT else 0, + if (classSym.isArtifact) ACC_SYNTHETIC else 0, + if (classSym.hasEnumFlag) ACC_ENUM else 0 + ) + } + private def setClassInfo(classSym: Symbol, classBType: ClassBType): ClassBType = { val superClassSym = if (classSym.isImplClass) ObjectClass else classSym.superClass assert( @@ -230,7 +259,10 @@ class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { val interfaces = implementedInterfaces(classSym).map(classBTypeFromSymbol) - val flags = javaFlags(classSym) + val flags = { + if (classSym.isJava) javaClassfileFlags(classSym) // see comment on javaClassfileFlags + else javaFlags(classSym) + } /* The InnerClass table of a class C must contain all nested classes of C, even if they are only * declared but not otherwise referenced in C (from the bytecode or a method / field signature). 
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala index ac5c9ce2e6..e1f0ef0793 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala @@ -560,38 +560,62 @@ class Inliner[BT <: BTypes](val btypes: BT) { * @param memberDeclClass The class in which the member is declared (A) * @param memberRefClass The class used in the member reference (B) * + * (B0) JVMS 5.4.3.2 / 5.4.3.3: when resolving a member of class C in D, the class C is resolved + * first. According to 5.4.3.1, this requires C to be accessible in D. + * * JVMS 5.4.4 summary: A field or method R is accessible to a class D (destinationClass) iff * (B1) R is public * (B2) R is protected, declared in C (memberDeclClass) and D is a subclass of C. * If R is not static, R must contain a symbolic reference to a class T (memberRefClass), * such that T is either a subclass of D, a superclass of D, or D itself. + * Also (P) needs to be satisfied. * (B3) R is either protected or has default access and declared by a class in the same * run-time package as D. + * If R is protected, also (P) needs to be satisfied. * (B4) R is private and is declared in D. + * + * (P) When accessing a protected instance member, the target object on the stack (the receiver) + * has to be a subtype of D (destinationClass). This is enforced by classfile verification + * (https://docs.oracle.com/javase/specs/jvms/se8/html/jvms-4.html#jvms-4.10.1.8). + * + * TODO: we cannot currently implement (P) because we don't have the necessary information + * available. Once we have a type propagation analysis implemented, we can extract the receiver + * type from there (https://github.com/scala-opt/scala/issues/13). */ def memberIsAccessible(memberFlags: Int, memberDeclClass: ClassBType, memberRefClass: ClassBType): Either[OptimizerWarning, Boolean] = { // TODO: B3 requires "same run-time package", which seems to be package + classloader (JMVS 5.3.). is the below ok? 
def samePackageAsDestination = memberDeclClass.packageInternalName == destinationClass.packageInternalName - - val key = (ACC_PUBLIC | ACC_PROTECTED | ACC_PRIVATE) & memberFlags - key match { - case ACC_PUBLIC => // B1 - Right(true) - - case ACC_PROTECTED => // B2 - tryEither { - val condB2 = destinationClass.isSubtypeOf(memberDeclClass).orThrow && { - val isStatic = (ACC_STATIC & memberFlags) != 0 - isStatic || memberRefClass.isSubtypeOf(destinationClass).orThrow || destinationClass.isSubtypeOf(memberRefClass).orThrow + def targetObjectConformsToDestinationClass = false // needs type propagation analysis, see above + + def memberIsAccessibleImpl = { + val key = (ACC_PUBLIC | ACC_PROTECTED | ACC_PRIVATE) & memberFlags + key match { + case ACC_PUBLIC => // B1 + Right(true) + + case ACC_PROTECTED => // B2 + val isStatic = (ACC_STATIC & memberFlags) != 0 + tryEither { + val condB2 = destinationClass.isSubtypeOf(memberDeclClass).orThrow && { + isStatic || memberRefClass.isSubtypeOf(destinationClass).orThrow || destinationClass.isSubtypeOf(memberRefClass).orThrow + } + Right( + (condB2 || samePackageAsDestination /* B3 (protected) */) && + (isStatic || targetObjectConformsToDestinationClass) // (P) + ) } - Right(condB2 || samePackageAsDestination) // B3 (protected) - } - case 0 => // B3 (default access) - Right(samePackageAsDestination) + case 0 => // B3 (default access) + Right(samePackageAsDestination) + + case ACC_PRIVATE => // B4 + Right(memberDeclClass == destinationClass) + } + } - case ACC_PRIVATE => // B4 - Right(memberDeclClass == destinationClass) + classIsAccessible(memberDeclClass) match { // B0 + case Right(true) => memberIsAccessibleImpl + case r => r } } diff --git a/test/files/run/kmpSliceSearch.flags b/test/files/run/kmpSliceSearch.flags new file mode 100644 index 0000000000..ac96850b69 --- /dev/null +++ b/test/files/run/kmpSliceSearch.flags @@ -0,0 +1 @@ +-Ydelambdafy:inline \ No newline at end of file diff --git a/test/files/run/t2106.check b/test/files/run/t2106.check index f8f625ff46..66a0e707b3 100644 --- a/test/files/run/t2106.check +++ b/test/files/run/t2106.check @@ -1,3 +1,10 @@ +#partest -Ybackend:GenBCode +t2106.scala:7: warning: A::foo()Ljava/lang/Object; is annotated @inline but could not be inlined: +The callee A::foo()Ljava/lang/Object; contains the instruction INVOKEVIRTUAL java/lang/Object.clone ()Ljava/lang/Object; +that would cause an IllegalAccessError when inlined into class Test$. + def main(args: Array[String]): Unit = x.foo + ^ +#partest !-Ybackend:GenBCode t2106.scala:7: warning: Could not inline required method foo because access level required by callee not matched by caller. 
def main(args: Array[String]): Unit = x.foo ^ diff --git a/test/files/run/t2106.flags b/test/files/run/t2106.flags index 00d3643fd4..a2e413bb22 100644 --- a/test/files/run/t2106.flags +++ b/test/files/run/t2106.flags @@ -1 +1 @@ --optimise -Yinline-warnings +-optimise -Yinline-warnings -Yopt:l:classpath diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerIllegalAccessTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerIllegalAccessTest.scala index b4839dcec8..7ed0e13226 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerIllegalAccessTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerIllegalAccessTest.scala @@ -32,7 +32,8 @@ class InlinerIllegalAccessTest extends ClearAfterClass { import compiler.genBCode.bTypes._ def addToRepo(cls: List[ClassNode]): Unit = for (c <- cls) byteCodeRepository.add(c, ByteCodeRepository.Classfile) - def assertEmpty(ins: Option[AbstractInsnNode]) = for (i <- ins) throw new AssertionError(textify(i)) + def assertEmpty(ins: Option[AbstractInsnNode]) = for (i <- ins) + throw new AssertionError(textify(i)) @Test def typeAccessible(): Unit = { @@ -176,15 +177,18 @@ class InlinerIllegalAccessTest extends ClearAfterClass { // PROTECTED - // protected accessed in same class, or protected static accessed in subclass(rgD). - // can be inlined to subclasses, and classes in the same package (gCl) - for ((m, declCls) <- Set((rcC, cCl), (rgC, cCl), (rgD, dCl)); c <- Set(cCl, dCl, eCl, fCl, gCl, hCl)) check(m, declCls, c, assertEmpty) + // protected static accessed in same class, or protected static accessed in subclass(rgD). + // can be inlined to sub- and superclasses, and classes in the same package (gCl) + for ((m, declCls) <- Set((rgC, cCl), (rgD, dCl)); c <- Set(cCl, dCl, eCl, fCl, gCl, hCl)) check(m, declCls, c, assertEmpty) // protected in non-subclass and different package for (m <- Set(rcC, rgC)) check(m, cCl, iCl, cOrDOwner) - // non-static protected accessed in subclass (rcD). can be inlined to related class, or classes in the same package - for (c <- Set(cCl, dCl, eCl, fCl, gCl)) check(rcD, dCl, c, assertEmpty) + // non-static protected accessed in subclass (rcD). + // can be inlined only if the destination class is related (sub- or superclass) or in the same package, + // AND if the receiver object is a subtype of the destination class + // TODO: we cannot check this yet, so the check flags the instruction as causing an IllegalAccess. 
https://github.com/scala-opt/scala/issues/13 + for ((m, declCls) <- Set((rcC, cCl), (rcD, dCl)); c <- Set(cCl, dCl, eCl, fCl, gCl)) check(m, declCls, c, cOrDOwner) // rcD cannot be inlined into non-related classes, if the declaration and destination are not in the same package for (c <- Set(hCl, iCl)) check(rcD, dCl, c, cOrDOwner) -- cgit v1.2.3 From 31d08571bb19bb7b16197476a1c2f7dc319e3ba2 Mon Sep 17 00:00:00 2001 From: Zhong Sheng Date: Thu, 28 May 2015 23:47:57 +0800 Subject: add unit test for MathContext lost --- test/junit/scala/math/BigDecimalTest.scala | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/test/junit/scala/math/BigDecimalTest.scala b/test/junit/scala/math/BigDecimalTest.scala index c7a63da890..dedf2e242e 100644 --- a/test/junit/scala/math/BigDecimalTest.scala +++ b/test/junit/scala/math/BigDecimalTest.scala @@ -228,4 +228,11 @@ class BigDecimalTest { def test_SI8970() { assert((0.1).## == BigDecimal(0.1).##) } + + // Motivated by the problem of MathContext lost + @Test + def testMathContext() { + assert(BigDecimal(1.1d, MC.UNLIMITED).pow(1000) == BigDecimal("1.1", MC.UNLIMITED).pow(1000)) + assert((BigDecimal(1.23d, new MC(3)) + BigDecimal("0.005")).rounded == BigDecimal("1.24")) + } } -- cgit v1.2.3 From b8a4a97a6bb6c2946fa51e630551747b03874d1a Mon Sep 17 00:00:00 2001 From: Zhong Sheng Date: Fri, 29 May 2015 06:28:22 +0800 Subject: add more test for testMathContext --- test/junit/scala/math/BigDecimalTest.scala | 28 ++++++++++++++++++++++++++-- 1 file changed, 26 insertions(+), 2 deletions(-) diff --git a/test/junit/scala/math/BigDecimalTest.scala b/test/junit/scala/math/BigDecimalTest.scala index dedf2e242e..a801204cb2 100644 --- a/test/junit/scala/math/BigDecimalTest.scala +++ b/test/junit/scala/math/BigDecimalTest.scala @@ -232,7 +232,31 @@ class BigDecimalTest { // Motivated by the problem of MathContext lost @Test def testMathContext() { - assert(BigDecimal(1.1d, MC.UNLIMITED).pow(1000) == BigDecimal("1.1", MC.UNLIMITED).pow(1000)) - assert((BigDecimal(1.23d, new MC(3)) + BigDecimal("0.005")).rounded == BigDecimal("1.24")) + def testPrecision() { + val e = 1000 + + val n = BigDecimal("1.1", MC.UNLIMITED).pow(e) + assert(BigDecimal(1.1d, MC.UNLIMITED).pow(e) == n) + assert(BigDecimal.decimal(1.1d, MC.UNLIMITED).pow(e) == n) + assert(BigDecimal.decimal(1.1f, MC.UNLIMITED).pow(e) == n) + assert(BigDecimal.decimal(new BD("1.1"), MC.UNLIMITED).pow(e) == n) + + val m = BigDecimal(java.lang.Double.toString(1.1f.toDouble), MC.UNLIMITED).pow(e) + assert(BigDecimal(1.1f, MC.UNLIMITED).pow(e) == m) // deprecated + + val l = BigDecimal("11", MC.UNLIMITED).pow(e) + assert(BigDecimal(11, MC.UNLIMITED).pow(e) == l) + assert(BigDecimal.decimal(11, MC.UNLIMITED).pow(e) == l) + } + + def testRounded() { + assert((BigDecimal(1.23d, new MC(3)) + BigDecimal("0.005")).rounded == BigDecimal("1.24")) + assert((BigDecimal(1.23f, new MC(3)) + BigDecimal("0.005")).rounded == BigDecimal("1.24")) // deprecated + assert((BigDecimal.decimal(1.23d, new MC(3)) + BigDecimal("0.005")).rounded == BigDecimal("1.24")) + assert((BigDecimal.decimal(1.23f, new MC(3)) + BigDecimal("0.005")).rounded == BigDecimal("1.24")) + } + + testPrecision() + testRounded() } } -- cgit v1.2.3 From 9aae16aa231ad64e3987aaad2a206beaf10c76e0 Mon Sep 17 00:00:00 2001 From: Rex Kerr Date: Sat, 30 May 2015 11:35:25 -0700 Subject: Clean implementation of sorts for scala.util.Sorting. Removed code based on Sun JDK sorts and implemented new (basic) sorts from scratch. Deferred to Java Arrays.sort whenever practical. 
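
For orientation, a rough usage sketch of when the new code defers to the JDK sort (illustrative only; the authoritative dispatch is the `sort` match in the new `Sorting.scala` below):

```scala
import scala.util.Sorting

val xs = Array(3, 1, 2)
Sorting.quickSort(xs)                              // Array[Int] overload: java.util.Arrays.sort
Sorting.stableSort(xs)                             // default Ordering.Int: java.util.Arrays.sort
Sorting.stableSort(xs, (a: Int, b: Int) => a > b)  // custom ordering on a primitive: in-house stable merge sort

val names = Array("b", "a")
Sorting.quickSort(names)                           // generic quickSort[K: Ordering]: in-house in-place quicksort
```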
Behavior of `scala.util.Sorting` should be unchanged, but changed documentation to specify when the Java methods are being used (as they're typically very fast). A JUnit test is provided. Performance is important for sorts. Everything is better with this patch, though it could be better yet, as described below. Below are sort times (in microseconds, SEM < 5%) for various 1024-element arrays of small case classes that compare on an int field (quickSort), or int arrays that use custom ordering (stableSort). Note: "degenerate" means there are only 16 values possible, so there are lots of ties. Times are all with fresh data (no re-using cache from run to run). Results: ``` random sorted reverse degenerate big:64k tiny:16 Old Sorting.quickSort 234 181 178 103 25,700 1.4 New Sorting.quickSort 170 27 115 74 18,600 0.8 Old Sorting.stableSort 321 234 236 282 32,600 2.1 New Sorting.stableSort 239 16 194 194 25,100 1.2 java.util.Arrays.sort 124 4 8 105 13,500 0.8 java.util.Arrays.sort|Box 126 15 13 112 13,200 0.9 ``` The new versions are uniformly faster, but uniformly slower than Java sorting. scala.util.Sorting has use cases that don't map easily in to Java unless everything is pre-boxed, but the overhead of pre-boxing is minimal compared to the sort. A snapshot of some of my benchmarking code is below. (Yes, lots of repeating myself--it's dangerous not to when trying to get somewhat accurate benchmarks.) ``` import java.util.Arrays import java.util.Comparator import math.Ordering import util.Sorting import reflect.ClassTag val th = ichi.bench.Thyme.warmed() case class N(i: Int, j: Int) {} val a = Array.fill(1024)( Array.tabulate(1024)(i => N(util.Random.nextInt, i)) ) var ai = 0 val b = Array.fill(1024)( Array.tabulate(1024)(i => N(i, i)) ) var bi = 0 val c = Array.fill(1024)( Array.tabulate(1024)(i => N(1024-i, i)) ) var ci = 0 val d = Array.fill(1024)( Array.tabulate(1024)(i => N(util.Random.nextInt(16), i)) ) var di = 0 val e = Array.fill(16)( Array.tabulate(65536)(i => N(util.Random.nextInt, i)) ) var ei = 0 val f = Array.fill(65535)( Array.tabulate(16)(i => N(util.Random.nextInt, i)) ) var fi = 0 val o = new Ordering[N]{ def compare(a: N, b: N) = if (a.i < b.i) -1 else if (a.i > b.i) 1 else 0 } for (s <- Seq("one", "two", "three")) { println(s) th.pbench{ val x = a(ai).clone; ai = (ai+1)%a.length; Sorting.quickSort(x)(o); x(x.length/3) } th.pbench{ val x = b(bi).clone; bi = (bi+1)%b.length; Sorting.quickSort(x)(o); x(x.length/3) } th.pbench{ val x = c(ci).clone; ci = (ci+1)%c.length; Sorting.quickSort(x)(o); x(x.length/3) } th.pbench{ val x = d(di).clone; di = (di+1)%d.length; Sorting.quickSort(x)(o); x(x.length/3) } th.pbench{ val x = e(ei).clone; ei = (ei+1)%e.length; Sorting.quickSort(x)(o); x(x.length/3) } th.pbench{ val x = f(fi).clone; fi = (fi+1)%f.length; Sorting.quickSort(x)(o); x(x.length/3) } } def ix(ns: Array[N]) = { val is = new Array[Int](ns.length) var i = 0 while (i < ns.length) { is(i) = ns(i).i i += 1 } is } val p = new Ordering[Int]{ def compare(a: Int, b: Int) = if (a > b) 1 else if (a < b) -1 else 0 } for (s <- Seq("one", "two", "three")) { println(s) val tag: ClassTag[Int] = implicitly[ClassTag[Int]] th.pbench{ val x = ix(a(ai)); ai = (ai+1)%a.length; Sorting.stableSort(x)(tag, p); x(x.length/3) } th.pbench{ val x = ix(b(bi)); bi = (bi+1)%b.length; Sorting.stableSort(x)(tag, p); x(x.length/3) } th.pbench{ val x = ix(c(ci)); ci = (ci+1)%c.length; Sorting.stableSort(x)(tag, p); x(x.length/3) } th.pbench{ val x = ix(d(di)); di = (di+1)%d.length; Sorting.stableSort(x)(tag, 
p); x(x.length/3) } th.pbench{ val x = ix(e(ei)); ei = (ei+1)%e.length; Sorting.stableSort(x)(tag, p); x(x.length/3) } th.pbench{ val x = ix(f(fi)); fi = (fi+1)%f.length; Sorting.stableSort(x)(tag, p); x(x.length/3) } } for (s <- Seq("one", "two", "three")) { println(s) th.pbench{ val x = a(ai).clone; ai = (ai+1)%a.length; Arrays.sort(x, o); x(x.length/3) } th.pbench{ val x = b(bi).clone; bi = (bi+1)%b.length; Arrays.sort(x, o); x(x.length/3) } th.pbench{ val x = c(ci).clone; ci = (ci+1)%c.length; Arrays.sort(x, o); x(x.length/3) } th.pbench{ val x = d(di).clone; di = (di+1)%d.length; Arrays.sort(x, o); x(x.length/3) } th.pbench{ val x = e(ei).clone; ei = (ei+1)%e.length; Arrays.sort(x, o); x(x.length/3) } th.pbench{ val x = f(fi).clone; fi = (fi+1)%f.length; Arrays.sort(x, o); x(x.length/3) } } def bx(is: Array[Int]): Array[java.lang.Integer] = { val Is = new Array[java.lang.Integer](is.length) var i = 0 while (i < is.length) { Is(i) = java.lang.Integer.valueOf(is(i)) i += 1 } Is } def xb(Is: Array[java.lang.Integer]): Array[Int] = { val is = new Array[Int](Is.length) var i = 0 while (i < is.length) { is(i) = Is(i).intValue i += 1 } is } val q = new Comparator[java.lang.Integer]{ def compare(a: java.lang.Integer, b: java.lang.Integer) = o.compare(a.intValue, b.intValue) } for (s <- Seq("one", "two", "three")) { println(s) val tag: ClassTag[Int] = implicitly[ClassTag[Int]] th.pbench{ val x = bx(ix(a(ai))); ai = (ai+1)%a.length; Arrays.sort(x, q); xb(x)(x.length/3) } th.pbench{ val x = bx(ix(b(bi))); bi = (bi+1)%b.length; Arrays.sort(x, q); xb(x)(x.length/3) } th.pbench{ val x = bx(ix(c(ci))); ci = (ci+1)%c.length; Arrays.sort(x, q); xb(x)(x.length/3) } th.pbench{ val x = bx(ix(d(di))); di = (di+1)%d.length; Arrays.sort(x, q); xb(x)(x.length/3) } th.pbench{ val x = bx(ix(e(ei))); ei = (ei+1)%e.length; Arrays.sort(x, q); xb(x)(x.length/3) } th.pbench{ val x = bx(ix(f(fi))); fi = (fi+1)%f.length; Arrays.sort(x, q); xb(x)(x.length/3) } } ``` --- bincompat-forward.whitelist.conf | 53 +++ src/library/scala/util/Sorting.scala | 712 +++++++++++--------------------- test/junit/scala/util/SortingTest.scala | 69 ++++ 3 files changed, 357 insertions(+), 477 deletions(-) create mode 100644 test/junit/scala/util/SortingTest.scala diff --git a/bincompat-forward.whitelist.conf b/bincompat-forward.whitelist.conf index 3808083dd3..8fadb65f39 100644 --- a/bincompat-forward.whitelist.conf +++ b/bincompat-forward.whitelist.conf @@ -319,6 +319,59 @@ filter { { matchName="scala.util.Random.scala$util$Random$$nextAlphaNum$1" problemName=MissingMethodProblem + }, + // Nominally private but in practice JVM-visible methods for reworked scala.util.Sorting + { + matchName="scala.util.Sorting.scala$util$Sorting$$mergeSort$default$5" + problemName=MissingMethodProblem + }, + { + matchName="scala.util.Sorting.scala$util$Sorting$$mergeSort$mBc$sp" + problemName=MissingMethodProblem + }, + { + matchName="scala.util.Sorting.scala$util$Sorting$$mergeSort$mFc$sp" + problemName=MissingMethodProblem + }, + { + matchName="scala.util.Sorting.scala$util$Sorting$$mergeSort$mJc$sp" + problemName=MissingMethodProblem + }, + { + matchName="scala.util.Sorting.scala$util$Sorting$$mergeSort" + problemName=MissingMethodProblem + }, + { + matchName="scala.util.Sorting.scala$util$Sorting$$mergeSort$mCc$sp" + problemName=MissingMethodProblem + }, + { + matchName="scala.util.Sorting.scala$util$Sorting$$mergeSort$mSc$sp" + problemName=MissingMethodProblem + }, + { + matchName="scala.util.Sorting.scala$util$Sorting$$insertionSort" + 
problemName=MissingMethodProblem + }, + { + matchName="scala.util.Sorting.scala$util$Sorting$$mergeSort$mZc$sp" + problemName=MissingMethodProblem + }, + { + matchName="scala.util.Sorting.scala$util$Sorting$$mergeSort$mDc$sp" + problemName=MissingMethodProblem + }, + { + matchName="scala.util.Sorting.scala$util$Sorting$$mergeSort$mIc$sp" + problemName=MissingMethodProblem + }, + { + matchName="scala.util.Sorting.scala$util$Sorting$$mergeSorted" + problemName=MissingMethodProblem + }, + { + matchName="scala.util.Sorting.scala$util$Sorting$$booleanSort" + problemName=MissingMethodProblem } ] } diff --git a/src/library/scala/util/Sorting.scala b/src/library/scala/util/Sorting.scala index 2e021ad9d9..ee2bdbc4a7 100644 --- a/src/library/scala/util/Sorting.scala +++ b/src/library/scala/util/Sorting.scala @@ -1,6 +1,6 @@ /* __ *\ ** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2009, Ross Judson ** +** / __/ __// _ | / / / _ | (c) 2006-2015, LAMP/EPFL ** ** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** ** /____/\___/_/ |_/____/_/ | | ** ** |/ ** @@ -9,518 +9,276 @@ package scala package util -import scala.reflect.{ ClassTag, classTag } -import scala.math.{ Ordering, max, min } +import scala.reflect.ClassTag +import scala.math.Ordering -/** The Sorting object provides functions that can sort various kinds of - * objects. You can provide a comparison function, or you can request a sort - * of items that are viewable as [[scala.math.Ordered]]. Some sorts that - * operate directly on a subset of value types are also provided. These - * implementations are derived from those in the Sun JDK. +/** The `Sorting` object provides convenience wrappers for `java.util.Arrays.sort`. + * Methods that defer to `java.util.Arrays.sort` say that they do or under what + * conditions that they do. * - * Note that stability doesn't matter for value types, so use the `quickSort` - * variants for those. `stableSort` is intended to be used with - * objects when the prior ordering should be preserved, where possible. + * `Sorting` also implements a general-purpose quicksort and stable (merge) sort + * for those cases where `java.util.Arrays.sort` could only be used at the cost + * of a large memory penalty. If performance rather than memory usage is the + * primary concern, one may wish to find alternate strategies to use + * `java.util.Arrays.sort` directly e.g. by boxing primitives to use + * a custom ordering on them. + * + * `Sorting` provides methods where you can provide a comparison function, or + * can request a sort of items that are [[scala.math.Ordered]] or that + * otherwise have an implicit or explicit [[scala.math.Ordering]]. + * + * Note also that high-performance non-default sorts for numeric types + * are not provided. If this is required, it is advisable to investigate + * other libraries that cover this use case. * * @author Ross Judson - * @version 1.0 + * @author Adriaan Moors + * @author Rex Kerr + * @version 1.1 */ object Sorting { - /** Quickly sort an array of Doubles. */ - def quickSort(a: Array[Double]) { sort1(a, 0, a.length) } - - /** Quickly sort an array of items with an implicit Ordering. */ - def quickSort[K: Ordering](a: Array[K]) { sort1(a, 0, a.length) } - - /** Quickly sort an array of Ints. */ - def quickSort(a: Array[Int]) { sort1(a, 0, a.length) } - - /** Quickly sort an array of Floats. 
*/ - def quickSort(a: Array[Float]) { sort1(a, 0, a.length) } - - /** Sort an array of K where K is Ordered, preserving the existing order - * where the values are equal. */ - def stableSort[K: ClassTag: Ordering](a: Array[K]) { - stableSort(a, 0, a.length-1, new Array[K](a.length), Ordering[K].lt _) - } + /** Sort an array of Doubles using `java.util.Arrays.sort`. */ + def quickSort(a: Array[Double]): Unit = java.util.Arrays.sort(a) - /** Sorts an array of `K` given an ordering function `f`. - * `f` should return `true` iff its first parameter is strictly less than its second parameter. - */ - def stableSort[K: ClassTag](a: Array[K], f: (K, K) => Boolean) { - stableSort(a, 0, a.length-1, new Array[K](a.length), f) - } + /** Sort an array of Ints using `java.util.Arrays.sort`. */ + def quickSort(a: Array[Int]): Unit = java.util.Arrays.sort(a) - /** Sorts an arbitrary sequence into an array, given a comparison function - * that should return `true` iff parameter one is strictly less than parameter two. - * - * @param a the sequence to be sorted. - * @param f the comparison function. - * @return the sorted sequence of items. - */ - def stableSort[K: ClassTag](a: Seq[K], f: (K, K) => Boolean): Array[K] = { - val ret = a.toArray - stableSort(ret, f) - ret - } + /** Sort an array of Floats using `java.util.Arrays.sort`. */ + def quickSort(a: Array[Float]): Unit = java.util.Arrays.sort(a) + + private final val qsortThreshold = 16 - /** Sorts an arbitrary sequence of items that are viewable as ordered. */ - def stableSort[K: ClassTag: Ordering](a: Seq[K]): Array[K] = - stableSort(a, Ordering[K].lt _) - - /** Stably sorts a sequence of items given an extraction function that will - * return an ordered key from an item. - * - * @param a the sequence to be sorted. - * @param f the comparison function. - * @return the sorted sequence of items. - */ - def stableSort[K: ClassTag, M: Ordering](a: Seq[K], f: K => M): Array[K] = - stableSort(a)(implicitly[ClassTag[K]], Ordering[M] on f) - - private def sort1[K: Ordering](x: Array[K], off: Int, len: Int) { - val ord = Ordering[K] - import ord._ - - def swap(a: Int, b: Int) { - val t = x(a) - x(a) = x(b) - x(b) = t - } - def vecswap(_a: Int, _b: Int, n: Int) { - var a = _a - var b = _b - var i = 0 - while (i < n) { - swap(a, b) - i += 1 - a += 1 - b += 1 - } - } - def med3(a: Int, b: Int, c: Int) = { - if (x(a) < x(b)) { - if (x(b) < x(c)) b else if (x(a) < x(c)) c else a - } else { - if (x(b) > x(c)) b else if (x(a) > x(c)) c else a - } - } - def sort2(off: Int, len: Int) { - // Insertion sort on smallest arrays - if (len < 7) { - var i = off - while (i < len + off) { - var j = i - while (j > off && x(j-1) > x(j)) { - swap(j, j-1) - j -= 1 + /** Sort array `a` with quicksort, using the Ordering on its elements. + * This algorithm sorts in place, so no additional memory is used aside from + * what might be required to box individual elements during comparison. + */ + def quickSort[K: Ordering](a: Array[K]): Unit = { + // Must have iN >= i0 or math will fail. Also, i0 >= 0. 
+ def inner(a: Array[K], i0: Int, iN: Int, ord: Ordering[K]): Unit = { + if (iN - i0 < qsortThreshold) insertionSort(a, i0, iN, ord) + else { + var iK = (i0 + iN) >>> 1 // Unsigned div by 2 + // Find index of median of first, central, and last elements + var pL = + if (ord.compare(a(i0), a(iN - 1)) <= 0) + if (ord.compare(a(i0), a(iK)) < 0) + if (ord.compare(a(iN - 1), a(iK)) < 0) iN - 1 else iK + else i0 + else + if (ord.compare(a(i0), a(iK)) < 0) i0 + else + if (ord.compare(a(iN - 1), a(iK)) <= 0) iN - 1 + else iK + val pivot = a(pL) + // pL is the start of the pivot block; move it into the middle if needed + if (pL != iK) { a(pL) = a(iK); a(iK) = pivot; pL = iK } + // Elements equal to the pivot will be in range pL until pR + var pR = pL + 1 + // Items known to be less than pivot are below iA (range i0 until iA) + var iA = i0 + // Items known to be greater than pivot are at or above iB (range iB until iN) + var iB = iN + // Scan through everything in the buffer before the pivot(s) + while (pL - iA > 0) { + val current = a(iA) + ord.compare(current, pivot) match { + case 0 => + // Swap current out with pivot block + a(iA) = a(pL - 1) + a(pL - 1) = current + pL -= 1 + case x if x < 0 => + // Already in place. Just update indicies. + iA += 1 + case _ if iB > pR => + // Wrong side. There's room on the other side, so swap + a(iA) = a(iB - 1) + a(iB - 1) = current + iB -= 1 + case _ => + // Wrong side and there is no room. Swap by rotating pivot block. + a(iA) = a(pL - 1) + a(pL - 1) = a(pR - 1) + a(pR - 1) = current + pL -= 1 + pR -= 1 + iB -= 1 } - i += 1 } - } else { - // Choose a partition element, v - var m = off + (len >> 1) // Small arrays, middle element - if (len > 7) { - var l = off - var n = off + len - 1 - if (len > 40) { // Big arrays, pseudomedian of 9 - val s = len / 8 - l = med3(l, l+s, l+2*s) - m = med3(m-s, m, m+s) - n = med3(n-2*s, n-s, n) + // Get anything remaining in buffer after the pivot(s) + while (iB - pR > 0) { + val current = a(iB - 1) + ord.compare(current, pivot) match { + case 0 => + // Swap current out with pivot block + a(iB - 1) = a(pR) + a(pR) = current + pR += 1 + case x if x > 0 => + // Already in place. Just update indices. + iB -= 1 + case _ => + // Wrong side and we already know there is no room. Swap by rotating pivot block. 
+ a(iB - 1) = a(pR) + a(pR) = a(pL) + a(pL) = current + iA += 1 + pL += 1 + pR += 1 } - m = med3(l, m, n) // Mid-size, med of 3 } - val v = x(m) - - // Establish Invariant: v* (v)* v* - var a = off - var b = a - var c = off + len - 1 - var d = c - var done = false - while (!done) { - while (b <= c && x(b) <= v) { - if (x(b) equiv v) { - swap(a, b) - a += 1 - } - b += 1 - } - while (c >= b && x(c) >= v) { - if (x(c) equiv v) { - swap(c, d) - d -= 1 - } - c -= 1 - } - if (b > c) { - done = true - } else { - swap(b, c) - c -= 1 - b += 1 - } + // Use tail recursion on large half (Sedgewick's method) so we don't blow up the stack if pivots are poorly chosen + if (iA - i0 < iN - iB) { + inner(a, i0, iA, ord) // True recursion + inner(a, iB, iN, ord) // Should be tail recursion + } + else { + inner(a, iB, iN, ord) // True recursion + inner(a, i0, iA, ord) // Should be tail recursion } - - // Swap partition elements back to middle - val n = off + len - var s = math.min(a-off, b-a) - vecswap(off, b-s, s) - s = math.min(d-c, n-d-1) - vecswap(b, n-s, s) - - // Recursively sort non-partition-elements - s = b - a - if (s > 1) - sort2(off, s) - s = d - c - if (s > 1) - sort2(n-s, s) } } - sort2(off, len) + inner(a, 0, a.length, implicitly[Ordering[K]]) } - - private def sort1(x: Array[Int], off: Int, len: Int) { - def swap(a: Int, b: Int) { - val t = x(a) - x(a) = x(b) - x(b) = t + + private final val mergeThreshold = 32 + + // Ordering[T] might be slow especially for boxed primitives, so use binary search variant of insertion sort + // Caller must pass iN >= i0 or math will fail. Also, i0 >= 0. + private def insertionSort[@specialized T](a: Array[T], i0: Int, iN: Int, ord: Ordering[T]): Unit = { + val n = iN - i0 + if (n < 2) return + if (ord.compare(a(i0), a(i0+1)) > 0) { + val temp = a(i0) + a(i0) = a(i0+1) + a(i0+1) = temp } - def vecswap(_a: Int, _b: Int, n: Int) { - var a = _a - var b = _b - var i = 0 - while (i < n) { - swap(a, b) - i += 1 - a += 1 - b += 1 - } - } - def med3(a: Int, b: Int, c: Int) = { - if (x(a) < x(b)) { - if (x(b) < x(c)) b else if (x(a) < x(c)) c else a - } else { - if (x(b) > x(c)) b else if (x(a) > x(c)) c else a - } - } - def sort2(off: Int, len: Int) { - // Insertion sort on smallest arrays - if (len < 7) { - var i = off - while (i < len + off) { - var j = i - while (j>off && x(j-1) > x(j)) { - swap(j, j-1) - j -= 1 - } - i += 1 + var m = 2 + while (m < n) { + // Speed up already-sorted case by checking last element first + val next = a(i0 + m) + if (ord.compare(next, a(i0+m-1)) < 0) { + var iA = i0 + var iB = i0 + m - 1 + while (iB - iA > 1) { + val ix = (iA + iB) >>> 1 // Use bit shift to get unsigned div by 2 + if (ord.compare(next, a(ix)) < 0) iB = ix + else iA = ix } - } else { - // Choose a partition element, v - var m = off + (len >> 1) // Small arrays, middle element - if (len > 7) { - var l = off - var n = off + len - 1 - if (len > 40) { // Big arrays, pseudomedian of 9 - val s = len / 8 - l = med3(l, l+s, l+2*s) - m = med3(m-s, m, m+s) - n = med3(n-2*s, n-s, n) - } - m = med3(l, m, n) // Mid-size, med of 3 - } - val v = x(m) - - // Establish Invariant: v* (v)* v* - var a = off - var b = a - var c = off + len - 1 - var d = c - var done = false - while (!done) { - while (b <= c && x(b) <= v) { - if (x(b) == v) { - swap(a, b) - a += 1 - } - b += 1 - } - while (c >= b && x(c) >= v) { - if (x(c) == v) { - swap(c, d) - d -= 1 - } - c -= 1 - } - if (b > c) { - done = true - } else { - swap(b, c) - c -= 1 - b += 1 - } + val ix = iA + (if (ord.compare(next, a(iA)) < 0) 
0 else 1) + var i = i0 + m + while (i > ix) { + a(i) = a(i-1) + i -= 1 } - - // Swap partition elements back to middle - val n = off + len - var s = math.min(a-off, b-a) - vecswap(off, b-s, s) - s = math.min(d-c, n-d-1) - vecswap(b, n-s, s) - - // Recursively sort non-partition-elements - s = b - a - if (s > 1) - sort2(off, s) - s = d - c - if (s > 1) - sort2(n-s, s) + a(ix) = next } + m += 1 } - sort2(off, len) } - - private def sort1(x: Array[Double], off: Int, len: Int) { - def swap(a: Int, b: Int) { - val t = x(a) - x(a) = x(b) - x(b) = t + + // Caller is required to pass iN >= i0, else math will fail. Also, i0 >= 0. + private def mergeSort[@specialized T: ClassTag](a: Array[T], i0: Int, iN: Int, ord: Ordering[T], scratch: Array[T] = null): Unit = { + if (iN - i0 < mergeThreshold) insertionSort(a, i0, iN, ord) + else { + val iK = (i0 + iN) >>> 1 // Bit shift equivalent to unsigned math, no overflow + val sc = if (scratch eq null) new Array[T](iK - i0) else scratch + mergeSort(a, i0, iK, ord, sc) + mergeSort(a, iK, iN, ord, sc) + mergeSorted(a, i0, iK, iN, ord, sc) } - def vecswap(_a: Int, _b: Int, n: Int) { - var a = _a - var b = _b - var i = 0 - while (i < n) { - swap(a, b) + } + + // Must have 0 <= i0 < iK < iN + private def mergeSorted[@specialized T](a: Array[T], i0: Int, iK: Int, iN: Int, ord: Ordering[T], scratch: Array[T]): Unit = { + // Check to make sure we're not already in order + if (ord.compare(a(iK-1), a(iK)) > 0) { + var i = i0 + val jN = iK - i0 + var j = 0 + while (i < iK) { + scratch (j) = a(i) i += 1 - a += 1 - b += 1 - } - } - def med3(a: Int, b: Int, c: Int) = { - val ab = x(a) compare x(b) - val bc = x(b) compare x(c) - val ac = x(a) compare x(c) - if (ab < 0) { - if (bc < 0) b else if (ac < 0) c else a - } else { - if (bc > 0) b else if (ac > 0) c else a + j += 1 } - } - def sort2(off: Int, len: Int) { - // Insertion sort on smallest arrays - if (len < 7) { - var i = off - while (i < len + off) { - var j = i - while (j > off && (x(j-1) compare x(j)) > 0) { - swap(j, j-1) - j -= 1 - } - i += 1 - } - } else { - // Choose a partition element, v - var m = off + (len >> 1) // Small arrays, middle element - if (len > 7) { - var l = off - var n = off + len - 1 - if (len > 40) { // Big arrays, pseudomedian of 9 - val s = len / 8 - l = med3(l, l+s, l+2*s) - m = med3(m-s, m, m+s) - n = med3(n-2*s, n-s, n) - } - m = med3(l, m, n) // Mid-size, med of 3 - } - val v = x(m) - - // Establish Invariant: v* (v)* v* - var a = off - var b = a - var c = off + len - 1 - var d = c - var done = false - while (!done) { - var bv = x(b) compare v - while (b <= c && bv <= 0) { - if (bv == 0) { - swap(a, b) - a += 1 - } - b += 1 - if (b <= c) bv = x(b) compare v - } - var cv = x(c) compare v - while (c >= b && cv >= 0) { - if (cv == 0) { - swap(c, d) - d -= 1 - } - c -= 1 - if (c >= b) cv = x(c) compare v - } - if (b > c) { - done = true - } else { - swap(b, c) - c -= 1 - b += 1 - } - } - - // Swap partition elements back to middle - val n = off + len - var s = math.min(a-off, b-a) - vecswap(off, b-s, s) - s = math.min(d-c, n-d-1) - vecswap(b, n-s, s) - - // Recursively sort non-partition-elements - s = b - a - if (s > 1) - sort2(off, s) - s = d - c - if (s > 1) - sort2(n-s, s) + var k = i0 + j = 0 + while (i < iN && j < jN) { + if (ord.compare(a(i), scratch(j)) < 0) { a(k) = a(i); i += 1 } + else { a(k) = scratch(j); j += 1 } + k += 1 } + while (j < jN) { a(k) = scratch(j); j += 1; k += 1 } + // Don't need to finish a(i) because it's already in place, k = i } - sort2(off, len) } - - private 
def sort1(x: Array[Float], off: Int, len: Int) { - def swap(a: Int, b: Int) { - val t = x(a) - x(a) = x(b) - x(b) = t + + // Why would you even do this? + private def booleanSort(a: Array[Boolean]): Unit = { + var i = 0 + var n = 0 + while (i < a.length) { + if (!a(i)) n += 1 + i += 1 } - def vecswap(_a: Int, _b: Int, n: Int) { - var a = _a - var b = _b - var i = 0 - while (i < n) { - swap(a, b) - i += 1 - a += 1 - b += 1 - } + i = 0 + while (i < n) { + a(i) = false + i += 1 } - def med3(a: Int, b: Int, c: Int) = { - val ab = x(a) compare x(b) - val bc = x(b) compare x(c) - val ac = x(a) compare x(c) - if (ab < 0) { - if (bc < 0) b else if (ac < 0) c else a - } else { - if (bc > 0) b else if (ac > 0) c else a - } + while (i < a.length) { + a(i) = true + i += 1 } - def sort2(off: Int, len: Int) { - // Insertion sort on smallest arrays - if (len < 7) { - var i = off - while (i < len + off) { - var j = i - while (j > off && (x(j-1) compare x(j)) > 0) { - swap(j, j-1) - j -= 1 - } - i += 1 - } - } else { - // Choose a partition element, v - var m = off + (len >> 1) // Small arrays, middle element - if (len > 7) { - var l = off - var n = off + len - 1 - if (len > 40) { // Big arrays, pseudomedian of 9 - val s = len / 8 - l = med3(l, l+s, l+2*s) - m = med3(m-s, m, m+s) - n = med3(n-2*s, n-s, n) - } - m = med3(l, m, n) // Mid-size, med of 3 - } - val v = x(m) + } - // Establish Invariant: v* (v)* v* - var a = off - var b = a - var c = off + len - 1 - var d = c - var done = false - while (!done) { - var bv = x(b) compare v - while (b <= c && bv <= 0) { - if (bv == 0) { - swap(a, b) - a += 1 - } - b += 1 - if (b <= c) bv = x(b) compare v - } - var cv = x(c) compare v - while (c >= b && cv >= 0) { - if (cv == 0) { - swap(c, d) - d -= 1 - } - c -= 1 - if (c >= b) cv = x(c) compare v - } - if (b > c) { - done = true - } else { - swap(b, c) - c -= 1 - b += 1 - } - } + // TODO: add upper bound: T <: AnyRef, propagate to callers below (not binary compatible) + // Maybe also rename all these methods to `sort`. + @inline private def sort[T](a: Array[T], ord: Ordering[T]): Unit = a match { + case _: Array[AnyRef] => + // Note that runtime matches are covariant, so could actually be any Array[T] s.t. T is not primitive (even boxed value classes) + if (a.length > 1 && (ord eq null)) throw new NullPointerException("Ordering") + java.util.Arrays.sort(a, ord) + case a: Array[Int] => if (ord eq Ordering.Int) java.util.Arrays.sort(a) else mergeSort[Int](a, 0, a.length, ord) + case a: Array[Double] => mergeSort[Double](a, 0, a.length, ord) // Because not all NaNs are identical, stability is meaningful! + case a: Array[Long] => if (ord eq Ordering.Long) java.util.Arrays.sort(a) else mergeSort[Long](a, 0, a.length, ord) + case a: Array[Float] => mergeSort[Float](a, 0, a.length, ord) // Because not all NaNs are identical, stability is meaningful! + case a: Array[Char] => if (ord eq Ordering.Char) java.util.Arrays.sort(a) else mergeSort[Char](a, 0, a.length, ord) + case a: Array[Byte] => if (ord eq Ordering.Byte) java.util.Arrays.sort(a) else mergeSort[Byte](a, 0, a.length, ord) + case a: Array[Short] => if (ord eq Ordering.Short) java.util.Arrays.sort(a) else mergeSort[Short](a, 0, a.length, ord) + case a: Array[Boolean] => if (ord eq Ordering.Boolean) booleanSort(a) else mergeSort[Boolean](a, 0, a.length, ord) + // Array[Unit] is matched as an Array[AnyRef] due to covariance in runtime matching. Not worth catching it as a special case. 
+ case null => throw new NullPointerException + } - // Swap partition elements back to middle - val n = off + len - var s = math.min(a-off, b-a) - vecswap(off, b-s, s) - s = math.min(d-c, n-d-1) - vecswap(b, n-s, s) + // TODO: remove unnecessary ClassTag (not binary compatible) + /** Sort array `a` using the Ordering on its elements, preserving the original ordering where possible. Uses `java.util.Arrays.sort` unless `K` is a primitive type. */ + def stableSort[K: ClassTag: Ordering](a: Array[K]): Unit = sort(a, Ordering[K]) - // Recursively sort non-partition-elements - s = b - a - if (s > 1) - sort2(off, s) - s = d - c - if (s > 1) - sort2(n-s, s) - } - } - sort2(off, len) + // TODO: Remove unnecessary ClassTag (not binary compatible) + // TODO: make this fast for primitive K (could be specialized if it didn't go through Ordering) + /** Sort array `a` using function `f` that computes the less-than relation for each element. Uses `java.util.Arrays.sort` unless `K` is a primitive type. */ + def stableSort[K: ClassTag](a: Array[K], f: (K, K) => Boolean): Unit = sort(a, Ordering fromLessThan f) + + /** A sorted Array, using the Ordering for the elements in the sequence `a`. Uses `java.util.Arrays.sort` unless `K` is a primitive type. */ + def stableSort[K: ClassTag: Ordering](a: Seq[K]): Array[K] = { + val ret = a.toArray + sort(ret, Ordering[K]) + ret } - private def stableSort[K : ClassTag](a: Array[K], lo: Int, hi: Int, scratch: Array[K], f: (K,K) => Boolean) { - if (lo < hi) { - val mid = (lo+hi) / 2 - stableSort(a, lo, mid, scratch, f) - stableSort(a, mid+1, hi, scratch, f) - var k, t_lo = lo - var t_hi = mid + 1 - while (k <= hi) { - if ((t_lo <= mid) && ((t_hi > hi) || (!f(a(t_hi), a(t_lo))))) { - scratch(k) = a(t_lo) - t_lo += 1 - } else { - scratch(k) = a(t_hi) - t_hi += 1 - } - k += 1 - } - k = lo - while (k <= hi) { - a(k) = scratch(k) - k += 1 - } - } + // TODO: make this fast for primitive K (could be specialized if it didn't go through Ordering) + /** A sorted Array, given a function `f` that computes the less-than relation for each item in the sequence `a`. Uses `java.util.Arrays.sort` unless `K` is a primitive type. */ + def stableSort[K: ClassTag](a: Seq[K], f: (K, K) => Boolean): Array[K] = { + val ret = a.toArray + sort(ret, Ordering fromLessThan f) + ret + } + + /** A sorted Array, given an extraction function `f` that returns an ordered key for each item in the sequence `a`. Uses `java.util.Arrays.sort` unless `K` is a primitive type. 
*/ + def stableSort[K: ClassTag, M: Ordering](a: Seq[K], f: K => M): Array[K] = { + val ret = a.toArray + sort(ret, Ordering[M] on f) + ret } } diff --git a/test/junit/scala/util/SortingTest.scala b/test/junit/scala/util/SortingTest.scala new file mode 100644 index 0000000000..15a00c8903 --- /dev/null +++ b/test/junit/scala/util/SortingTest.scala @@ -0,0 +1,69 @@ +package scala.util + +import org.junit.Test +import org.junit.Assert._ +import scala.math.{ Ordered, Ordering } +import scala.reflect.ClassTag + +class SortingTest { + case class N(i: Int, j: Int) extends Ordered[N] { def compare(n: N) = if (i < n.i) -1 else if (i > n.i) 1 else 0 } + + def mkA(n: Int, max: Int) = Array.tabulate(n)(i => N(util.Random.nextInt(max), i)) + + def isStable(a: Array[N]): Boolean = { var i = 1; while (i < a.length) { if (a(i).i < a(i-1).i || (a(i).i == a(i-1).i && a(i).j < a(i-1).j)) return false; i += 1 }; true } + + def isAntistable(a: Array[N]): Boolean = + { var i = 1; while (i < a.length) { if (a(i).i > a(i-1).i || (a(i).i == a(i-1).i && a(i).j < a(i-1).j)) return false; i += 1 }; true } + + def isSorted(a: Array[N]): Boolean = { var i = 1; while (i < a.length) { if (a(i).i < a(i-1).i) return false; i += 1 }; true } + + def isAntisorted(a: Array[N]): Boolean = { var i = 1; while (i < a.length) { if (a(i).i > a(i-1).i) return false; i += 1 }; true } + + val sizes = Seq.range(0, 65) ++ Seq(256, 1024, 9121, 65539) + val variety = Seq(1, 2, 10, 100, 1000, Int.MaxValue) + val workLimit = 1e6 + val rng = new util.Random(198571) + + val backwardsN = Ordering by ((n: N) => -n.i) + + def runOneTest(size: Int, variety: Int): Unit = { + val xs = Array.tabulate(size)(i => N(rng.nextInt(variety), i)) + val ys = Array.range(0, xs.length) + val zs = { val temp = xs.clone; java.util.Arrays.sort(temp, new java.util.Comparator[N] { def compare(a: N, b: N) = a.compare(b) }); temp } + val qxs = { val temp = xs.clone; Sorting.quickSort(temp); temp } + val pxs = { val temp = xs.clone; Sorting.quickSort(temp)(backwardsN); temp } + val sxs = { val temp = xs.clone; Sorting.stableSort(temp); temp } + val rxs = { val temp = xs.clone; Sorting.stableSort(temp)(implicitly[ClassTag[N]], backwardsN); temp } + val sys = Sorting.stableSort(ys.clone: Seq[Int], (i: Int) => xs(i)) + + assertTrue("Quicksort should be in order", isSorted(qxs)) + assertTrue("Quicksort should be in reverse order", isAntisorted(pxs)) + assertTrue("Stable sort should be sorted and stable", isStable(sxs)) + assertTrue("Stable sort should be reverse sorted but stable", isAntistable(rxs)) + assertTrue("Stable sorting by proxy should produce sorted stable list", isStable(sys.map(i => xs(i)))) + assertTrue("Quicksort should produce canonical ordering", (qxs zip zs).forall{ case (a,b) => a.i == b.i }) + assertTrue("Reverse quicksort should produce canonical ordering", (pxs.reverse zip zs).forall{ case (a,b) => a.i == b.i }) + assertTrue("Stable sort should produce exact ordering", (sxs zip zs).forall{ case (a,b) => a == b }) + assertTrue("Reverse stable sort should produce canonical ordering", (rxs.reverse zip zs).forall{ case (a,b) => a.i == b.i }) + assertTrue("Proxy sort and direct sort should produce exactly the same thing", (sxs zip sys.map(i => xs(i))).forall{ case (a,b) => a == b }) + } + + @Test def testSortConsistency: Unit = { + for { + size <- sizes + v <- variety + i <- 0 until math.min(100, math.max(math.min(math.floor(math.pow(v, size)/2), math.ceil(workLimit / (math.log(math.max(2,size))/math.log(2) * size))), 1).toInt) + } runOneTest(size, v) + + 
for (size <- sizes) { + val b = Array.fill(size)(rng.nextBoolean) + val bfwd = Sorting.stableSort(b.clone: Seq[Boolean]) + val bbkw = Sorting.stableSort(b.clone: Seq[Boolean], (x: Boolean, y: Boolean) => x && !y) + assertTrue("All falses should be first", bfwd.dropWhile(_ == false).forall(_ == true)) + assertTrue("All falses should be last when sorted backwards", bbkw.dropWhile(_ == true).forall(_ == false)) + assertTrue("Sorting booleans should preserve the number of trues", b.count(_ == true) == bfwd.count(_ == true)) + assertTrue("Backwards sorting booleans should preserve the number of trues", b.count(_ == true) == bbkw.count(_ == true)) + assertTrue("Sorting should not change the sizes of arrays", b.length == bfwd.length && b.length == bbkw.length) + } + } +} -- cgit v1.2.3 From 0bc7146b5bd8b8b0bcc1a0363ba4b42a58287260 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 2 Jun 2015 15:30:39 +1000 Subject: [sbt] Allow the REPL to be run from the SBT build - Tell SBT to that we're forking an interactive process - Automatically add `-usejavacp` so the REPL adds the classes from the system classloader to the compilers classpath. JLine seems to be working from within this setup. ``` % sbt Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=384m; support was removed in 8.0 [info] Loading global plugins from /Users/jason/.sbt/0.13/plugins [info] Loading project definition from /Users/jason/code/scala2/project [info] *** Welcome to the sbt build definition for Scala! *** [info] This build definition has an EXPERIMENTAL status. If you are not [info] interested in testing or working on the build itself, please use [info] the Ant build definition for now. Check README.md for more information. > repl/run [info] Running scala.tools.nsc.MainGenericRunner -usejavacp Welcome to Scala version 2.11.6-SNAPSHOT-20150528-131650-70f0b1ded8 (Java HotSpot(TM) 64-Bit Server VM, Java 1.8.0_25). Type in expressions to have them evaluated. Type :help for more information. scala> 1 + 1 res0: Int = 2 (reverse-i-search)`1': 1 + 1 ``` --- build.sbt | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 553c217d4a..e960a4c3d2 100644 --- a/build.sbt +++ b/build.sbt @@ -192,7 +192,12 @@ lazy val interactive = configureAsSubproject(project) .dependsOn(compiler) lazy val repl = configureAsSubproject(project) - .settings(libraryDependencies += jlineDep) + .settings( + libraryDependencies += jlineDep, + connectInput in run := true, + outputStrategy in run := Some(StdoutOutput), + run <<= (run in Compile).partialInput(" -usejavacp") // Automatically add this so that `repl/run` works without additional arguments. 
+ ) .settings(disableDocsAndPublishingTasks: _*) .dependsOn(compiler) -- cgit v1.2.3 From 3667df14ff8c5a5866d3dee0c9857ef038e05d72 Mon Sep 17 00:00:00 2001 From: Daniel Dietrich Date: Tue, 2 Jun 2015 23:21:57 +0200 Subject: Applying inverse index instead of reversing a List --- src/library/scala/collection/immutable/Queue.scala | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/library/scala/collection/immutable/Queue.scala b/src/library/scala/collection/immutable/Queue.scala index 98266716cc..e40ebdbe71 100644 --- a/src/library/scala/collection/immutable/Queue.scala +++ b/src/library/scala/collection/immutable/Queue.scala @@ -60,7 +60,8 @@ class Queue[+A] protected(protected val in: List[A], protected val out: List[A]) if (n < len) out.apply(n) else { val m = n - len - if (m < in.length) in.reverse.apply(m) + val l = in.length + if (m < l) in.apply(l - m - 1) else throw new NoSuchElementException("index out of range") } } -- cgit v1.2.3 From 79436caa98ba46de644841a67ea9ff103831e574 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Wed, 3 Jun 2015 16:36:43 -0700 Subject: SI-9343 Xlint less strict on pattern sequences -Xlint:stars-align warns only if elementarity > 0, that is, if an extracted sequence is not matched entirely by a pattern sequence, that is, in SLS 8.1.9 on pattern sequences, n = 1 and that pattern is a pattern sequence. This is still only triggered if productarity > 0, that is, a non-pattern-sequence pattern is required for the match. This is a sensitive area because it borders on exhaustiveness checking: it would be preferable to verify just that the match is exhaustive, and to emit this warning only if it is not. --- .../transform/patmat/ScalacPatternExpanders.scala | 6 ++++-- test/files/neg/t7623.check | 24 ++++++++-------------- test/files/neg/t7623.scala | 8 ++++---- 3 files changed, 17 insertions(+), 21 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala b/src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala index 2753baa51d..b1783dc81f 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala @@ -112,8 +112,10 @@ trait ScalacPatternExpanders { arityError("not enough") else if (elementArity > 0 && !isSeq) arityError("too many") - else if (settings.warnStarsAlign && isSeq && productArity > 0 && (elementArity > 0 || !isStar)) - warn("A repeated case parameter or extracted sequence should be matched only by a sequence wildcard (_*).") + else if (settings.warnStarsAlign && isSeq && productArity > 0 && elementArity > 0) warn { + if (isStar) "Sequence wildcard (_*) does not align with repeated case parameter or extracted sequence; the result may be unexpected." + else "A repeated case parameter or extracted sequence is not matched by a sequence wildcard (_*), and may fail at runtime." + } aligned } diff --git a/test/files/neg/t7623.check b/test/files/neg/t7623.check index db368dd369..de35023664 100644 --- a/test/files/neg/t7623.check +++ b/test/files/neg/t7623.check @@ -1,21 +1,15 @@ -t7623.scala:19: warning: A repeated case parameter or extracted sequence should be matched only by a sequence wildcard (_*). - def f = "" match { case X(s) => } +t7623.scala:21: warning: A repeated case parameter or extracted sequence is not matched by a sequence wildcard (_*), and may fail at runtime. 
+ def g = "" match { case X(s, t) => } // warn ^ -t7623.scala:21: warning: A repeated case parameter or extracted sequence should be matched only by a sequence wildcard (_*). - def g = "" match { case X(s, t) => } +t7623.scala:23: warning: Sequence wildcard (_*) does not align with repeated case parameter or extracted sequence; the result may be unexpected. + def h = "" match { case X(s, t, u @ _*) => } // warn ^ -t7623.scala:23: warning: A repeated case parameter or extracted sequence should be matched only by a sequence wildcard (_*). - def h = "" match { case X(s, t, u @ _*) => } - ^ -t7623.scala:9: warning: A repeated case parameter or extracted sequence should be matched only by a sequence wildcard (_*). - def f = C("") match { case C(s) => } - ^ -t7623.scala:11: warning: A repeated case parameter or extracted sequence should be matched only by a sequence wildcard (_*). - def g = C("") match { case C(s, t) => } +t7623.scala:11: warning: A repeated case parameter or extracted sequence is not matched by a sequence wildcard (_*), and may fail at runtime. + def g = C("") match { case C(s, t) => } // warn ^ -t7623.scala:13: warning: A repeated case parameter or extracted sequence should be matched only by a sequence wildcard (_*). - def h = C("") match { case C(s, t, u @ _*) => } +t7623.scala:13: warning: Sequence wildcard (_*) does not align with repeated case parameter or extracted sequence; the result may be unexpected. + def h = C("") match { case C(s, t, u @ _*) => } // warn ^ error: No warnings can be incurred under -Xfatal-warnings. -6 warnings found +four warnings found one error found diff --git a/test/files/neg/t7623.scala b/test/files/neg/t7623.scala index 5c40f37bc1..5334cc5f69 100644 --- a/test/files/neg/t7623.scala +++ b/test/files/neg/t7623.scala @@ -8,9 +8,9 @@ object X { def unapplySeq(a: Any): Option[(String, Seq[Int])] = Some("", List(1, trait Ctest { def f = C("") match { case C(s) => } - def g = C("") match { case C(s, t) => } + def g = C("") match { case C(s, t) => } // warn - def h = C("") match { case C(s, t, u @ _*) => } + def h = C("") match { case C(s, t, u @ _*) => } // warn def ok = C("") match { case C(s, u @ _*) => } } @@ -18,9 +18,9 @@ trait Ctest { trait Xtest { def f = "" match { case X(s) => } - def g = "" match { case X(s, t) => } + def g = "" match { case X(s, t) => } // warn - def h = "" match { case X(s, t, u @ _*) => } + def h = "" match { case X(s, t, u @ _*) => } // warn def ok = "" match { case X(s, u @ _*) => } } -- cgit v1.2.3 From a112422e91018b7f01add6c9d40bec5eb010c321 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Thu, 4 Jun 2015 20:05:47 +0200 Subject: Compiler option for disabling nullness analysis --- .../nsc/backend/jvm/analysis/NullnessAnalyzer.scala | 4 ++-- .../tools/nsc/backend/jvm/opt/BytecodeUtils.scala | 2 +- .../scala/tools/nsc/backend/jvm/opt/CallGraph.scala | 18 +++++++++++++++--- .../scala/tools/nsc/settings/ScalaSettings.scala | 8 +++++--- 4 files changed, 23 insertions(+), 9 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzer.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzer.scala index 4c81b85d0a..2cc6d67a3c 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzer.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzer.scala @@ -121,10 +121,10 @@ sealed trait NullnessValue extends Value { def merge(other: NullnessValue) = NullnessValue(nullness merge other.nullness, isSize2) } -object NullValue extends 
NullnessValue { def nullness = Null; def isSize2 = false; override def toString = "Null" } +object NullValue extends NullnessValue { def nullness = Null; def isSize2 = false; override def toString = "Null" } object UnknownValue1 extends NullnessValue { def nullness = Unknown; def isSize2 = false; override def toString = "Unknown1" } object UnknownValue2 extends NullnessValue { def nullness = Unknown; def isSize2 = true; override def toString = "Unknown2" } -object NotNullValue extends NullnessValue { def nullness = NotNull; def isSize2 = false; override def toString = "NotNull" } +object NotNullValue extends NullnessValue { def nullness = NotNull; def isSize2 = false; override def toString = "NotNull" } object NullnessValue { def apply(nullness: Nullness, isSize2: Boolean): NullnessValue = { diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala index 911bf3d189..9bd016f964 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala @@ -333,7 +333,7 @@ object BytecodeUtils { def frameAt(instruction: AbstractInsnNode): Frame[V] = analyzer.frameAt(instruction, methodNode) } - implicit class AnalyzerExtendsions[V <: Value](val analyzer: Analyzer[V]) extends AnyVal { + implicit class AnalyzerExtensions[V <: Value](val analyzer: Analyzer[V]) extends AnyVal { def frameAt(instruction: AbstractInsnNode, methodNode: MethodNode): Frame[V] = analyzer.getFrames()(methodNode.instructions.indexOf(instruction)) } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala index c6df86b297..0932564b1f 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala @@ -8,6 +8,7 @@ package backend.jvm package opt import scala.reflect.internal.util.{NoPosition, Position} +import scala.tools.asm.tree.analysis.{Value, Analyzer, BasicInterpreter} import scala.tools.asm.{Opcodes, Type} import scala.tools.asm.tree._ import scala.collection.convert.decorateAsScala._ @@ -100,9 +101,21 @@ class CallGraph[BT <: BTypes](val btypes: BT) { // call is known to be not-null, in which case we don't have to emit a null check when inlining. // It is also used to get the stack height at the call site. 
localOpt.minimalRemoveUnreachableCode(methodNode, definingClass.internalName) - val analyzer = new NullnessAnalyzer + + val analyzer: Analyzer[_ <: Value] = { + if (compilerSettings.YoptNullnessTracking) new NullnessAnalyzer + else new Analyzer(new BasicInterpreter) + } analyzer.analyze(definingClass.internalName, methodNode) + def receiverNotNullByAnalysis(call: MethodInsnNode, numArgs: Int) = analyzer match { + case nullnessAnalyzer: NullnessAnalyzer => + val frame = nullnessAnalyzer.frameAt(call, methodNode) + frame.getStack(frame.getStackSize - 1 - numArgs).nullness == NotNull + + case _ => false + } + methodNode.instructions.iterator.asScala.collect({ case call: MethodInsnNode => val callee: Either[OptimizerWarning, Callee] = for { @@ -131,8 +144,7 @@ class CallGraph[BT <: BTypes](val btypes: BT) { val receiverNotNull = call.getOpcode == Opcodes.INVOKESTATIC || { val numArgs = Type.getArgumentTypes(call.desc).length - val frame = analyzer.frameAt(call, methodNode) - frame.getStack(frame.getStackSize - 1 - numArgs).nullness == NotNull + receiverNotNullByAnalysis(call, numArgs) } Callsite( diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index 35ee889c58..953e43eaca 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -234,15 +234,16 @@ trait ScalaSettings extends AbsScalaSettings val emptyLineNumbers = Choice("empty-line-numbers", "Eliminate unnecessary line number information.") val emptyLabels = Choice("empty-labels", "Eliminate and collapse redundant labels in the bytecode.") val compactLocals = Choice("compact-locals", "Eliminate empty slots in the sequence of local variables.") - val inlineProject = Choice("inline-project", "Inline only methods defined in the files being compiled") - val inlineGlobal = Choice("inline-global", "Inline methods from any source, including classfiles on the compile classpath") + val nullnessTracking = Choice("nullness-tracking", "Track nullness / non-nullness of local variables and apply optimizations.") + val inlineProject = Choice("inline-project", "Inline only methods defined in the files being compiled.") + val inlineGlobal = Choice("inline-global", "Inline methods from any source, including classfiles on the compile classpath.") val lNone = Choice("l:none", "Don't enable any optimizations.") private val defaultChoices = List(unreachableCode) val lDefault = Choice("l:default", "Enable default optimizations: "+ defaultChoices.mkString(","), expandsTo = defaultChoices) - private val methodChoices = List(unreachableCode, simplifyJumps, emptyLineNumbers, emptyLabels, compactLocals) + private val methodChoices = List(unreachableCode, simplifyJumps, emptyLineNumbers, emptyLabels, compactLocals, nullnessTracking) val lMethod = Choice("l:method", "Enable intra-method optimizations: "+ methodChoices.mkString(","), expandsTo = methodChoices) private val projectChoices = List(lMethod, inlineProject) @@ -264,6 +265,7 @@ trait ScalaSettings extends AbsScalaSettings def YoptEmptyLineNumbers = Yopt.contains(YoptChoices.emptyLineNumbers) def YoptEmptyLabels = Yopt.contains(YoptChoices.emptyLabels) def YoptCompactLocals = Yopt.contains(YoptChoices.compactLocals) + def YoptNullnessTracking = Yopt.contains(YoptChoices.nullnessTracking) def YoptInlineProject = Yopt.contains(YoptChoices.inlineProject) def YoptInlineGlobal = Yopt.contains(YoptChoices.inlineGlobal) -- cgit v1.2.3 From 
b2a78b3ab536658f79e4396201c730a8408d3dd2 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Wed, 3 Jun 2015 22:46:00 +0200 Subject: Fix aliasing / nullness of CHECKCAST --- .../jvm/analysis/InstructionStackEffect.scala | 5 ++--- .../backend/jvm/analysis/NullnessAnalyzer.scala | 2 ++ .../jvm/analysis/NullnessAnalyzerTest.scala | 26 ++++++++++++++++++++++ 3 files changed, 30 insertions(+), 3 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/InstructionStackEffect.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/InstructionStackEffect.scala index a7d6f74557..98e93c125b 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/analysis/InstructionStackEffect.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/InstructionStackEffect.scala @@ -249,9 +249,8 @@ object InstructionStackEffect { case ATHROW => t(1, 0) // Frame.execute consumes one stack value - case CHECKCAST => t(0, 0) - - case INSTANCEOF => t(1, 1) + case CHECKCAST | + INSTANCEOF => t(1, 1) // Frame.execute does push(pop()) for both of them case MONITORENTER | MONITOREXIT => t(1, 0) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzer.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzer.scala index 2cc6d67a3c..31710dcbee 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzer.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzer.scala @@ -179,6 +179,8 @@ final class NullnessInterpreter extends Interpreter[NullnessValue](Opcodes.ASM5) def copyOperation(insn: AbstractInsnNode, value: NullnessValue): NullnessValue = value def unaryOperation(insn: AbstractInsnNode, value: NullnessValue): NullnessValue = (insn.getOpcode: @switch) match { + case Opcodes.CHECKCAST => value + case Opcodes.NEWARRAY | Opcodes.ANEWARRAY => NullnessValue(NotNull, isSize2 = false) diff --git a/test/junit/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzerTest.scala index 3d5343e395..3a85f03da2 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzerTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzerTest.scala @@ -202,4 +202,30 @@ class NullnessAnalyzerTest extends ClearAfterClass { (end, 5, Null) // d, no change )) testNullness(a, m, insn, index, nullness) } + + @Test + def testInstanceOf(): Unit = { + val code = + """def f(a: Object) = { + | val x = a + | x.isInstanceOf[Throwable] // x and a remain unknown - INSTANCEOF doesn't throw a NPE on null + | x.toString // x and a are not null + | a.asInstanceOf[String].trim // the stack value (LOAD of local a) is still not-null after the CHECKCAST + |} + """.stripMargin + val List(m) = compileMethods(noOptCompiler)(code) + val a = newNullnessAnalyzer(m) + + val instof = "+INSTANCEOF" + val tost = "+INVOKEVIRTUAL java/lang/Object.toString" + val trim = "INVOKEVIRTUAL java/lang/String.trim" + + for ((insn, index, nullness) <- List( + (instof, 1, Unknown), // a after INSTANCEOF + (instof, 2, Unknown), // x after INSTANCEOF + (tost, 1, NotNull), + (tost, 2, NotNull), + (trim, 3, NotNull) // receiver at `trim` + )) testNullness(a, m, insn, index, nullness) + } } -- cgit v1.2.3 From 9b82ed0b8841ecac20962b30be1fa55503a86deb Mon Sep 17 00:00:00 2001 From: YawarRaza7349 Date: Fri, 5 Jun 2015 10:08:42 -0400 Subject: Clarify the definition of inheritance closure The definition now specifically mentions that C is an element of the inheritance closure of C. 
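For intuition, a small editorial illustration (not part of the spec change) of why the base clause is needed: without it, the empty set vacuously satisfies both closure rules, so "the smallest such set" would be empty.

```scala
// Editorial illustration only, not spec text.
class A; class B
class C extends A with B

// With the added clause, the inheritance closure of C is seeded with C itself,
// so the parent rule pulls in A and B (and, recursively, their parents and
// syntactic parts). Without the base clause, the empty set satisfies both
// closure conditions, and the definition would say nothing useful.
```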
--- spec/05-classes-and-objects.md | 1 + 1 file changed, 1 insertion(+) diff --git a/spec/05-classes-and-objects.md b/spec/05-classes-and-objects.md index 28abe6c3bc..8be792d3cb 100644 --- a/spec/05-classes-and-objects.md +++ b/spec/05-classes-and-objects.md @@ -395,6 +395,7 @@ class C extends A with B { type T <: C } Let $C$ be a class type. The _inheritance closure_ of $C$ is the smallest set $\mathscr{S}$ of types such that +- $C$ is in $\mathscr{S}$. - If $T$ is in $\mathscr{S}$, then every type $T'$ which forms syntactically a part of $T$ is also in $\mathscr{S}$. - If $T$ is a class type in $\mathscr{S}$, then all [parents](#templates) -- cgit v1.2.3 From c834fc2634d621599602fad3d9036df382975f9b Mon Sep 17 00:00:00 2001 From: Daniel Dietrich Date: Fri, 5 Jun 2015 23:36:10 +0200 Subject: Better names for length values --- src/library/scala/collection/immutable/Queue.scala | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/library/scala/collection/immutable/Queue.scala b/src/library/scala/collection/immutable/Queue.scala index e40ebdbe71..53af3ce158 100644 --- a/src/library/scala/collection/immutable/Queue.scala +++ b/src/library/scala/collection/immutable/Queue.scala @@ -56,12 +56,12 @@ class Queue[+A] protected(protected val in: List[A], protected val out: List[A]) * @throws java.util.NoSuchElementException if the queue is too short. */ override def apply(n: Int): A = { - val len = out.length - if (n < len) out.apply(n) + val olen = out.length + if (n < olen) out.apply(n) else { - val m = n - len - val l = in.length - if (m < l) in.apply(l - m - 1) + val m = n - olen + val ilen = in.length + if (m < ilen) in.apply(ilen - m - 1) else throw new NoSuchElementException("index out of range") } } -- cgit v1.2.3 From 9368663aa281a43789f124dbaf8e70128dafd46d Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Sun, 7 Jun 2015 12:55:51 -0700 Subject: SI-7773 Restore phase id to icode filename Having icode output files of the form `X-24.icode` went in and out of style using the long-form phase name because it broke the windows nightly build somehow. Here's hoping using just the phase id works on this year's infrastructure. As previously, the long name is still available under `-Ydebug`, because why not debug. 
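A rough sketch of the resulting file-name scheme (an illustrative helper only; the real logic lives in `Global.writeICode` in the diff below):

```scala
// Illustrative sketch of the suffix computation, not the compiler code itself.
def icodeSuffix(isModule: Boolean, phaseId: Int, phaseName: String, debug: Boolean): String = {
  val module = if (isModule) "$" else ""
  val faze   = if (debug) phaseName else f"$phaseId%02d" // short phase id keeps Windows paths short
  s"$module-$faze.icode"
}

icodeSuffix(isModule = false, phaseId = 24, phaseName = "dce", debug = false) // "-24.icode"
icodeSuffix(isModule = true,  phaseId = 24, phaseName = "dce", debug = true)  // "$-dce.icode"
```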
--- src/compiler/scala/tools/nsc/Global.scala | 22 ++++++++++++---------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 4430a84f06..3469726455 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -1675,23 +1675,25 @@ class Global(var currentSettings: Settings, var reporter: Reporter) def getFile(clazz: Symbol, suffix: String): File = getFile(clazz.sourceFile, clazz.fullName split '.', suffix) private def writeICode() { - val printer = new icodes.TextPrinter(null, icodes.linearizer) - icodes.classes.values.foreach((cls) => { - val moduleSfx = if (cls.symbol.hasModuleFlag) "$" else "" - val phaseSfx = if (settings.debug) phase else "" // only for debugging, appending the full phasename breaks windows build - val file = getFile(cls.symbol, s"$moduleSfx$phaseSfx.icode") + val printer = new icodes.TextPrinter(writer = null, icodes.linearizer) + icodes.classes.values foreach { cls => + val file = { + val module = if (cls.symbol.hasModuleFlag) "$" else "" + val faze = if (settings.debug) phase.name else f"${phase.id}%02d" // avoid breaking windows build with long filename + getFile(cls.symbol, s"$module-$faze.icode") + } try { val stream = new FileOutputStream(file) printer.setWriter(new PrintWriter(stream, true)) printer.printClass(cls) - informProgress("wrote " + file) + informProgress(s"wrote $file") } catch { - case ex: IOException => - if (settings.debug) ex.printStackTrace() - globalError("could not write file " + file) + case e: IOException => + if (settings.debug) e.printStackTrace() + globalError(s"could not write file $file") } - }) + } } def createJavadoc = false } -- cgit v1.2.3 From 874455a91568ed98e036e9a51e4c9cf28f5fb8d1 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 9 Jun 2015 11:22:05 +0200 Subject: SI-9212 Fix toolbox with varargs constructors It was already working for methods, but not for constructors. 
--- src/reflect/scala/reflect/runtime/JavaMirrors.scala | 1 + test/files/run/toolbox-varargs/Test.scala | 13 +++++++++++++ test/files/run/toolbox-varargs/Varargs.java | 8 ++++++++ 3 files changed, 22 insertions(+) create mode 100644 test/files/run/toolbox-varargs/Test.scala create mode 100644 test/files/run/toolbox-varargs/Varargs.java diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala index ce60ade9f5..8c32a92ecd 100644 --- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala +++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala @@ -1184,6 +1184,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive constr setInfo GenPolyType(tparams, MethodType(clazz.newSyntheticValueParams(paramtpes), clazz.tpe)) propagatePackageBoundary(jconstr.javaFlags, constr) copyAnnotations(constr, jconstr) + if (jconstr.javaFlags.isVarargs) constr modifyInfo arrayToRepeated markAllCompleted(constr) constr } diff --git a/test/files/run/toolbox-varargs/Test.scala b/test/files/run/toolbox-varargs/Test.scala new file mode 100644 index 0000000000..be5ab45768 --- /dev/null +++ b/test/files/run/toolbox-varargs/Test.scala @@ -0,0 +1,13 @@ +object Test { + def main(args: Array[String]): Unit = { + import scala.tools.reflect.ToolBox + val m = reflect.runtime.currentMirror + val u = m.universe + import u._ + val tb = m.mkToolBox(); + tb.compile(q"new p.Varargs(null, null)") + tb.compile(q"p.Varargs.staticMethod(null, null)") + tb.compile(q"(null: p.Varargs).instanceMethod(null, null)") + } +} + diff --git a/test/files/run/toolbox-varargs/Varargs.java b/test/files/run/toolbox-varargs/Varargs.java new file mode 100644 index 0000000000..da1dbbacc9 --- /dev/null +++ b/test/files/run/toolbox-varargs/Varargs.java @@ -0,0 +1,8 @@ +package p; + +public class Varargs { + public Varargs(String... args) {} + public static void staticMethod(String... args) {} + + public void instanceMethod(String... args) {} +} -- cgit v1.2.3 From 39a5e1aed93f7b1483802a0248295229d711e072 Mon Sep 17 00:00:00 2001 From: Prashant Sharma Date: Wed, 10 Jun 2015 10:38:54 +0530 Subject: SI-7747 More tests and logic according to our conclusions on #4522. 
--- src/repl/scala/tools/nsc/interpreter/Imports.scala | 8 ++++---- test/files/run/repl-serialization.check | 4 ++++ test/files/run/repl-serialization.scala | 5 ++++- test/files/run/t7747-repl.check | 14 +++++++++++++- test/files/run/t7747-repl.scala | 4 ++++ 5 files changed, 29 insertions(+), 6 deletions(-) diff --git a/src/repl/scala/tools/nsc/interpreter/Imports.scala b/src/repl/scala/tools/nsc/interpreter/Imports.scala index c52869e208..3ec77e46f1 100644 --- a/src/repl/scala/tools/nsc/interpreter/Imports.scala +++ b/src/repl/scala/tools/nsc/interpreter/Imports.scala @@ -166,15 +166,15 @@ trait Imports { for (imv <- x.definedNames) { if (!currentImps.contains(imv)) { x match { - case _: ValHandler | _: ModuleHandler => + case _: ClassHandler => + code.append("import " + objName + req.accessPath + ".`" + imv + "`\n") + case _ => val valName = req.lineRep.packageName + req.lineRep.readName if (!tempValLines.contains(req.lineRep.lineId)) { code.append(s"val $valName = $objName\n") tempValLines += req.lineRep.lineId } - code.append(s"import $valName ${req.accessPath}.`$imv`;\n") - case _ => - code.append("import " + objName + req.accessPath + ".`" + imv + "`\n") + code.append(s"import $valName${req.accessPath}.`$imv`;\n") } currentImps += imv } diff --git a/test/files/run/repl-serialization.check b/test/files/run/repl-serialization.check index c8439ea023..eb62729f5c 100644 --- a/test/files/run/repl-serialization.check +++ b/test/files/run/repl-serialization.check @@ -2,6 +2,8 @@ extract: AnyRef => Unit = evaluating x x: Int = 0 +getX: ()Int +defined class U y: Int = evaluating z evaluating zz @@ -11,6 +13,8 @@ zz: Int = 0 defined object O defined class A defined type alias AA +constructing U +u: U = U == evaluating lambda evaluating y evaluating O diff --git a/test/files/run/repl-serialization.scala b/test/files/run/repl-serialization.scala index 64915ce51e..55b7519631 100644 --- a/test/files/run/repl-serialization.scala +++ b/test/files/run/repl-serialization.scala @@ -25,12 +25,15 @@ object Test { val code = """val x = {println(" evaluating x"); 0 } + |def getX() = x + |class U extends Serializable { println("constructing U"); val x = 0 ; override def toString = "U" } |lazy val y = {println(" evaluating y"); 0 } |class D; val z = {println(" evaluating z"); 0}; val zz = {println(" evaluating zz"); 0} |object O extends Serializable { val apply = {println(" evaluating O"); 0} } |class A(i: Int) { println(" constructing A") } |type AA = A - |extract(() => new AA(x + y + z + zz + O.apply)) + |val u = new U() + |extract(() => new AA(x + getX() + y + z + zz + O.apply + u.x)) """.stripMargin imain = new IMain(settings) diff --git a/test/files/run/t7747-repl.check b/test/files/run/t7747-repl.check index af47f23c62..5f436ba6b1 100644 --- a/test/files/run/t7747-repl.check +++ b/test/files/run/t7747-repl.check @@ -284,6 +284,18 @@ object $read extends scala.AnyRef { } res3: List[Product with Serializable] = List(BippyBups(), PuppyPups(), Bingo()) +scala> case class Sum(exp: String, exp2: String) +defined class Sum + +scala> val a = Sum("A", "B") +a: Sum = Sum(A,B) + +scala> def b(a: Sum): String = a match { case Sum(_, _) => "Found Sum" } +b: (a: Sum)String + +scala> b(a) +res4: String = Found Sum + scala> :power ** Power User mode enabled - BEEP WHIR GYVE ** ** :phase has been set to 'typer'. ** @@ -292,6 +304,6 @@ scala> :power ** Try :help, :vals, power. 
** scala> intp.lastRequest -res4: $r.intp.Request = Request(line=def $ires3 = intp.global, 1 trees) +res5: $r.intp.Request = Request(line=def $ires3 = intp.global, 1 trees) scala> :quit diff --git a/test/files/run/t7747-repl.scala b/test/files/run/t7747-repl.scala index a681eb8851..141c2d9844 100644 --- a/test/files/run/t7747-repl.scala +++ b/test/files/run/t7747-repl.scala @@ -65,6 +65,10 @@ object Test extends ReplTest { |case class PuppyPups() |case class Bingo() |List(BippyBups(), PuppyPups(), Bingo()) // show + |case class Sum(exp: String, exp2: String) + |val a = Sum("A", "B") + |def b(a: Sum): String = a match { case Sum(_, _) => "Found Sum" } + |b(a) |:power |intp.lastRequest |""".stripMargin -- cgit v1.2.3 From b295f43e3f1968e3db21c6f5127554c885d2e3b8 Mon Sep 17 00:00:00 2001 From: YawarRaza7349 Date: Thu, 11 Jun 2015 19:41:27 -0400 Subject: Fix missing quotes in EBNF of type alias --- spec/04-basic-declarations-and-definitions.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/spec/04-basic-declarations-and-definitions.md b/spec/04-basic-declarations-and-definitions.md index 7fb5427d36..13ea72390f 100644 --- a/spec/04-basic-declarations-and-definitions.md +++ b/spec/04-basic-declarations-and-definitions.md @@ -298,7 +298,7 @@ the sequence of variable definitions ```ebnf Dcl ::= ‘type’ {nl} TypeDcl TypeDcl ::= id [TypeParamClause] [‘>:’ Type] [‘<:’ Type] -Def ::= type {nl} TypeDef +Def ::= ‘type’ {nl} TypeDef TypeDef ::= id [TypeParamClause] ‘=’ Type ``` -- cgit v1.2.3 From 9283b07d8aaca88cb8d592acaae11afeebab5cc9 Mon Sep 17 00:00:00 2001 From: Simon Ochsenreither Date: Fri, 12 Jun 2015 03:47:00 +0200 Subject: spec: Add 'Default Arguments' heading, sentence, example The sentence and the accompanying example were stolen from SID-1. --- spec/04-basic-declarations-and-definitions.md | 28 +++++++++++++++++++-------- 1 file changed, 20 insertions(+), 8 deletions(-) diff --git a/spec/04-basic-declarations-and-definitions.md b/spec/04-basic-declarations-and-definitions.md index 7fb5427d36..a8fd4cb60c 100644 --- a/spec/04-basic-declarations-and-definitions.md +++ b/spec/04-basic-declarations-and-definitions.md @@ -620,7 +620,11 @@ well as the function body, if it is present. A value parameter clause $\mathit{ps}$ consists of zero or more formal parameter bindings such as `$x$: $T$` or `$x: T = e$`, which bind value -parameters and associate them with their types. Each value parameter +parameters and associate them with their types. + +### Default Arguments + +Each value parameter declaration may optionally define a default argument. The default argument expression $e$ is type-checked with an expected type $T'$ obtained by replacing all occurences of the function's type parameters in $T$ by @@ -632,13 +636,7 @@ expression. Here, $n$ denotes the parameter's position in the method declaration. These methods are parametrized by the type parameter clause `[$\mathit{tps}\,$]` and all value parameter clauses `($\mathit{ps}_1$)$\ldots$($\mathit{ps}_{i-1}$)` preceding $p_{i,j}$. -The `$f\$$default$\$$n` methods are inaccessible for -user programs. - -The scope of a formal value parameter name $x$ comprises all subsequent -parameter clauses, as well as the method return type and the function body, if -they are given. Both type parameter names and value parameter names must -be pairwise distinct. +The `$f\$$default$\$$n` methods are inaccessible for user programs. 
###### Example In the method @@ -657,6 +655,20 @@ def compare$\$$default$\$$1[T]: Int = 0 def compare$\$$default$\$$2[T](a: T): T = a ``` +The scope of a formal value parameter name $x$ comprises all subsequent +parameter clauses, as well as the method return type and the function body, if +they are given. Both type parameter names and value parameter names must +be pairwise distinct. + +A default value which depends on earlier parameters uses the actual arguments +if they are provided, not the default arguments. + +```scala +def f(a: Int = 0)(b: Int = a + 1) = b // OK +// def f(a: Int = 0, b: Int = a + 1) // "error: not found: value a" +f(10)() // returns 11 (not 1) +``` + ### By-Name Parameters ```ebnf -- cgit v1.2.3 From a3961f5d873609658e93c6c9abfdbaa6ea8f2e30 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Sun, 14 Jun 2015 01:22:34 -0700 Subject: SI-9354 ScalaDoc members added via by-name view Eligible views were looked up by exact from type without including the by-name dodge. By-name views are now included without consideration whether ScalaDoc processes possible duplicates correctly. --- src/library/scala/collection/immutable/Stream.scala | 6 ++++++ .../scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala | 8 ++++++-- 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/src/library/scala/collection/immutable/Stream.scala b/src/library/scala/collection/immutable/Stream.scala index cf7b7e272a..7edd36dc22 100644 --- a/src/library/scala/collection/immutable/Stream.scala +++ b/src/library/scala/collection/immutable/Stream.scala @@ -1180,7 +1180,13 @@ object Stream extends SeqFactory[Stream] { * to streams. */ class ConsWrapper[A](tl: => Stream[A]) { + /** Construct a stream consisting of a given first element followed by elements + * from a lazily evaluated Stream. + */ def #::(hd: A): Stream[A] = cons(hd, tl) + /** Construct a stream consisting of the concatenation of the given stream and + * a lazily evaluated Stream. 
+ */ def #:::(prefix: Stream[A]): Stream[A] = prefix append tl } diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala index f984b4579f..778839a1f5 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala @@ -90,8 +90,12 @@ trait ModelFactoryImplicitSupport { else { val context: global.analyzer.Context = global.analyzer.rootContext(NoCompilationUnit) - val results = global.analyzer.allViewsFrom(sym.tpe_*, context, sym.typeParams) + val results = global.analyzer.allViewsFrom(sym.tpe_*, context, sym.typeParams) ++ + global.analyzer.allViewsFrom(byNameType(sym.tpe_*), context, sym.typeParams) var conversions = results.flatMap(result => makeImplicitConversion(sym, result._1, result._2, context, inTpl)) + //debug(results.mkString("All views\n ", "\n ", "\n")) + //debug(conversions.mkString("Conversions\n ", "\n ", "\n")) + // also keep empty conversions, so they appear in diagrams // conversions = conversions.filter(!_.members.isEmpty) @@ -193,7 +197,7 @@ trait ModelFactoryImplicitSupport { List(new ImplicitConversionImpl(sym, result.tree.symbol, toType, constraints, inTpl)) } catch { case i: ImplicitNotFound => - //println(" Eliminating: " + toType) + //debug(s" Eliminating: $toType") Nil } } -- cgit v1.2.3 From fea652827dbef0b49893bb438205204019f00554 Mon Sep 17 00:00:00 2001 From: Janek Bogucki Date: Tue, 16 Jun 2015 19:04:42 +0100 Subject: Improve API documentation for ListBuffer and Try --- src/library/scala/collection/mutable/ListBuffer.scala | 2 +- src/library/scala/util/Try.scala | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/library/scala/collection/mutable/ListBuffer.scala b/src/library/scala/collection/mutable/ListBuffer.scala index 8faaf97741..f9bab40a1e 100644 --- a/src/library/scala/collection/mutable/ListBuffer.scala +++ b/src/library/scala/collection/mutable/ListBuffer.scala @@ -15,7 +15,7 @@ import immutable.{List, Nil, ::} import java.io._ import scala.annotation.migration -/** A `Buffer` implementation back up by a list. It provides constant time +/** A `Buffer` implementation backed by a list. It provides constant time * prepend and append. Most other operations are linear. * * @author Matthias Zenger diff --git a/src/library/scala/util/Try.scala b/src/library/scala/util/Try.scala index 0a6a7972c2..b0eae74043 100644 --- a/src/library/scala/util/Try.scala +++ b/src/library/scala/util/Try.scala @@ -48,7 +48,7 @@ import scala.language.implicitConversions * catching exceptions along the way. The `flatMap` and `map` combinators in the above example each essentially * pass off either their successfully completed value, wrapped in the `Success` type for it to be further operated * upon by the next combinator in the chain, or the exception wrapped in the `Failure` type usually to be simply - * passed on down the chain. Combinators such as `rescue` and `recover` are designed to provide some type of + * passed on down the chain. Combinators such as `recover` and `recoverWith` are designed to provide some type of * default behavior in the case of failure. * * ''Note'': only non-fatal exceptions are caught by the combinators on `Try` (see [[scala.util.control.NonFatal]]). 
-- cgit v1.2.3 From f8fbd5dbf031a04343c795cfa99cf768add65f05 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Tue, 16 Jun 2015 17:00:25 -0700 Subject: SI-9356 more careful assertion in back-end Calling `exists` on a `Symbol` triggers unpickling, which failed for reasons I did not investigate. Replaced `sym.exists` by `sym != NoSymbol`, which is good enough here. Also replaced assertion by a `devWarning`, since the logic seems too ad-hoc to actually crash the compiler when it's invalidated. Partially reverts b45a91fe22. See also #1532. --- .../scala/tools/nsc/backend/jvm/GenASM.scala | 20 +++++++++----------- test/files/pos/t9356/Foo_2.scala | 6 ++++++ test/files/pos/t9356/MyAnnotation.java | 12 ++++++++++++ test/files/pos/t9356/Test_3.scala | 3 +++ 4 files changed, 30 insertions(+), 11 deletions(-) create mode 100644 test/files/pos/t9356/Foo_2.scala create mode 100644 test/files/pos/t9356/MyAnnotation.java create mode 100644 test/files/pos/t9356/Test_3.scala diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala index f866c0d038..76af40b330 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala @@ -617,18 +617,16 @@ abstract class GenASM extends SubComponent with BytecodeWriters { self => val internalName = cachedJN.toString() val trackedSym = jsymbol(sym) reverseJavaName.get(internalName) match { - case Some(oldsym) if oldsym.exists && trackedSym.exists => - assert( - // In contrast, neither NothingClass nor NullClass show up bytecode-level. - (oldsym == trackedSym) || (oldsym == RuntimeNothingClass) || (oldsym == RuntimeNullClass) || (oldsym.isModuleClass && (oldsym.sourceModule == trackedSym.sourceModule)), - s"""|Different class symbols have the same bytecode-level internal name: - | name: $internalName - | oldsym: ${oldsym.fullNameString} - | tracked: ${trackedSym.fullNameString} - """.stripMargin - ) - case _ => + case None => reverseJavaName.put(internalName, trackedSym) + case Some(oldsym) => + // TODO: `duplicateOk` seems pretty ad-hoc (a more aggressive version caused SI-9356 because it called oldSym.exists, which failed in the unpickler; see also SI-5031) + def duplicateOk = oldsym == NoSymbol || trackedSym == NoSymbol || (syntheticCoreClasses contains oldsym) || (oldsym.isModuleClass && (oldsym.sourceModule == trackedSym.sourceModule)) + if (oldsym != trackedSym && !duplicateOk) + devWarning(s"""|Different class symbols have the same bytecode-level internal name: + | name: $internalName + | oldsym: ${oldsym.fullNameString} + | tracked: ${trackedSym.fullNameString}""".stripMargin) } } diff --git a/test/files/pos/t9356/Foo_2.scala b/test/files/pos/t9356/Foo_2.scala new file mode 100644 index 0000000000..ab7bb44d0e --- /dev/null +++ b/test/files/pos/t9356/Foo_2.scala @@ -0,0 +1,6 @@ +class C + +trait Foo { + @annot.MyAnnotation(cls = classOf[C]) + def function: Any = ??? 
+} diff --git a/test/files/pos/t9356/MyAnnotation.java b/test/files/pos/t9356/MyAnnotation.java new file mode 100644 index 0000000000..b6c00e7356 --- /dev/null +++ b/test/files/pos/t9356/MyAnnotation.java @@ -0,0 +1,12 @@ +package annot; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +@Target(ElementType.METHOD) +@Retention(RetentionPolicy.RUNTIME) +public @interface MyAnnotation { + Class cls(); +} diff --git a/test/files/pos/t9356/Test_3.scala b/test/files/pos/t9356/Test_3.scala new file mode 100644 index 0000000000..fa1b76c9e1 --- /dev/null +++ b/test/files/pos/t9356/Test_3.scala @@ -0,0 +1,3 @@ +class Foo1 extends Foo + +class Foo2 extends Foo \ No newline at end of file -- cgit v1.2.3 From 8e0bc0bffd31d994a6911116f170347004934c55 Mon Sep 17 00:00:00 2001 From: Niko Vuokko Date: Sat, 6 Jun 2015 01:50:53 +0300 Subject: SI-9348 Fix missing last element in exclusive floating point ranges Fix exclusive floating point ranges to contain also the last element when the end-start difference is not an integer multiple of step. --- src/library/scala/math/Numeric.scala | 6 ++--- test/files/run/range.scala | 18 ++++++++------ .../immutable/RangeConsistencyTest.scala | 11 +++++++++ test/junit/scala/math/NumericTest.scala | 28 +++++++++++++++++++++- 4 files changed, 52 insertions(+), 11 deletions(-) diff --git a/src/library/scala/math/Numeric.scala b/src/library/scala/math/Numeric.scala index eafbf96993..9245798c17 100644 --- a/src/library/scala/math/Numeric.scala +++ b/src/library/scala/math/Numeric.scala @@ -134,7 +134,7 @@ object Numeric { def div(x: Float, y: Float): Float = x / y } trait FloatAsIfIntegral extends FloatIsConflicted with Integral[Float] { - def quot(x: Float, y: Float): Float = (BigDecimal(x) / BigDecimal(y)).floatValue + def quot(x: Float, y: Float): Float = (BigDecimal(x) quot BigDecimal(y)).floatValue def rem(x: Float, y: Float): Float = (BigDecimal(x) remainder BigDecimal(y)).floatValue } implicit object FloatIsFractional extends FloatIsFractional with Ordering.FloatOrdering @@ -158,7 +158,7 @@ object Numeric { def div(x: Double, y: Double): Double = x / y } trait DoubleAsIfIntegral extends DoubleIsConflicted with Integral[Double] { - def quot(x: Double, y: Double): Double = (BigDecimal(x) / BigDecimal(y)).doubleValue + def quot(x: Double, y: Double): Double = (BigDecimal(x) quot BigDecimal(y)).doubleValue def rem(x: Double, y: Double): Double = (BigDecimal(x) remainder BigDecimal(y)).doubleValue } @@ -178,7 +178,7 @@ object Numeric { def div(x: BigDecimal, y: BigDecimal): BigDecimal = x / y } trait BigDecimalAsIfIntegral extends BigDecimalIsConflicted with Integral[BigDecimal] { - def quot(x: BigDecimal, y: BigDecimal): BigDecimal = x / y + def quot(x: BigDecimal, y: BigDecimal): BigDecimal = x quot y def rem(x: BigDecimal, y: BigDecimal): BigDecimal = x remainder y } diff --git a/test/files/run/range.scala b/test/files/run/range.scala index 4637ab874d..e50d0ac6a5 100644 --- a/test/files/run/range.scala +++ b/test/files/run/range.scala @@ -36,16 +36,19 @@ object Test { def gr1 = NumericRange(x, x, x) def gr2 = NumericRange.inclusive(x, x, x) - def gr3 = NumericRange(x, x * fromInt(10), x) - def gr4 = NumericRange.inclusive(x, x * fromInt(10), x) - def gr5 = gr3.toList ::: negated.gr3.toList + def gr3 = NumericRange(x, x * fromInt(4), x * fromInt(2)) // SI-9348 + def gr4 = NumericRange(x, x * fromInt(-2), x * fromInt(-2)) + def gr5 = NumericRange(x, x * 
fromInt(10), x) + def gr6 = NumericRange.inclusive(x, x * fromInt(10), x) + def gr7 = gr3.toList ::: negated.gr3.toList def check = { assert(gr1.isEmpty && !gr2.isEmpty) - assert(gr3.size == 9 && gr4.size == 10) - assert(gr5.sum == num.zero, gr5.toString) - assert(!(gr3 contains (x * fromInt(10)))) - assert((gr4 contains (x * fromInt(10)))) + assert(gr3.size == 2 && gr4.size == 2) + assert(gr5.size == 9 && gr6.size == 10) + assert(gr7.sum == num.zero, gr7.toString) + assert(!(gr5 contains (x * fromInt(10)))) + assert(gr6 contains (x * fromInt(10))) } } @@ -55,6 +58,7 @@ object Test { val _grs = List[GR[_]]( GR(BigDecimal(5.0)), + GR(BigDecimal(0.25)), // SI-9348 GR(BigInt(5)), GR(5L), GR(5.0d), diff --git a/test/junit/scala/collection/immutable/RangeConsistencyTest.scala b/test/junit/scala/collection/immutable/RangeConsistencyTest.scala index 3980c31577..135796979d 100644 --- a/test/junit/scala/collection/immutable/RangeConsistencyTest.scala +++ b/test/junit/scala/collection/immutable/RangeConsistencyTest.scala @@ -137,4 +137,15 @@ class RangeConsistencyTest { assert( (-3 to Int.MaxValue).dropWhile(_ <= 0).length == Int.MaxValue ) assert( (-3 to Int.MaxValue).span(_ <= 0) match { case (a,b) => a.length == 4 && b.length == Int.MaxValue } ) } + + @Test + def testSI9348() { + // Test exclusive range with (end-start) != 0 (mod step) + assert( (0.0f until 0.4f by 0.25f) sameElements List(0.0f, 0.25f) ) + assert( (1.0 until 2.2 by 0.5) sameElements List(1.0, 1.5, 2.0) ) + + def bd(d: Double) = BigDecimal(d) + val bdRange = bd(-10.0) until bd(0.0) by bd(4.5) + assert( bdRange sameElements List(bd(-10.0), bd(-5.5), bd(-1.0)) ) + } } diff --git a/test/junit/scala/math/NumericTest.scala b/test/junit/scala/math/NumericTest.scala index 9bf7d4f1e4..682dcbfd75 100644 --- a/test/junit/scala/math/NumericTest.scala +++ b/test/junit/scala/math/NumericTest.scala @@ -5,6 +5,9 @@ import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 +import scala.math.Numeric.FloatAsIfIntegral + + @RunWith(classOf[JUnit4]) class NumericTest { @@ -14,5 +17,28 @@ class NumericTest { assertTrue(-0.0.abs equals 0.0) assertTrue(-0.0f.abs equals 0.0f) } -} + + /* Test for SI-9348 */ + @Test + def testFloatAsIfIntegral { + val num = scala.math.Numeric.FloatAsIfIntegral + assertTrue(num.quot(1.0f, 0.5f) equals 2.0f) + assertTrue(num.quot(1.0f, 0.3f) equals 3.0f) + } + + /* Test for SI-9348 */ + @Test + def testDoubleAsIfIntegral { + val num = scala.math.Numeric.DoubleAsIfIntegral + assertTrue(num.quot(1.0, 0.25) equals 4.0) + assertTrue(num.quot(0.5, 0.15) equals 3.0) + } + + /* Test for SI-9348 */ + @Test + def testBigDecimalAsIfIntegral { + val num = scala.math.Numeric.BigDecimalAsIfIntegral + assertTrue(num.quot(BigDecimal(2.5), BigDecimal(0.5)) equals BigDecimal(5.0)) + assertTrue(num.quot(BigDecimal(5.0), BigDecimal(2.0)) equals BigDecimal(2.0)) + }} -- cgit v1.2.3 From 43139faa4f4348b95907e06883f2fefb41ea3a3b Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Fri, 12 Jun 2015 15:19:00 +0200 Subject: Centralize dependencies on jline Code that depends on jline is now in package `scala.tools.nsc.interpreter.jline`. To make this possible, remove the `entries` functionality from `History`, and add the `historicize` method. Also provide an overload for `asStrings`. Clean up a little along the way in `JLineHistory.scala` and `JLineReader.scala`. Next step: fall back to an embedded jline when the expected jline jar is not on the classpath. 
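For reference, a sketch of the `History` additions as they are used from `ILoop` in this patch; the signatures are inferred from the call sites (`history.historicize(edited)`, `history.asStrings(index, index + len)`), so treat them as approximate:

```scala
trait History {
  // existing members elided...
  def asStrings: List[String]

  // added in this patch (signatures inferred from the ILoop call sites):
  def asStrings(from: Int, to: Int): List[String] // a slice of the recorded lines
  def historicize(text: String): Boolean          // record each line of `text`; false if unsupported
}
```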
The gist of the refactor: https://gist.github.com/adriaanm/02e110d4da0a585480c1 --- .../nsc/interpreter/ConsoleReaderHelper.scala | 165 --------------------- .../scala/tools/nsc/interpreter/Delimited.scala | 41 ----- src/repl/scala/tools/nsc/interpreter/ILoop.scala | 41 +++-- .../tools/nsc/interpreter/InteractiveReader.scala | 2 + .../tools/nsc/interpreter/JLineCompletion.scala | 1 + .../scala/tools/nsc/interpreter/JLineReader.scala | 75 ---------- src/repl/scala/tools/nsc/interpreter/Parsed.scala | 19 +++ .../scala/tools/nsc/interpreter/Tabulators.scala | 112 ++++++++++++++ .../nsc/interpreter/jline/FileBackedHistory.scala | 93 ++++++++++++ .../nsc/interpreter/jline/JLineDelimiter.scala | 25 ++++ .../tools/nsc/interpreter/jline/JLineHistory.scala | 77 ++++++++++ .../tools/nsc/interpreter/jline/JLineReader.scala | 143 ++++++++++++++++++ .../interpreter/session/FileBackedHistory.scala | 84 ----------- .../tools/nsc/interpreter/session/History.scala | 3 + .../nsc/interpreter/session/JLineHistory.scala | 49 ------ .../nsc/interpreter/session/SimpleHistory.scala | 12 +- .../tools/nsc/interpreter/session/package.scala | 5 - 17 files changed, 494 insertions(+), 453 deletions(-) delete mode 100644 src/repl/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala delete mode 100644 src/repl/scala/tools/nsc/interpreter/Delimited.scala delete mode 100644 src/repl/scala/tools/nsc/interpreter/JLineReader.scala create mode 100644 src/repl/scala/tools/nsc/interpreter/Tabulators.scala create mode 100644 src/repl/scala/tools/nsc/interpreter/jline/FileBackedHistory.scala create mode 100644 src/repl/scala/tools/nsc/interpreter/jline/JLineDelimiter.scala create mode 100644 src/repl/scala/tools/nsc/interpreter/jline/JLineHistory.scala create mode 100644 src/repl/scala/tools/nsc/interpreter/jline/JLineReader.scala delete mode 100644 src/repl/scala/tools/nsc/interpreter/session/FileBackedHistory.scala delete mode 100644 src/repl/scala/tools/nsc/interpreter/session/JLineHistory.scala diff --git a/src/repl/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala b/src/repl/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala deleted file mode 100644 index a8d537e314..0000000000 --- a/src/repl/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala +++ /dev/null @@ -1,165 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips - */ - -package scala.tools.nsc -package interpreter - -import jline.console.{ ConsoleReader, CursorBuffer } - -trait ConsoleReaderHelper { _: ConsoleReader with Tabulator => - def isAcross: Boolean - - def terminal = getTerminal() - def width = terminal.getWidth() - def height = terminal.getHeight() - - def readOneKey(prompt: String): Int - def eraseLine(): Unit - - val marginSize = 3 - - private def morePrompt = "--More--" - private def emulateMore(): Int = { - val key = readOneKey(morePrompt) - try key match { - case '\r' | '\n' => 1 - case 'q' => -1 - case _ => height - 1 - } - finally { - eraseLine() - // TODO: still not quite managing to erase --More-- and get - // back to a scala prompt without another keypress. 
- if (key == 'q') { - putString(getPrompt()) - redrawLine() - flush() - } - } - } - - override def printColumns(items: JCollection[_ <: CharSequence]): Unit = - printColumns_(items: List[String]) - - private def printColumns_(items: List[String]): Unit = if (items exists (_ != "")) { - val grouped = tabulate(items) - var linesLeft = if (isPaginationEnabled()) height - 1 else Int.MaxValue - grouped foreach { xs => - println(xs.mkString) - linesLeft -= 1 - if (linesLeft <= 0) { - linesLeft = emulateMore() - if (linesLeft < 0) - return - } - } - } -} - -trait Tabulator { - def isAcross: Boolean - def width: Int - def marginSize: Int - - protected def fits(items: Seq[String], width: Int): Boolean = ( - (items map (_.length)).sum + (items.length - 1) * marginSize < width - ) - def tabulate(items: Seq[String]): Seq[Seq[String]] = ( - if (fits(items, width)) Seq(Seq(items mkString " " * marginSize)) - else printMultiLineColumns(items) - ) - protected def columnize(ss: Seq[String]): Seq[Seq[String]] = ss map (s => Seq(s)) - protected def printMultiLineColumns(items: Seq[String]): Seq[Seq[String]] = { - import SimpleMath._ - val longest = (items map (_.length)).max - val columnWidth = longest + marginSize - val maxcols = ( - if (columnWidth >= width) 1 - else 1 max (width / columnWidth) // make sure it doesn't divide to 0 - ) - val nrows = items.size /% maxcols - val ncols = items.size /% nrows - val groupSize = ncols - val padded = items map (s"%-${columnWidth}s" format _) - val xwise = isAcross || ncols >= items.length - val grouped: Seq[Seq[String]] = - if (groupSize == 1) columnize(items) - else if (xwise) (padded grouped groupSize).toSeq - else { - val h = 1 max padded.size /% groupSize - val cols = (padded grouped h).toList - for (i <- 0 until h) yield - for (j <- 0 until groupSize) yield - if (i < cols(j).size) cols(j)(i) else "" - } - grouped - } -} - -/** Adjust the column width and number of columns to minimize the row count. 
*/ -trait VariColumnTabulator extends Tabulator { - override protected def printMultiLineColumns(items: Seq[String]): Seq[Seq[String]] = { - import SimpleMath._ - val longest = (items map (_.length)).max - val shortest = (items map (_.length)).min - val fattest = longest + marginSize - val skinny = shortest + marginSize - - // given ncols, calculate nrows and a list of column widths, or none if not possible - // if ncols > items.size, then columnWidths.size == items.size - def layout(ncols: Int): Option[(Int, Seq[Int], Seq[Seq[String]])] = { - val nrows = items.size /% ncols - val xwise = isAcross || ncols >= items.length - // max width item in each column - def maxima(rows: Seq[Seq[String]]) = - (0 until (ncols min items.size)) map { col => - val widths = for (r <- rows if r.size > col) yield r(col).length - widths.max - } - def resulting(rows: Seq[Seq[String]]) = { - val columnWidths = maxima(rows) map (_ + marginSize) - val linelen = columnWidths.sum - if (linelen <= width) Some((nrows, columnWidths, rows)) - else None - } - if (ncols == 1) resulting(columnize(items)) - else if (xwise) resulting((items grouped ncols).toSeq) - else { - val cols = (items grouped nrows).toList - val rows = - for (i <- 0 until nrows) yield - for (j <- 0 until ncols) yield - if (j < cols.size && i < cols(j).size) cols(j)(i) else "" - resulting(rows) - } - } - - if (fattest >= width) { - columnize(items) - } else { - // if every col is widest, we have at least this many cols - val mincols = 1 max (width / fattest) - // if every other col is skinniest, we have at most this many cols - val maxcols = 1 + ((width - fattest) / skinny) - val possibles = (mincols to maxcols).map(n => layout(n)).flatten - val minrows = (possibles map (_._1)).min - - // select the min ncols that results in minrows - val (_, columnWidths, sss) = (possibles find (_._1 == minrows)).get - - // format to column width - sss map (ss => ss.zipWithIndex map { - case (s, i) => s"%-${columnWidths(i)}s" format s - }) - } - } -} - -private[interpreter] object SimpleMath { - implicit class DivRem(private val i: Int) extends AnyVal { - /** i/n + if (i % n != 0) 1 else 0 */ - def /%(n: Int): Int = (i + n - 1) / n - } -} diff --git a/src/repl/scala/tools/nsc/interpreter/Delimited.scala b/src/repl/scala/tools/nsc/interpreter/Delimited.scala deleted file mode 100644 index b7f06f1d0a..0000000000 --- a/src/repl/scala/tools/nsc/interpreter/Delimited.scala +++ /dev/null @@ -1,41 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips - */ - -package scala.tools.nsc -package interpreter - -import jline.console.completer.ArgumentCompleter.{ ArgumentDelimiter, ArgumentList } - -class JLineDelimiter extends ArgumentDelimiter { - def toJLine(args: List[String], cursor: Int) = args match { - case Nil => new ArgumentList(new Array[String](0), 0, 0, cursor) - case xs => new ArgumentList(xs.toArray, xs.size - 1, xs.last.length, cursor) - } - - def delimit(buffer: CharSequence, cursor: Int) = { - val p = Parsed(buffer.toString, cursor) - toJLine(p.args, cursor) - } - def isDelimiter(buffer: CharSequence, cursor: Int) = Parsed(buffer.toString, cursor).isDelimiter -} - -trait Delimited { - self: Parsed => - - def delimited: Char => Boolean - def escapeChars: List[Char] = List('\\') - - /** Break String into args based on delimiting function. 
- */ - protected def toArgs(s: String): List[String] = - if (s == "") Nil - else (s indexWhere isDelimiterChar) match { - case -1 => List(s) - case idx => (s take idx) :: toArgs(s drop (idx + 1)) - } - - def isDelimiterChar(ch: Char) = delimited(ch) - def isEscapeChar(ch: Char): Boolean = escapeChars contains ch -} diff --git a/src/repl/scala/tools/nsc/interpreter/ILoop.scala b/src/repl/scala/tools/nsc/interpreter/ILoop.scala index 4221126caa..3ce9668b97 100644 --- a/src/repl/scala/tools/nsc/interpreter/ILoop.scala +++ b/src/repl/scala/tools/nsc/interpreter/ILoop.scala @@ -503,10 +503,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) val errless = intp compileSources new BatchSourceFile("", s"object pastel {\n$code\n}") if (errless) echo("The compiler reports no errors.") } - def historicize(text: String) = history match { - case jlh: JLineHistory => text.lines foreach jlh.add ; jlh.moveToEnd() ; true - case _ => false - } + def edit(text: String): Result = editor match { case Some(ed) => val tmp = File.makeTemp() @@ -522,7 +519,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) val res = intp interpret edited if (res == IR.Incomplete) diagnose(edited) else { - historicize(edited) + history.historicize(edited) Result(lineToRecord = Some(edited), keepRunning = true) } case None => echo("Can't read edited text. Did you delete it?") @@ -533,7 +530,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) tmp.delete() } case None => - if (historicize(text)) echo("Placing text in recent history.") + if (history.historicize(text)) echo("Placing text in recent history.") else echo(f"No EDITOR defined and you can't change history, echoing your text:%n$text") } @@ -565,10 +562,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) } import scala.collection.JavaConverters._ val index = (start - 1) max 0 - val text = history match { - case jlh: JLineHistory => jlh.entries(index).asScala.take(len) map (_.value) mkString "\n" - case _ => history.asStrings.slice(index, index + len) mkString "\n" - } + val text = history.asStrings(index, index + len) mkString "\n" edit(text) } catch { case _: NumberFormatException => echo(s"Bad range '$what'") @@ -866,16 +860,18 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) * with SimpleReader. */ def chooseReader(settings: Settings): InteractiveReader = { - if (settings.Xnojline || Properties.isEmacsShell) - SimpleReader() - else try new JLineReader( - if (settings.noCompletion) NoCompletion - else new JLineCompletion(intp) - ) - catch { - case ex @ (_: Exception | _: NoClassDefFoundError) => - echo(f"Failed to created JLineReader: ${ex}%nFalling back to SimpleReader.") - SimpleReader() + def mkJLineReader(completer: () => Completion): InteractiveReader = + try new jline.JLineReader(completer) + catch { + case ex@(_: Exception | _: NoClassDefFoundError) => + Console.println(f"Failed to created JLineReader: ${ex}%nFalling back to SimpleReader.") + SimpleReader() + } + + if (settings.Xnojline || Properties.isEmacsShell) SimpleReader() + else { + if (settings.noCompletion) mkJLineReader(() => NoCompletion) + else mkJLineReader(() => new JLineCompletion(intp)) } } @@ -896,10 +892,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) asyncMessage(power.banner) } // SI-7418 Now, and only now, can we enable TAB completion. 
- in match { - case x: JLineReader => x.consoleReader.postInit - case _ => - } + in.postInit() } // start an interpreter with the given settings diff --git a/src/repl/scala/tools/nsc/interpreter/InteractiveReader.scala b/src/repl/scala/tools/nsc/interpreter/InteractiveReader.scala index ed69d449cb..71753a3e39 100644 --- a/src/repl/scala/tools/nsc/interpreter/InteractiveReader.scala +++ b/src/repl/scala/tools/nsc/interpreter/InteractiveReader.scala @@ -13,6 +13,8 @@ import Properties.isMac /** Reads lines from an input stream */ trait InteractiveReader { + def postInit(): Unit = {} + val interactive: Boolean def reset(): Unit diff --git a/src/repl/scala/tools/nsc/interpreter/JLineCompletion.scala b/src/repl/scala/tools/nsc/interpreter/JLineCompletion.scala index c1122d4223..d878988e26 100644 --- a/src/repl/scala/tools/nsc/interpreter/JLineCompletion.scala +++ b/src/repl/scala/tools/nsc/interpreter/JLineCompletion.scala @@ -12,6 +12,7 @@ import scala.reflect.internal.util.StringOps.longestCommonPrefix // REPL completor - queries supplied interpreter for valid // completions based on current contents of buffer. +// TODO: change class name to reflect it's not specific to jline (nor does it depend on it) class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput { val global: intp.global.type = intp.global import global._ diff --git a/src/repl/scala/tools/nsc/interpreter/JLineReader.scala b/src/repl/scala/tools/nsc/interpreter/JLineReader.scala deleted file mode 100644 index b6e834a1ed..0000000000 --- a/src/repl/scala/tools/nsc/interpreter/JLineReader.scala +++ /dev/null @@ -1,75 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Stepan Koltsov - */ - -package scala.tools.nsc -package interpreter - -import jline.console.ConsoleReader -import jline.console.completer._ -import session._ -import Completion._ - -/** - * Reads from the console using JLine. - */ -class JLineReader(_completion: => Completion) extends InteractiveReader { - val interactive = true - val consoleReader = new JLineConsoleReader() - - lazy val completion = _completion - lazy val history: JLineHistory = JLineHistory() - - private def term = consoleReader.getTerminal() - def reset() = term.reset() - - def scalaToJline(tc: ScalaCompleter): Completer = new Completer { - def complete(_buf: String, cursor: Int, candidates: JList[CharSequence]): Int = { - val buf = if (_buf == null) "" else _buf - val Candidates(newCursor, newCandidates) = tc.complete(buf, cursor) - newCandidates foreach (candidates add _) - newCursor - } - } - - class JLineConsoleReader extends ConsoleReader with ConsoleReaderHelper with VariColumnTabulator { - val isAcross = interpreter.`package`.isAcross - - this setPaginationEnabled interpreter.`package`.isPaged - - // ASAP - this setExpandEvents false - - // working around protected/trait/java insufficiencies. - def goBack(num: Int): Unit = back(num) - if ((history: History) ne NoHistory) - this setHistory history - - def readOneKey(prompt: String) = { - this.print(prompt) - this.flush() - this.readCharacter() - } - def eraseLine() = consoleReader.resetPromptLine("", "", 0) - def redrawLineAndFlush(): Unit = { flush() ; drawLine() ; flush() } - - // A hook for running code after the repl is done initializing. 
- lazy val postInit: Unit = { - this setBellEnabled false - - if (completion ne NoCompletion) { - val argCompletor: ArgumentCompleter = - new ArgumentCompleter(new JLineDelimiter, scalaToJline(completion.completer())) - argCompletor setStrict false - - this addCompleter argCompletor - this setAutoprintThreshold 400 // max completion candidates without warning - } - } - } - - def redrawLine() = consoleReader.redrawLineAndFlush() - def readOneLine(prompt: String) = consoleReader readLine prompt - def readOneKey(prompt: String) = consoleReader readOneKey prompt -} diff --git a/src/repl/scala/tools/nsc/interpreter/Parsed.scala b/src/repl/scala/tools/nsc/interpreter/Parsed.scala index 672a6fd28f..5e58d3a2c4 100644 --- a/src/repl/scala/tools/nsc/interpreter/Parsed.scala +++ b/src/repl/scala/tools/nsc/interpreter/Parsed.scala @@ -8,6 +8,25 @@ package interpreter import util.returning +trait Delimited { + self: Parsed => + + def delimited: Char => Boolean + def escapeChars: List[Char] = List('\\') + + /** Break String into args based on delimiting function. + */ + protected def toArgs(s: String): List[String] = + if (s == "") Nil + else (s indexWhere isDelimiterChar) match { + case -1 => List(s) + case idx => (s take idx) :: toArgs(s drop (idx + 1)) + } + + def isDelimiterChar(ch: Char) = delimited(ch) + def isEscapeChar(ch: Char): Boolean = escapeChars contains ch +} + /** One instance of a command buffer. */ class Parsed private ( diff --git a/src/repl/scala/tools/nsc/interpreter/Tabulators.scala b/src/repl/scala/tools/nsc/interpreter/Tabulators.scala new file mode 100644 index 0000000000..75bec168eb --- /dev/null +++ b/src/repl/scala/tools/nsc/interpreter/Tabulators.scala @@ -0,0 +1,112 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2013 LAMP/EPFL + * @author Paul Phillips + */ + +package scala.tools.nsc.interpreter + +trait Tabulator { + def isAcross: Boolean + def width: Int + def marginSize: Int + + protected def fits(items: Seq[String], width: Int): Boolean = ( + (items map (_.length)).sum + (items.length - 1) * marginSize < width + ) + def tabulate(items: Seq[String]): Seq[Seq[String]] = ( + if (fits(items, width)) Seq(Seq(items mkString " " * marginSize)) + else printMultiLineColumns(items) + ) + protected def columnize(ss: Seq[String]): Seq[Seq[String]] = ss map (s => Seq(s)) + protected def printMultiLineColumns(items: Seq[String]): Seq[Seq[String]] = { + import scala.tools.nsc.interpreter.SimpleMath._ + val longest = (items map (_.length)).max + val columnWidth = longest + marginSize + val maxcols = ( + if (columnWidth >= width) 1 + else 1 max (width / columnWidth) // make sure it doesn't divide to 0 + ) + val nrows = items.size /% maxcols + val ncols = items.size /% nrows + val groupSize = ncols + val padded = items map (s"%-${columnWidth}s" format _) + val xwise = isAcross || ncols >= items.length + val grouped: Seq[Seq[String]] = + if (groupSize == 1) columnize(items) + else if (xwise) (padded grouped groupSize).toSeq + else { + val h = 1 max padded.size /% groupSize + val cols = (padded grouped h).toList + for (i <- 0 until h) yield + for (j <- 0 until groupSize) yield + if (i < cols(j).size) cols(j)(i) else "" + } + grouped + } +} + +/** Adjust the column width and number of columns to minimize the row count. 
*/ +trait VariColumnTabulator extends Tabulator { + override protected def printMultiLineColumns(items: Seq[String]): Seq[Seq[String]] = { + import scala.tools.nsc.interpreter.SimpleMath._ + val longest = (items map (_.length)).max + val shortest = (items map (_.length)).min + val fattest = longest + marginSize + val skinny = shortest + marginSize + + // given ncols, calculate nrows and a list of column widths, or none if not possible + // if ncols > items.size, then columnWidths.size == items.size + def layout(ncols: Int): Option[(Int, Seq[Int], Seq[Seq[String]])] = { + val nrows = items.size /% ncols + val xwise = isAcross || ncols >= items.length + // max width item in each column + def maxima(rows: Seq[Seq[String]]) = + (0 until (ncols min items.size)) map { col => + val widths = for (r <- rows if r.size > col) yield r(col).length + widths.max + } + def resulting(rows: Seq[Seq[String]]) = { + val columnWidths = maxima(rows) map (_ + marginSize) + val linelen = columnWidths.sum + if (linelen <= width) Some((nrows, columnWidths, rows)) + else None + } + if (ncols == 1) resulting(columnize(items)) + else if (xwise) resulting((items grouped ncols).toSeq) + else { + val cols = (items grouped nrows).toList + val rows = + for (i <- 0 until nrows) yield + for (j <- 0 until ncols) yield + if (j < cols.size && i < cols(j).size) cols(j)(i) else "" + resulting(rows) + } + } + + if (fattest >= width) { + columnize(items) + } else { + // if every col is widest, we have at least this many cols + val mincols = 1 max (width / fattest) + // if every other col is skinniest, we have at most this many cols + val maxcols = 1 + ((width - fattest) / skinny) + val possibles = (mincols to maxcols).map(n => layout(n)).flatten + val minrows = (possibles map (_._1)).min + + // select the min ncols that results in minrows + val (_, columnWidths, sss) = (possibles find (_._1 == minrows)).get + + // format to column width + sss map (ss => ss.zipWithIndex map { + case (s, i) => s"%-${columnWidths(i)}s" format s + }) + } + } +} + +private[interpreter] object SimpleMath { + implicit class DivRem(private val i: Int) extends AnyVal { + /** i/n + if (i % n != 0) 1 else 0 */ + def /%(n: Int): Int = (i + n - 1) / n + } +} diff --git a/src/repl/scala/tools/nsc/interpreter/jline/FileBackedHistory.scala b/src/repl/scala/tools/nsc/interpreter/jline/FileBackedHistory.scala new file mode 100644 index 0000000000..b6c9792ec0 --- /dev/null +++ b/src/repl/scala/tools/nsc/interpreter/jline/FileBackedHistory.scala @@ -0,0 +1,93 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2015 LAMP/EPFL + * @author Paul Phillips + */ + +package scala.tools.nsc.interpreter.jline + +import _root_.jline.console.history.PersistentHistory + + +import scala.tools.nsc.interpreter +import scala.tools.nsc.io.{File, Path} + +/** TODO: file locking. + */ +trait FileBackedHistory extends JLineHistory with PersistentHistory { + def maxSize: Int + + protected lazy val historyFile: File = FileBackedHistory.defaultFile + private var isPersistent = true + + locally { + load() + } + + def withoutSaving[T](op: => T): T = { + val saved = isPersistent + isPersistent = false + try op + finally isPersistent = saved + } + + def addLineToFile(item: CharSequence): Unit = { + if (isPersistent) + append(item + "\n") + } + + /** Overwrites the history file with the current memory. */ + protected def sync(): Unit = { + val lines = asStrings map (_ + "\n") + historyFile.writeAll(lines: _*) + } + + /** Append one or more lines to the history file. 
*/ + protected def append(lines: String*): Unit = { + historyFile.appendAll(lines: _*) + } + + def load(): Unit = { + if (!historyFile.canRead) + historyFile.createFile() + + val lines: IndexedSeq[String] = { + try historyFile.lines().toIndexedSeq + catch { + // It seems that control characters in the history file combined + // with the default codec can lead to nio spewing exceptions. Rather + // than abandon hope we'll try to read it as ISO-8859-1 + case _: Exception => + try historyFile.lines("ISO-8859-1").toIndexedSeq + catch { + case _: Exception => Vector() + } + } + } + + interpreter.repldbg("Loading " + lines.size + " into history.") + + // avoid writing to the history file + withoutSaving(lines takeRight maxSize foreach add) + // truncate the history file if it's too big. + if (lines.size > maxSize) { + interpreter.repldbg("File exceeds maximum size: truncating to " + maxSize + " entries.") + sync() + } + moveToEnd() + } + + def flush(): Unit = () + + def purge(): Unit = historyFile.truncate() +} + +object FileBackedHistory { + // val ContinuationChar = '\003' + // val ContinuationNL: String = Array('\003', '\n').mkString + + import scala.tools.nsc.Properties.userHome + + def defaultFileName = ".scala_history" + + def defaultFile: File = File(Path(userHome) / defaultFileName) +} diff --git a/src/repl/scala/tools/nsc/interpreter/jline/JLineDelimiter.scala b/src/repl/scala/tools/nsc/interpreter/jline/JLineDelimiter.scala new file mode 100644 index 0000000000..c18a9809a0 --- /dev/null +++ b/src/repl/scala/tools/nsc/interpreter/jline/JLineDelimiter.scala @@ -0,0 +1,25 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2013 LAMP/EPFL + * @author Paul Phillips + */ + +package scala.tools.nsc.interpreter.jline + +import scala.tools.nsc.interpreter + +import _root_.jline.console.completer.ArgumentCompleter.{ ArgumentDelimiter, ArgumentList } + +// implements a jline interface +class JLineDelimiter extends ArgumentDelimiter { + def toJLine(args: List[String], cursor: Int) = args match { + case Nil => new ArgumentList(new Array[String](0), 0, 0, cursor) + case xs => new ArgumentList(xs.toArray, xs.size - 1, xs.last.length, cursor) + } + + def delimit(buffer: CharSequence, cursor: Int) = { + val p = interpreter.Parsed(buffer.toString, cursor) + toJLine(p.args, cursor) + } + + def isDelimiter(buffer: CharSequence, cursor: Int) = interpreter.Parsed(buffer.toString, cursor).isDelimiter +} diff --git a/src/repl/scala/tools/nsc/interpreter/jline/JLineHistory.scala b/src/repl/scala/tools/nsc/interpreter/jline/JLineHistory.scala new file mode 100644 index 0000000000..1f6a1f7022 --- /dev/null +++ b/src/repl/scala/tools/nsc/interpreter/jline/JLineHistory.scala @@ -0,0 +1,77 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2013 LAMP/EPFL + * @author Paul Phillips + */ + +package scala.tools.nsc.interpreter.jline + +import java.util.{Iterator => JIterator, ListIterator => JListIterator} + +import _root_.jline.{console => jconsole} +import jconsole.history.History.{Entry => JEntry} +import jconsole.history.{History => JHistory} + +import scala.tools.nsc.interpreter +import scala.tools.nsc.interpreter.session.{History, SimpleHistory} + + +/** A straight scalification of the jline interface which mixes + * in the sparse jline-independent one too. 
+ */ +trait JLineHistory extends JHistory with History { + def size: Int + def isEmpty: Boolean + def index: Int + def clear(): Unit + def get(index: Int): CharSequence + def add(line: CharSequence): Unit + def replace(item: CharSequence): Unit + + def entries(index: Int): JListIterator[JEntry] + def entries(): JListIterator[JEntry] + def iterator: JIterator[JEntry] + + def current(): CharSequence + def previous(): Boolean + def next(): Boolean + def moveToFirst(): Boolean + def moveToLast(): Boolean + def moveTo(index: Int): Boolean + def moveToEnd(): Unit + + override def historicize(text: String): Boolean = { + text.lines foreach add + moveToEnd() + true + } +} + +object JLineHistory { + class JLineFileHistory extends SimpleHistory with FileBackedHistory { + override def add(item: CharSequence): Unit = { + if (!isEmpty && last == item) + interpreter.repldbg("Ignoring duplicate entry '" + item + "'") + else { + super.add(item) + addLineToFile(item) + } + } + override def toString = "History(size = " + size + ", index = " + index + ")" + + import scala.collection.JavaConverters._ + + override def asStrings(from: Int, to: Int): List[String] = + entries(from).asScala.take(to - from).map(_.value.toString).toList + + case class Entry(index: Int, value: CharSequence) extends JEntry { + override def toString = value.toString + } + + private def toEntries(): Seq[JEntry] = buf.zipWithIndex map { case (x, i) => Entry(i, x)} + def entries(idx: Int): JListIterator[JEntry] = toEntries().asJava.listIterator(idx) + def entries(): JListIterator[JEntry] = toEntries().asJava.listIterator() + def iterator: JIterator[JEntry] = toEntries().iterator.asJava + } + + def apply(): History = try new JLineFileHistory catch { case x: Exception => new SimpleHistory() } +} diff --git a/src/repl/scala/tools/nsc/interpreter/jline/JLineReader.scala b/src/repl/scala/tools/nsc/interpreter/jline/JLineReader.scala new file mode 100644 index 0000000000..414868a7e5 --- /dev/null +++ b/src/repl/scala/tools/nsc/interpreter/jline/JLineReader.scala @@ -0,0 +1,143 @@ +/** NSC -- new Scala compiler + * + * Copyright 2005-2015 LAMP/EPFL + * @author Stepan Koltsov + * @author Adriaan Moors + */ + +package scala.tools.nsc.interpreter.jline + +import java.util.{Collection => JCollection, List => JList} + +import _root_.jline.{console => jconsole} +import jconsole.completer.{Completer, ArgumentCompleter} +import jconsole.history.{History => JHistory} + + +import scala.tools.nsc.interpreter +import scala.tools.nsc.interpreter.Completion +import scala.tools.nsc.interpreter.Completion.Candidates +import scala.tools.nsc.interpreter.session.History + +/** + * Reads from the console using JLine. + * + * Eagerly instantiates all relevant JLine classes, so that we can detect linkage errors on `new JLineReader` and retry. 
+ */ +class JLineReader(completer: () => Completion) extends interpreter.InteractiveReader { + val interactive = true + + val history: History = new JLineHistory.JLineFileHistory() + + private val consoleReader = { + val reader = new JLineConsoleReader() + + reader setPaginationEnabled interpreter.`package`.isPaged + + // ASAP + reader setExpandEvents false + + reader setHistory history.asInstanceOf[JHistory] + + reader + } + + private[this] var _completion: Completion = interpreter.NoCompletion + def completion: Completion = _completion + + override def postInit() = { + _completion = completer() + + consoleReader.initCompletion(completion) + } + + def reset() = consoleReader.getTerminal().reset() + def redrawLine() = consoleReader.redrawLineAndFlush() + def readOneLine(prompt: String) = consoleReader.readLine(prompt) + def readOneKey(prompt: String) = consoleReader.readOneKey(prompt) +} + +// implements a jline interface +private class JLineConsoleReader extends jconsole.ConsoleReader with interpreter.VariColumnTabulator { + val isAcross = interpreter.`package`.isAcross + val marginSize = 3 + + def width = getTerminal.getWidth() + def height = getTerminal.getHeight() + + private def morePrompt = "--More--" + + private def emulateMore(): Int = { + val key = readOneKey(morePrompt) + try key match { + case '\r' | '\n' => 1 + case 'q' => -1 + case _ => height - 1 + } + finally { + eraseLine() + // TODO: still not quite managing to erase --More-- and get + // back to a scala prompt without another keypress. + if (key == 'q') { + putString(getPrompt()) + redrawLine() + flush() + } + } + } + + override def printColumns(items: JCollection[_ <: CharSequence]): Unit = { + import scala.tools.nsc.interpreter.javaCharSeqCollectionToScala + printColumns_(items: List[String]) + } + + private def printColumns_(items: List[String]): Unit = if (items exists (_ != "")) { + val grouped = tabulate(items) + var linesLeft = if (isPaginationEnabled()) height - 1 else Int.MaxValue + grouped foreach { xs => + println(xs.mkString) + linesLeft -= 1 + if (linesLeft <= 0) { + linesLeft = emulateMore() + if (linesLeft < 0) + return + } + } + } + + def readOneKey(prompt: String) = { + this.print(prompt) + this.flush() + this.readCharacter() + } + + def eraseLine() = resetPromptLine("", "", 0) + + def redrawLineAndFlush(): Unit = { + flush(); drawLine(); flush() + } + + // A hook for running code after the repl is done initializing. 
+ def initCompletion(completion: Completion): Unit = { + this setBellEnabled false + + if (completion ne interpreter.NoCompletion) { + val jlineCompleter = new ArgumentCompleter(new JLineDelimiter, + new Completer { + val tc = completion.completer() + def complete(_buf: String, cursor: Int, candidates: JList[CharSequence]): Int = { + val buf = if (_buf == null) "" else _buf + val Candidates(newCursor, newCandidates) = tc.complete(buf, cursor) + newCandidates foreach (candidates add _) + newCursor + } + } + ) + + jlineCompleter setStrict false + + this addCompleter jlineCompleter + this setAutoprintThreshold 400 // max completion candidates without warning + } + } +} diff --git a/src/repl/scala/tools/nsc/interpreter/session/FileBackedHistory.scala b/src/repl/scala/tools/nsc/interpreter/session/FileBackedHistory.scala deleted file mode 100644 index dddfb1b8f6..0000000000 --- a/src/repl/scala/tools/nsc/interpreter/session/FileBackedHistory.scala +++ /dev/null @@ -1,84 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips - */ - -package scala.tools.nsc -package interpreter -package session - -import scala.tools.nsc.io._ -import FileBackedHistory._ - -/** TODO: file locking. - */ -trait FileBackedHistory extends JLineHistory with JPersistentHistory { - def maxSize: Int - protected lazy val historyFile: File = defaultFile - private var isPersistent = true - - locally { - load() - } - - def withoutSaving[T](op: => T): T = { - val saved = isPersistent - isPersistent = false - try op - finally isPersistent = saved - } - def addLineToFile(item: CharSequence): Unit = { - if (isPersistent) - append(item + "\n") - } - - /** Overwrites the history file with the current memory. */ - protected def sync(): Unit = { - val lines = asStrings map (_ + "\n") - historyFile.writeAll(lines: _*) - } - /** Append one or more lines to the history file. */ - protected def append(lines: String*): Unit = { - historyFile.appendAll(lines: _*) - } - - def load(): Unit = { - if (!historyFile.canRead) - historyFile.createFile() - - val lines: IndexedSeq[String] = { - try historyFile.lines().toIndexedSeq - catch { - // It seems that control characters in the history file combined - // with the default codec can lead to nio spewing exceptions. Rather - // than abandon hope we'll try to read it as ISO-8859-1 - case _: Exception => - try historyFile.lines("ISO-8859-1").toIndexedSeq - catch { case _: Exception => Vector() } - } - } - - repldbg("Loading " + lines.size + " into history.") - - // avoid writing to the history file - withoutSaving(lines takeRight maxSize foreach add) - // truncate the history file if it's too big. - if (lines.size > maxSize) { - repldbg("File exceeds maximum size: truncating to " + maxSize + " entries.") - sync() - } - moveToEnd() - } - - def flush(): Unit = () - def purge(): Unit = historyFile.truncate() -} - -object FileBackedHistory { - // val ContinuationChar = '\003' - // val ContinuationNL: String = Array('\003', '\n').mkString - import Properties.userHome - - def defaultFileName = ".scala_history" - def defaultFile: File = File(Path(userHome) / defaultFileName) -} diff --git a/src/repl/scala/tools/nsc/interpreter/session/History.scala b/src/repl/scala/tools/nsc/interpreter/session/History.scala index 794d41adc7..2028a13dfd 100644 --- a/src/repl/scala/tools/nsc/interpreter/session/History.scala +++ b/src/repl/scala/tools/nsc/interpreter/session/History.scala @@ -11,7 +11,10 @@ package session * reference to the jline classes. Very sparse right now. 
*/ trait History { + def historicize(text: String): Boolean = false + def asStrings: List[String] + def asStrings(from: Int, to: Int): List[String] = asStrings.slice(from, to) def index: Int def size: Int } diff --git a/src/repl/scala/tools/nsc/interpreter/session/JLineHistory.scala b/src/repl/scala/tools/nsc/interpreter/session/JLineHistory.scala deleted file mode 100644 index 18e0ee7c85..0000000000 --- a/src/repl/scala/tools/nsc/interpreter/session/JLineHistory.scala +++ /dev/null @@ -1,49 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips - */ - -package scala.tools.nsc -package interpreter -package session - -/** A straight scalification of the jline interface which mixes - * in the sparse jline-independent one too. - */ -trait JLineHistory extends JHistory with History { - def size: Int - def isEmpty: Boolean - def index: Int - def clear(): Unit - def get(index: Int): CharSequence - def add(line: CharSequence): Unit - def replace(item: CharSequence): Unit - - def entries(index: Int): JListIterator[JEntry] - def entries(): JListIterator[JEntry] - def iterator: JIterator[JEntry] - - def current(): CharSequence - def previous(): Boolean - def next(): Boolean - def moveToFirst(): Boolean - def moveToLast(): Boolean - def moveTo(index: Int): Boolean - def moveToEnd(): Unit -} - -object JLineHistory { - class JLineFileHistory extends SimpleHistory with FileBackedHistory { - override def add(item: CharSequence): Unit = { - if (!isEmpty && last == item) - repldbg("Ignoring duplicate entry '" + item + "'") - else { - super.add(item) - addLineToFile(item) - } - } - override def toString = "History(size = " + size + ", index = " + index + ")" - } - - def apply(): JLineHistory = try new JLineFileHistory catch { case x: Exception => new SimpleHistory() } -} diff --git a/src/repl/scala/tools/nsc/interpreter/session/SimpleHistory.scala b/src/repl/scala/tools/nsc/interpreter/session/SimpleHistory.scala index 7c49b91296..504d0d30ee 100644 --- a/src/repl/scala/tools/nsc/interpreter/session/SimpleHistory.scala +++ b/src/repl/scala/tools/nsc/interpreter/session/SimpleHistory.scala @@ -10,10 +10,9 @@ package session import scala.collection.mutable.{ Buffer, ListBuffer } import scala.collection.JavaConverters._ -class SimpleHistory extends JLineHistory { +class SimpleHistory extends History { private var _index: Int = 0 - private val buf: Buffer[String] = new ListBuffer[String] - private def toEntries(): Seq[JEntry] = buf.zipWithIndex map { case (x, i) => Entry(i, x) } + protected val buf: Buffer[String] = new ListBuffer[String] private def setTo(num: Int) = { _index = num ; true } private def minusOne = { _index -= 1 ; true } private def plusOne = { _index += 1 ; true } @@ -25,10 +24,6 @@ class SimpleHistory extends JLineHistory { "" } - case class Entry(index: Int, value: CharSequence) extends JEntry { - override def toString = value - } - def maxSize: Int = 2500 def last = if (isEmpty) fail("last") else buf.last @@ -42,9 +37,6 @@ class SimpleHistory extends JLineHistory { buf trimEnd 1 add(item) } - def entries(idx: Int): JListIterator[JEntry] = toEntries().asJava.listIterator(idx) - def entries(): JListIterator[JEntry] = toEntries().asJava.listIterator() - def iterator: JIterator[JEntry] = toEntries().iterator.asJava def remove(idx: Int): CharSequence = buf remove idx def removeFirst(): CharSequence = buf remove 0 diff --git a/src/repl/scala/tools/nsc/interpreter/session/package.scala b/src/repl/scala/tools/nsc/interpreter/session/package.scala index 
a3d7312c98..06e7f6207b 100644 --- a/src/repl/scala/tools/nsc/interpreter/session/package.scala +++ b/src/repl/scala/tools/nsc/interpreter/session/package.scala @@ -14,10 +14,5 @@ package object session { type JIterator[T] = java.util.Iterator[T] type JListIterator[T] = java.util.ListIterator[T] - type JEntry = jline.console.history.History.Entry - type JHistory = jline.console.history.History - type JMemoryHistory = jline.console.history.MemoryHistory - type JPersistentHistory = jline.console.history.PersistentHistory - private[interpreter] implicit def charSequenceFix(x: CharSequence): String = x.toString } -- cgit v1.2.3 From cc8f2f6695e0b5c65439466e746a5df99ab36d84 Mon Sep 17 00:00:00 2001 From: vsalvis Date: Wed, 3 Jun 2015 16:20:00 +0200 Subject: SI-8858 doc: fix note about PartialFunction in Function0, F1 and F2 --- src/build/genprod.scala | 13 +++++-------- src/library/scala/Function0.scala | 8 +------- src/library/scala/Function1.scala | 7 ++----- src/library/scala/Function2.scala | 6 ------ 4 files changed, 8 insertions(+), 26 deletions(-) diff --git a/src/build/genprod.scala b/src/build/genprod.scala index ed436fe2e4..b470348e8c 100644 --- a/src/build/genprod.scala +++ b/src/build/genprod.scala @@ -123,7 +123,10 @@ object FunctionOne extends Function(1) { * def apply(x: Int): Int = x + 1 * } * assert(succ(0) == anonfun1(0)) - * """) + * """) + """ + * + * Note that the difference between `Function1` and [[scala.PartialFunction]] + * is that the latter can specify inputs which it will not handle.""" override def moreMethods = """ /** Composes two instances of Function1 in a new Function1, with this function applied last. @@ -178,13 +181,7 @@ class Function(val i: Int) extends Group("Function") with Arity { * * {{{ * object Main extends App {%s} - * }}} - * - * Note that `Function1` does not define a total function, as might - * be suggested by the existence of [[scala.PartialFunction]]. The only - * distinction between `Function1` and `PartialFunction` is that the - * latter can specify inputs which it will not handle. -""" + * }}}""" def toStr() = "\"" + ("" format i) + "\"" def apply() = { diff --git a/src/library/scala/Function0.scala b/src/library/scala/Function0.scala index e13aaad7bc..15d0f14938 100644 --- a/src/library/scala/Function0.scala +++ b/src/library/scala/Function0.scala @@ -6,7 +6,7 @@ ** |/ ** \* */ // GENERATED CODE: DO NOT EDIT. -// genprod generated these sources at: Sun Sep 15 20:42:00 CEST 2013 +// genprod generated these sources at: Mon Jun 08 18:05:40 CEST 2015 package scala @@ -26,12 +26,6 @@ package scala * assert(javaVersion() == anonfun0()) * } * }}} - * - * Note that `Function1` does not define a total function, as might - * be suggested by the existence of [[scala.PartialFunction]]. The only - * distinction between `Function1` and `PartialFunction` is that the - * latter can specify inputs which it will not handle. - */ trait Function0[@specialized(Specializable.Primitives) +R] extends AnyRef { self => /** Apply the body of this function to the arguments. diff --git a/src/library/scala/Function1.scala b/src/library/scala/Function1.scala index 620dcc19aa..572901c6f3 100644 --- a/src/library/scala/Function1.scala +++ b/src/library/scala/Function1.scala @@ -25,11 +25,8 @@ package scala * } * }}} * - * Note that `Function1` does not define a total function, as might - * be suggested by the existence of [[scala.PartialFunction]]. 
The only - * distinction between `Function1` and `PartialFunction` is that the - * latter can specify inputs which it will not handle. - + * Note that the difference between `Function1` and [[scala.PartialFunction]] + * is that the latter can specify inputs which it will not handle. */ @annotation.implicitNotFound(msg = "No implicit view available from ${T1} => ${R}.") trait Function1[@specialized(scala.Int, scala.Long, scala.Float, scala.Double) -T1, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) +R] extends AnyRef { self => diff --git a/src/library/scala/Function2.scala b/src/library/scala/Function2.scala index 5690adb56a..e2c094ea40 100644 --- a/src/library/scala/Function2.scala +++ b/src/library/scala/Function2.scala @@ -25,12 +25,6 @@ package scala * assert(max(0, 1) == anonfun2(0, 1)) * } * }}} - * - * Note that `Function1` does not define a total function, as might - * be suggested by the existence of [[scala.PartialFunction]]. The only - * distinction between `Function1` and `PartialFunction` is that the - * latter can specify inputs which it will not handle. - */ trait Function2[@specialized(scala.Int, scala.Long, scala.Double) -T1, @specialized(scala.Int, scala.Long, scala.Double) -T2, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) +R] extends AnyRef { self => /** Apply the body of this function to the arguments. -- cgit v1.2.3 From 59006209a5be5afc1724e0b14a2fdde042c8953e Mon Sep 17 00:00:00 2001 From: vsalvis Date: Wed, 3 Jun 2015 16:36:45 +0200 Subject: SI-8543 doc: Move TODO out of NumericRange's scaladoc --- src/library/scala/collection/immutable/NumericRange.scala | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/library/scala/collection/immutable/NumericRange.scala b/src/library/scala/collection/immutable/NumericRange.scala index f1ac161e9a..28e56a6d87 100644 --- a/src/library/scala/collection/immutable/NumericRange.scala +++ b/src/library/scala/collection/immutable/NumericRange.scala @@ -12,6 +12,9 @@ package immutable import mutable.{ Builder, ListBuffer } +// TODO: Now the specialization exists there is no clear reason to have +// separate classes for Range/NumericRange. Investigate and consolidate. + /** `NumericRange` is a more generic version of the * `Range` class which works with arbitrary types. * It must be supplied with an `Integral` implementation of the @@ -28,9 +31,6 @@ import mutable.{ Builder, ListBuffer } * assert(r1 sameElements r2.map(_ - veryBig)) * }}} * - * TODO: Now the specialization exists there is no clear reason to have - * separate classes for Range/NumericRange. Investigate and consolidate. - * * @author Paul Phillips * @version 2.8 * @define Coll `NumericRange` @@ -266,7 +266,7 @@ object NumericRange { // Numbers may be big. 
val one = num.one val limit = num.fromInt(Int.MaxValue) - def check(t: T): T = + def check(t: T): T = if (num.gt(t, limit)) throw new IllegalArgumentException("More than Int.MaxValue elements.") else t // If the range crosses zero, it might overflow when subtracted -- cgit v1.2.3 From 479fedbf4630e80397bc62a2ac44116761ff21b9 Mon Sep 17 00:00:00 2001 From: vsalvis Date: Wed, 3 Jun 2015 22:26:03 +0200 Subject: SI-6131 doc: remove broken show member link from all scaladoc pages --- src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala | 1 - 1 file changed, 1 deletion(-) diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala index c384ed7034..81036b4908 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala +++ b/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala @@ -177,7 +177,6 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
            Hide All
            Show all
-
    Learn more about member selection } { -- cgit v1.2.3 From 5ab401084141d37c03dc29c9028917b92e56ca68 Mon Sep 17 00:00:00 2001 From: Zhong Sheng Date: Thu, 18 Jun 2015 10:43:18 +0800 Subject: make BigDecimalTest.testMathContext a bit easier to understand --- test/junit/scala/math/BigDecimalTest.scala | 27 ++++++++++++++------------- 1 file changed, 14 insertions(+), 13 deletions(-) diff --git a/test/junit/scala/math/BigDecimalTest.scala b/test/junit/scala/math/BigDecimalTest.scala index a801204cb2..a9e2481f37 100644 --- a/test/junit/scala/math/BigDecimalTest.scala +++ b/test/junit/scala/math/BigDecimalTest.scala @@ -233,27 +233,28 @@ class BigDecimalTest { @Test def testMathContext() { def testPrecision() { - val e = 1000 + val p = 1000 + val n = BigDecimal("1.1", MC.UNLIMITED).pow(p) - val n = BigDecimal("1.1", MC.UNLIMITED).pow(e) - assert(BigDecimal(1.1d, MC.UNLIMITED).pow(e) == n) - assert(BigDecimal.decimal(1.1d, MC.UNLIMITED).pow(e) == n) - assert(BigDecimal.decimal(1.1f, MC.UNLIMITED).pow(e) == n) - assert(BigDecimal.decimal(new BD("1.1"), MC.UNLIMITED).pow(e) == n) + // BigDecimal(x: Float, mc: MC), which may not do what you want, is deprecated + assert(BigDecimal(1.1f, MC.UNLIMITED).pow(p) == BigDecimal(java.lang.Double.toString(1.1f.toDouble), MC.UNLIMITED).pow(p)) + assert(BigDecimal(1.1d, MC.UNLIMITED).pow(p) == n) + assert(BigDecimal(new BD("1.1"), MC.UNLIMITED).pow(p) == n) - val m = BigDecimal(java.lang.Double.toString(1.1f.toDouble), MC.UNLIMITED).pow(e) - assert(BigDecimal(1.1f, MC.UNLIMITED).pow(e) == m) // deprecated + assert(BigDecimal.decimal(1.1f, MC.UNLIMITED).pow(p) == n) + assert(BigDecimal.decimal(1.1d, MC.UNLIMITED).pow(p) == n) + assert(BigDecimal.decimal(new BD("1.1"), MC.UNLIMITED).pow(p) == n) - val l = BigDecimal("11", MC.UNLIMITED).pow(e) - assert(BigDecimal(11, MC.UNLIMITED).pow(e) == l) - assert(BigDecimal.decimal(11, MC.UNLIMITED).pow(e) == l) + assert((BigDecimal(11, MC.UNLIMITED) / 10).pow(p) == n) + assert((BigDecimal.decimal(11, MC.UNLIMITED) / 10).pow(p) == n) } def testRounded() { + // the default rounding mode is HALF_UP + assert((BigDecimal(1.23f, new MC(3)) + BigDecimal("0.005")).rounded == BigDecimal("1.24")) // deprecated api assert((BigDecimal(1.23d, new MC(3)) + BigDecimal("0.005")).rounded == BigDecimal("1.24")) - assert((BigDecimal(1.23f, new MC(3)) + BigDecimal("0.005")).rounded == BigDecimal("1.24")) // deprecated - assert((BigDecimal.decimal(1.23d, new MC(3)) + BigDecimal("0.005")).rounded == BigDecimal("1.24")) assert((BigDecimal.decimal(1.23f, new MC(3)) + BigDecimal("0.005")).rounded == BigDecimal("1.24")) + assert((BigDecimal.decimal(1.23d, new MC(3)) + BigDecimal("0.005")).rounded == BigDecimal("1.24")) } testPrecision() -- cgit v1.2.3 From ce7d2e95ce1f2bc6e601fb31f4c1fefa39d0d222 Mon Sep 17 00:00:00 2001 From: Janek Bogucki Date: Thu, 18 Jun 2015 08:30:44 +0100 Subject: Fix some typos (a-c) --- src/compiler/scala/tools/nsc/backend/opt/Inliners.scala | 2 +- .../scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala | 2 +- src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala | 2 +- src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala | 2 +- src/compiler/scala/tools/nsc/typechecker/Typers.scala | 2 +- src/reflect/scala/reflect/internal/AnnotationCheckers.scala | 2 +- src/reflect/scala/reflect/internal/Symbols.scala | 2 +- src/scaladoc/scala/tools/nsc/doc/model/Entity.scala | 4 ++-- .../scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala | 2 +- test/files/jvm/bytecode-test-example/Test.scala | 2 +- 
test/files/jvm/t7006/Foo_1.scala | 2 +- test/files/neg/names-defaults-neg.check | 4 ++-- test/files/neg/names-defaults-neg.scala | 4 ++-- test/files/pos/t6575b.scala | 2 +- test/files/run/t6502.scala | 2 +- test/pending/pos/t1786.scala | 2 +- test/pending/run/idempotency-partial-functions.scala | 2 +- test/scaladoc/resources/implicits-base-res.scala | 2 +- 18 files changed, 21 insertions(+), 21 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala index 8f6fc65706..8cd2a14066 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala @@ -773,7 +773,7 @@ abstract class Inliners extends SubComponent { staleOut += block - tfa.remainingCALLs.remove(instr) // this bookkpeeping is done here and not in MTFAGrowable.reinit due to (1st) convenience and (2nd) necessity. + tfa.remainingCALLs.remove(instr) // this bookkeeping is done here and not in MTFAGrowable.reinit due to (1st) convenience and (2nd) necessity. tfa.isOnWatchlist.remove(instr) // ditto tfa.warnIfInlineFails.remove(instr) diff --git a/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala index 84e21a3ccd..85c7c3c843 100644 --- a/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala +++ b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala @@ -61,7 +61,7 @@ object ZipAndJarFlatClassPathFactory extends ZipAndJarFileLookupFactory { } /** - * This type of classpath is closly related to the support for JSR-223. + * This type of classpath is closely related to the support for JSR-223. * Its usage can be observed e.g. when running: * jrunscript -classpath scala-compiler.jar;scala-reflect.jar;scala-library.jar -l scala * with a particularly prepared scala-library.jar. It should have all classes listed in the manifest like e.g. this entry: diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala index a22428075c..4f5589fd7c 100644 --- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala +++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala @@ -373,7 +373,7 @@ abstract class SymbolLoaders { protected def doComplete(root: Symbol) { root.sourceModule.initialize } } - /** used from classfile parser to avoid cyclies */ + /** used from classfile parser to avoid cycles */ var parentsLevel = 0 var pendingLoadActions: List[() => Unit] = Nil } diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala index 6302e34ac9..451b72d498 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala @@ -248,7 +248,7 @@ trait MatchTranslation { if (caseDefs forall treeInfo.isCatchCase) caseDefs else { val swatches = { // switch-catches - // SI-7459 must duplicate here as we haven't commited to switch emission, and just figuring out + // SI-7459 must duplicate here as we haven't committed to switch emission, and just figuring out // if we can ends up mutating `caseDefs` down in the use of `substituteSymbols` in // `TypedSubstitution#Substitution`. That is called indirectly by `emitTypeSwitch`. 
val bindersAndCases = caseDefs.map(_.duplicate) map { caseDef => diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 27a574a449..8dd65a78ed 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -4443,7 +4443,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def onError(typeErrors: Seq[AbsTypeError], warnings: Seq[(Position, String)]): Tree = { if (Statistics.canEnable) Statistics.stopTimer(failedApplyNanos, start) - // If the problem is with raw types, copnvert to existentials and try again. + // If the problem is with raw types, convert to existentials and try again. // See #4712 for a case where this situation arises, if ((fun.symbol ne null) && fun.symbol.isJavaDefined) { val newtpe = rawToExistential(fun.tpe) diff --git a/src/reflect/scala/reflect/internal/AnnotationCheckers.scala b/src/reflect/scala/reflect/internal/AnnotationCheckers.scala index 74310e1c34..1ba014d19d 100644 --- a/src/reflect/scala/reflect/internal/AnnotationCheckers.scala +++ b/src/reflect/scala/reflect/internal/AnnotationCheckers.scala @@ -60,7 +60,7 @@ trait AnnotationCheckers { * mode (see method adapt in trait Typers). * * An implementation cannot rely on canAdaptAnnotations being called before. If the implementing - * class cannot do the adaptiong, it should return the tree unchanged. + * class cannot do the adapting, it should return the tree unchanged. */ @deprecated("Create an AnalyzerPlugin and use adaptAnnotations", "2.10.1") def adaptAnnotations(tree: Tree, mode: Mode, pt: Type): Tree = tree diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index abe966920b..285d59c5e2 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -987,7 +987,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => || isLocalToBlock ) ) - /** Is this symbol effectively final or a concrete term member of sealed class whose childred do not override it */ + /** Is this symbol effectively final or a concrete term member of sealed class whose children do not override it */ final def isEffectivelyFinalOrNotOverridden: Boolean = isEffectivelyFinal || (isTerm && !isDeferred && isNotOverridden) /** Is this symbol owned by a package? 
*/ diff --git a/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala b/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala index 7fe8903c76..c2b1fa6bfb 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala @@ -484,10 +484,10 @@ trait ImplicitConversion { /** The entity for the method that performed the conversion, if it's documented (or just its name, otherwise) */ def convertorMethod: Either[MemberEntity, String] - /** A short name of the convertion */ + /** A short name of the conversion */ def conversionShortName: String - /** A qualified name uniquely identifying the convertion (currently: the conversion method's qualified name) */ + /** A qualified name uniquely identifying the conversion (currently: the conversion method's qualified name) */ def conversionQualifiedName: String /** The entity that performed the conversion */ diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala index f984b4579f..27c3d39269 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala @@ -475,7 +475,7 @@ trait ModelFactoryImplicitSupport { } /** - * Make implicits explicit - Not used curently + * Make implicits explicit - Not used currently */ // object implicitToExplicit extends TypeMap { // def apply(tp: Type): Type = mapOver(tp) match { diff --git a/test/files/jvm/bytecode-test-example/Test.scala b/test/files/jvm/bytecode-test-example/Test.scala index d668059cb7..0da54d5bde 100644 --- a/test/files/jvm/bytecode-test-example/Test.scala +++ b/test/files/jvm/bytecode-test-example/Test.scala @@ -17,7 +17,7 @@ object Test extends BytecodeTest { def countNullChecks(insnList: InsnList): Int = { /** Is given instruction a null check? * NOTE - * This will detect direct null compparsion as in + * This will detect direct null comparison as in * if (x == null) ... 
* and not indirect as in * val foo = null diff --git a/test/files/jvm/t7006/Foo_1.scala b/test/files/jvm/t7006/Foo_1.scala index 995619ce6b..3985557d9f 100644 --- a/test/files/jvm/t7006/Foo_1.scala +++ b/test/files/jvm/t7006/Foo_1.scala @@ -5,6 +5,6 @@ class Foo_1 { } finally { print("hello") } - while(true){} // ensure infinite loop doesn't break the algoirthm + while(true){} // ensure infinite loop doesn't break the algorithm } } diff --git a/test/files/neg/names-defaults-neg.check b/test/files/neg/names-defaults-neg.check index 2db24b6f32..a43bf66811 100644 --- a/test/files/neg/names-defaults-neg.check +++ b/test/files/neg/names-defaults-neg.check @@ -64,7 +64,7 @@ names-defaults-neg.scala:49: error: ambiguous reference to overloaded definition both method g in object t7 of type (a: B)String and method g in object t7 of type (a: C, b: Int*)String match argument types (C) - t7.g(new C()) // ambigous reference + t7.g(new C()) // ambiguous reference ^ names-defaults-neg.scala:53: error: parameter 'b' is already specified at parameter position 2 test5(a = 1, b = "dkjl", b = "dkj") @@ -79,7 +79,7 @@ names-defaults-neg.scala:61: error: ambiguous reference to overloaded definition both method f in object t8 of type (b: String, a: Int)String and method f in object t8 of type (a: Int, b: Object)String match argument types (a: Int,b: String) and expected result type Any - println(t8.f(a = 0, b = "1")) // ambigous reference + println(t8.f(a = 0, b = "1")) // ambiguous reference ^ names-defaults-neg.scala:69: error: wrong number of arguments for pattern A1(x: Int,y: String) A1() match { case A1(_) => () } diff --git a/test/files/neg/names-defaults-neg.scala b/test/files/neg/names-defaults-neg.scala index 042f73708c..a97b590bf2 100644 --- a/test/files/neg/names-defaults-neg.scala +++ b/test/files/neg/names-defaults-neg.scala @@ -46,7 +46,7 @@ object Test extends App { def g(a: C, b: Int*) = "third" def g(a: B) = "fourth" } - t7.g(new C()) // ambigous reference + t7.g(new C()) // ambiguous reference // vararg def test5(a: Int, b: String*) = a @@ -58,7 +58,7 @@ object Test extends App { def f(a: Int, b: Object) = "first" def f(b: String, a: Int) = "second" } - println(t8.f(a = 0, b = "1")) // ambigous reference + println(t8.f(a = 0, b = "1")) // ambiguous reference // case class copy does not exist if there's a vararg diff --git a/test/files/pos/t6575b.scala b/test/files/pos/t6575b.scala index d3e58b2a16..c89424287a 100644 --- a/test/files/pos/t6575b.scala +++ b/test/files/pos/t6575b.scala @@ -1,5 +1,5 @@ // inferred types were okay here as Function nodes aren't -// translated into anoymous subclasses of AbstractFunctionN +// translated into anonymous subclasses of AbstractFunctionN // until after the typer. // // So this test is just confirmation. diff --git a/test/files/run/t6502.scala b/test/files/run/t6502.scala index 52fabef6b8..d6b15a0189 100644 --- a/test/files/run/t6502.scala +++ b/test/files/run/t6502.scala @@ -123,7 +123,7 @@ object Test extends StoreReporterDirectTest { } def test6(): Unit = { - // Avoid java.lang.NoClassDefFoundError triggered by the old appoach of using a Java + // Avoid java.lang.NoClassDefFoundError triggered by the old approach of using a Java // classloader to parse .class files in order to read their names. 
val jar = "test6.jar" compileCode(app6, jar) diff --git a/test/pending/pos/t1786.scala b/test/pending/pos/t1786.scala index 6299eb9eae..16ce4301bc 100644 --- a/test/pending/pos/t1786.scala +++ b/test/pending/pos/t1786.scala @@ -1,5 +1,5 @@ /** This a consequence of the current type checking algorithm, where bounds are checked only after variables are instantiated. - * I believe this will change once we go to contraint-based type inference. + * I believe this will change once we go to constraint-based type inference. * Alternatively, we can pursue a more extensive fix to SI-6169 * * The below code shows a compiler flaw in that the wildcard "_" as value for a bounded type parameter either diff --git a/test/pending/run/idempotency-partial-functions.scala b/test/pending/run/idempotency-partial-functions.scala index b26c442599..c9d650ca89 100644 --- a/test/pending/run/idempotency-partial-functions.scala +++ b/test/pending/run/idempotency-partial-functions.scala @@ -6,7 +6,7 @@ import scala.tools.reflect.Eval // Related to SI-6187 // // Moved to pending as we are currently blocked by the inability -// to reify the parent types of the anoymous function class, +// to reify the parent types of the anonymous function class, // which are not part of the tree, but rather only part of the // ClassInfoType. object Test extends App { diff --git a/test/scaladoc/resources/implicits-base-res.scala b/test/scaladoc/resources/implicits-base-res.scala index 1d17e9a6d3..559d21997f 100644 --- a/test/scaladoc/resources/implicits-base-res.scala +++ b/test/scaladoc/resources/implicits-base-res.scala @@ -52,7 +52,7 @@ object A { * def convToGtColonDoubleA(x: Double) // enrichA3: no constraints * def convToManifestA(x: Double) // enrichA7: no constraints * def convToMyNumericA(x: Double) // enrichA6: (if showAll is set) with a constraint that there is x: MyNumeric[Double] implicit in scope - * def convToNumericA(x: Double) // enrichA1: no constraintsd + * def convToNumericA(x: Double) // enrichA1: no constraints * def convToEnrichedA(x: Bar[Foo[Double]]) // enrichA5: no constraints, SHADOWED * def convToEnrichedA(x: Double) // enrichA0: no constraints, SHADOWED * def convToTraversableOps(x: Double) // enrichA7: no constraints -- cgit v1.2.3 From 2dcc4c42fdcefee08add9dbdcf619ab5da745674 Mon Sep 17 00:00:00 2001 From: Michał Pociecha Date: Wed, 17 Jun 2015 11:46:31 +0200 Subject: Fix another several typos I just used text search to check whether there are no more typos like these corrected by janekdb, and by the way fixed also some other ones which I saw. 
--- src/compiler/scala/tools/nsc/typechecker/Typers.scala | 2 +- src/scaladoc/scala/tools/nsc/doc/Settings.scala | 4 ++-- src/scaladoc/scala/tools/nsc/doc/model/Entity.scala | 2 +- .../scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala | 4 ++-- src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala | 2 +- test/files/neg/t2866.check | 2 +- test/files/neg/t2866.scala | 2 +- test/files/pos/t6648.scala | 2 +- test/scaladoc/run/implicits-base.scala | 2 +- test/scaladoc/scalacheck/HtmlFactoryTest.scala | 2 +- 10 files changed, 12 insertions(+), 12 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 8dd65a78ed..b5129af9ec 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -4988,7 +4988,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper TypeTreeWithDeferredRefCheck(){ () => // wrap the tree and include the bounds check -- refchecks will perform this check (that the beta reduction was indeed allowed) and unwrap // we can't simply use original in refchecks because it does not contains types - // (and the only typed trees we have have been mangled so they're not quite the original tree anymore) + // (and the only typed trees we have been mangled so they're not quite the original tree anymore) checkBounds(result, tpt1.tpe.prefix, tpt1.symbol.owner, tpt1.symbol.typeParams, argtypes, "") result // you only get to see the wrapped tree after running this check :-p } setType (result.tpe) setPos(result.pos) diff --git a/src/scaladoc/scala/tools/nsc/doc/Settings.scala b/src/scaladoc/scala/tools/nsc/doc/Settings.scala index 44683f1755..067b2b2c29 100644 --- a/src/scaladoc/scala/tools/nsc/doc/Settings.scala +++ b/src/scaladoc/scala/tools/nsc/doc/Settings.scala @@ -143,7 +143,7 @@ class Settings(error: String => Unit, val printMsg: String => Unit = println(_)) "dot" // by default, just pick up the system-wide dot ) - /** The maxium nuber of normal classes to show in the diagram */ + /** The maximum number of normal classes to show in the diagram */ val docDiagramsMaxNormalClasses = IntSetting( "-diagrams-max-classes", "The maximum number of superclasses or subclasses to show in a diagram", @@ -152,7 +152,7 @@ class Settings(error: String => Unit, val printMsg: String => Unit = println(_)) _ => None ) - /** The maxium nuber of implcit classes to show in the diagram */ + /** The maximum number of implicit classes to show in the diagram */ val docDiagramsMaxImplicitClasses = IntSetting( "-diagrams-max-implicits", "The maximum number of implicitly converted classes to show in a diagram", diff --git a/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala b/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala index c2b1fa6bfb..90de51d763 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala @@ -298,7 +298,7 @@ trait DocTemplateEntity extends MemberTemplateEntity { /** The shadowing information for the implicitly added members */ def implicitsShadowing: Map[MemberEntity, ImplicitMemberShadowing] - /** Classes that can be implcitly converted to this class */ + /** Classes that can be implicitly converted to this class */ def incomingImplicitlyConvertedClasses: List[(DocTemplateEntity, ImplicitConversion)] /** Classes to which this class can be implicitly converted to diff --git 
a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala index 27c3d39269..559bcea80d 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala @@ -396,7 +396,7 @@ trait ModelFactoryImplicitSupport { def isHiddenConversion = settings.hiddenImplicits(conversionQualifiedName) - override def toString = "Implcit conversion from " + sym.tpe + " to " + toType + " done by " + convSym + override def toString = "Implicit conversion from " + sym.tpe + " to " + toType + " done by " + convSym } /* ========================= HELPER METHODS ========================== */ @@ -557,7 +557,7 @@ trait ModelFactoryImplicitSupport { * * The trick here is that the resultType does not matter - the condition for removal it that paramss have the same * structure (A => B => C may not override (A, B) => C) and that all the types involved are - * of the implcit conversion's member are subtypes of the parent members' parameters */ + * of the implicit conversion's member are subtypes of the parent members' parameters */ def isDistinguishableFrom(t1: Type, t2: Type): Boolean = { // Vlad: I tried using matches but it's not exactly what we need: // (p: AnyRef)AnyRef matches ((t: String)AnyRef returns false -- but we want that to be true diff --git a/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala index 87d7ece8f2..093899231e 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala @@ -51,7 +51,7 @@ trait DiagramFactory extends DiagramDirectiveParser { case p: (TemplateEntity, TypeEntity) if !classExcluded(p._1) => NormalNode(p._2, Some(p._1))() }.reverse - // incoming implcit conversions + // incoming implicit conversions lazy val incomingImplicitNodes = tpl.incomingImplicitlyConvertedClasses.map { case (incomingTpl, conv) => ImplicitNode(makeType(incomingTpl.sym.tpe, tpl), Some(incomingTpl))(implicitTooltip(from=incomingTpl, to=tpl, conv=conv)) diff --git a/test/files/neg/t2866.check b/test/files/neg/t2866.check index 340fb8da22..bc0da7e355 100644 --- a/test/files/neg/t2866.check +++ b/test/files/neg/t2866.check @@ -5,7 +5,7 @@ t2866.scala:42: error: ambiguous implicit values: both value two of type Int and value one in object A of type => Int match expected type Int - assert(implicitly[Int] == 2) // !!! Not ambiguous in 2.8.0. Ambigous in 2.7.6 + assert(implicitly[Int] == 2) // !!! Not ambiguous in 2.8.0. Ambiguous in 2.7.6 ^ t2866.scala:50: error: ambiguous implicit values: both value two of type Int diff --git a/test/files/neg/t2866.scala b/test/files/neg/t2866.scala index 55ebff9710..6be8bf9e89 100644 --- a/test/files/neg/t2866.scala +++ b/test/files/neg/t2866.scala @@ -39,7 +39,7 @@ object Test { import A.one assert(implicitly[Int] == 1) implicit val two = 2 - assert(implicitly[Int] == 2) // !!! Not ambiguous in 2.8.0. Ambigous in 2.7.6 + assert(implicitly[Int] == 2) // !!! Not ambiguous in 2.8.0. 
Ambiguous in 2.7.6 } locally { diff --git a/test/files/pos/t6648.scala b/test/files/pos/t6648.scala index 9593ebfee9..b8f24870cc 100644 --- a/test/files/pos/t6648.scala +++ b/test/files/pos/t6648.scala @@ -10,7 +10,7 @@ class Transformer { } object transformer1 extends Transformer { - // Adding explicit type arguments, or making the impilcit view + // Adding explicit type arguments, or making the implicit view // seqToNodeSeq explicit avoids the crash NodeSeq.foo { // These both avoid the crash: diff --git a/test/scaladoc/run/implicits-base.scala b/test/scaladoc/run/implicits-base.scala index 8f8652cdb3..ea87a670bb 100644 --- a/test/scaladoc/run/implicits-base.scala +++ b/test/scaladoc/run/implicits-base.scala @@ -94,7 +94,7 @@ object Test extends ScaladocModelTest { assert(isShadowed(conv._member("convToEnrichedA"))) assert(conv._member("convToEnrichedA").resultType.name == "Double") - // def convToNumericA: Double // enrichA1: no constraintsd + // def convToNumericA: Double // enrichA1: no constraints conv = B._conversion(A.qualifiedName + ".enrichA1") assert(conv.members.length == 1) assert(conv.constraints.length == 0) diff --git a/test/scaladoc/scalacheck/HtmlFactoryTest.scala b/test/scaladoc/scalacheck/HtmlFactoryTest.scala index 6a6b1f8901..578e0382eb 100644 --- a/test/scaladoc/scalacheck/HtmlFactoryTest.scala +++ b/test/scaladoc/scalacheck/HtmlFactoryTest.scala @@ -711,7 +711,7 @@ object Test extends Properties("HtmlFactory") { property("class") = files.get("com/example/p1/Clazz.html") match { case Some(node: scala.xml.Node) => { - property("implicit convertion") = + property("implicit conversion") = node.toString contains "implicit " property("gt4s") = -- cgit v1.2.3 From dfb70b632c1e8a2c6ce27eaacb74dbbb47ce9532 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Wed, 17 Jun 2015 11:06:31 -0700 Subject: SI-9339 Support classpaths with no single compatible jline As usual, the repl will use whatever jline 2 jar on the classpath, if there is one. Failing that, there's a fallback and an override. If instantiating the standard `jline.InteractiveReader` fails, we fall back to an embedded, shaded, version of jline, provided by `jline_embedded.InteractiveReader`. (Assume `import scala.tools.nsc.interpreter._` for this message.) The instantiation of `InteractiveReader` eagerly exercises jline, so that a linkage error will result if jline is missing or if the provided one is not binary compatible. The property `scala.repl.reader` overrides this behavior, if set to the FQN of a class that looks like `YourInteractiveReader` below. ``` class YourInteractiveReader(completer: () => Completion) extends InteractiveReader ``` The repl logs which classes it tried to instantiate under `-Ydebug`. # Changes to source & build The core of the repl (`src/repl`) no longer depends on jline. The jline interface is now in `src/repl-jline`. The embedded jline + our interface to it are generated by the `quick.repl` target. The build now also enforces that only `src/repl-jline` depends on jline. The sources in `src/repl` are now sure to be independent of it, though they do use reflection to instantiate a suitable subclass of `InteractiveReader`, as explained above. The `quick.repl` target builds the sources in `src/repl` and `src/repl-jline`, producing a jar for the `repl-jline` classes, which is then transformed using jarjar to obtain a shaded copy of the `scala.tools.nsc.interpreter.jline` package. 
Jarjar is used to combine the `jline` jar and the `repl-jline` into a new jar, rewriting package names as follows: - `org.fusesource` -> `scala.tools.fusesource_embedded` - `jline` -> `scala.tools.jline_embedded` - `scala.tools.nsc.interpreter.jline` -> `scala.tools.nsc.interpreter.jline_embedded` Classes not reachable from `scala.tools.**` are pruned, as well as empty dirs. The classes in the `repl-jline` jar as well as those in the rewritten one are copied to the repl's output directory. PS: The sbt build is not updated, sorry. PPS: A more recent fork of jarjar: https://github.com/shevek/jarjar. --- build.sbt | 1 + build.xml | 40 +++++- .../nsc/interpreter/jline/FileBackedHistory.scala | 93 ++++++++++++++ .../nsc/interpreter/jline/JLineDelimiter.scala | 25 ++++ .../tools/nsc/interpreter/jline/JLineHistory.scala | 77 +++++++++++ .../tools/nsc/interpreter/jline/JLineReader.scala | 143 +++++++++++++++++++++ src/repl/scala/tools/nsc/interpreter/ILoop.scala | 40 ++++-- .../nsc/interpreter/jline/FileBackedHistory.scala | 93 -------------- .../nsc/interpreter/jline/JLineDelimiter.scala | 25 ---- .../tools/nsc/interpreter/jline/JLineHistory.scala | 77 ----------- .../tools/nsc/interpreter/jline/JLineReader.scala | 143 --------------------- 11 files changed, 407 insertions(+), 350 deletions(-) create mode 100644 src/repl-jline/scala/tools/nsc/interpreter/jline/FileBackedHistory.scala create mode 100644 src/repl-jline/scala/tools/nsc/interpreter/jline/JLineDelimiter.scala create mode 100644 src/repl-jline/scala/tools/nsc/interpreter/jline/JLineHistory.scala create mode 100644 src/repl-jline/scala/tools/nsc/interpreter/jline/JLineReader.scala delete mode 100644 src/repl/scala/tools/nsc/interpreter/jline/FileBackedHistory.scala delete mode 100644 src/repl/scala/tools/nsc/interpreter/jline/JLineDelimiter.scala delete mode 100644 src/repl/scala/tools/nsc/interpreter/jline/JLineHistory.scala delete mode 100644 src/repl/scala/tools/nsc/interpreter/jline/JLineReader.scala diff --git a/build.sbt b/build.sbt index 553c217d4a..76d66481d0 100644 --- a/build.sbt +++ b/build.sbt @@ -191,6 +191,7 @@ lazy val interactive = configureAsSubproject(project) .settings(disableDocsAndPublishingTasks: _*) .dependsOn(compiler) +// TODO: SI-9339 embed shaded copy of jline & its interface (see #4563) lazy val repl = configureAsSubproject(project) .settings(libraryDependencies += jlineDep) .settings(disableDocsAndPublishingTasks: _*) diff --git a/build.xml b/build.xml index 421646a2b0..589e1931b8 100755 --- a/build.xml +++ b/build.xml @@ -275,6 +275,10 @@ TODO:
    + + + + @@ -696,7 +700,7 @@ TODO: - + @@ -799,6 +803,11 @@ TODO: + + + + + @@ -873,6 +882,8 @@ TODO: + + @@ -1076,6 +1087,7 @@ TODO: + + + + + + + + + + + + + + + + + diff --git a/src/repl-jline/scala/tools/nsc/interpreter/jline/FileBackedHistory.scala b/src/repl-jline/scala/tools/nsc/interpreter/jline/FileBackedHistory.scala new file mode 100644 index 0000000000..b6c9792ec0 --- /dev/null +++ b/src/repl-jline/scala/tools/nsc/interpreter/jline/FileBackedHistory.scala @@ -0,0 +1,93 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2015 LAMP/EPFL + * @author Paul Phillips + */ + +package scala.tools.nsc.interpreter.jline + +import _root_.jline.console.history.PersistentHistory + + +import scala.tools.nsc.interpreter +import scala.tools.nsc.io.{File, Path} + +/** TODO: file locking. + */ +trait FileBackedHistory extends JLineHistory with PersistentHistory { + def maxSize: Int + + protected lazy val historyFile: File = FileBackedHistory.defaultFile + private var isPersistent = true + + locally { + load() + } + + def withoutSaving[T](op: => T): T = { + val saved = isPersistent + isPersistent = false + try op + finally isPersistent = saved + } + + def addLineToFile(item: CharSequence): Unit = { + if (isPersistent) + append(item + "\n") + } + + /** Overwrites the history file with the current memory. */ + protected def sync(): Unit = { + val lines = asStrings map (_ + "\n") + historyFile.writeAll(lines: _*) + } + + /** Append one or more lines to the history file. */ + protected def append(lines: String*): Unit = { + historyFile.appendAll(lines: _*) + } + + def load(): Unit = { + if (!historyFile.canRead) + historyFile.createFile() + + val lines: IndexedSeq[String] = { + try historyFile.lines().toIndexedSeq + catch { + // It seems that control characters in the history file combined + // with the default codec can lead to nio spewing exceptions. Rather + // than abandon hope we'll try to read it as ISO-8859-1 + case _: Exception => + try historyFile.lines("ISO-8859-1").toIndexedSeq + catch { + case _: Exception => Vector() + } + } + } + + interpreter.repldbg("Loading " + lines.size + " into history.") + + // avoid writing to the history file + withoutSaving(lines takeRight maxSize foreach add) + // truncate the history file if it's too big. 
+ if (lines.size > maxSize) { + interpreter.repldbg("File exceeds maximum size: truncating to " + maxSize + " entries.") + sync() + } + moveToEnd() + } + + def flush(): Unit = () + + def purge(): Unit = historyFile.truncate() +} + +object FileBackedHistory { + // val ContinuationChar = '\003' + // val ContinuationNL: String = Array('\003', '\n').mkString + + import scala.tools.nsc.Properties.userHome + + def defaultFileName = ".scala_history" + + def defaultFile: File = File(Path(userHome) / defaultFileName) +} diff --git a/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineDelimiter.scala b/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineDelimiter.scala new file mode 100644 index 0000000000..c18a9809a0 --- /dev/null +++ b/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineDelimiter.scala @@ -0,0 +1,25 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2013 LAMP/EPFL + * @author Paul Phillips + */ + +package scala.tools.nsc.interpreter.jline + +import scala.tools.nsc.interpreter + +import _root_.jline.console.completer.ArgumentCompleter.{ ArgumentDelimiter, ArgumentList } + +// implements a jline interface +class JLineDelimiter extends ArgumentDelimiter { + def toJLine(args: List[String], cursor: Int) = args match { + case Nil => new ArgumentList(new Array[String](0), 0, 0, cursor) + case xs => new ArgumentList(xs.toArray, xs.size - 1, xs.last.length, cursor) + } + + def delimit(buffer: CharSequence, cursor: Int) = { + val p = interpreter.Parsed(buffer.toString, cursor) + toJLine(p.args, cursor) + } + + def isDelimiter(buffer: CharSequence, cursor: Int) = interpreter.Parsed(buffer.toString, cursor).isDelimiter +} diff --git a/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineHistory.scala b/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineHistory.scala new file mode 100644 index 0000000000..1f6a1f7022 --- /dev/null +++ b/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineHistory.scala @@ -0,0 +1,77 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2013 LAMP/EPFL + * @author Paul Phillips + */ + +package scala.tools.nsc.interpreter.jline + +import java.util.{Iterator => JIterator, ListIterator => JListIterator} + +import _root_.jline.{console => jconsole} +import jconsole.history.History.{Entry => JEntry} +import jconsole.history.{History => JHistory} + +import scala.tools.nsc.interpreter +import scala.tools.nsc.interpreter.session.{History, SimpleHistory} + + +/** A straight scalification of the jline interface which mixes + * in the sparse jline-independent one too. 
+ */ +trait JLineHistory extends JHistory with History { + def size: Int + def isEmpty: Boolean + def index: Int + def clear(): Unit + def get(index: Int): CharSequence + def add(line: CharSequence): Unit + def replace(item: CharSequence): Unit + + def entries(index: Int): JListIterator[JEntry] + def entries(): JListIterator[JEntry] + def iterator: JIterator[JEntry] + + def current(): CharSequence + def previous(): Boolean + def next(): Boolean + def moveToFirst(): Boolean + def moveToLast(): Boolean + def moveTo(index: Int): Boolean + def moveToEnd(): Unit + + override def historicize(text: String): Boolean = { + text.lines foreach add + moveToEnd() + true + } +} + +object JLineHistory { + class JLineFileHistory extends SimpleHistory with FileBackedHistory { + override def add(item: CharSequence): Unit = { + if (!isEmpty && last == item) + interpreter.repldbg("Ignoring duplicate entry '" + item + "'") + else { + super.add(item) + addLineToFile(item) + } + } + override def toString = "History(size = " + size + ", index = " + index + ")" + + import scala.collection.JavaConverters._ + + override def asStrings(from: Int, to: Int): List[String] = + entries(from).asScala.take(to - from).map(_.value.toString).toList + + case class Entry(index: Int, value: CharSequence) extends JEntry { + override def toString = value.toString + } + + private def toEntries(): Seq[JEntry] = buf.zipWithIndex map { case (x, i) => Entry(i, x)} + def entries(idx: Int): JListIterator[JEntry] = toEntries().asJava.listIterator(idx) + def entries(): JListIterator[JEntry] = toEntries().asJava.listIterator() + def iterator: JIterator[JEntry] = toEntries().iterator.asJava + } + + def apply(): History = try new JLineFileHistory catch { case x: Exception => new SimpleHistory() } +} diff --git a/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineReader.scala b/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineReader.scala new file mode 100644 index 0000000000..f0fce13fe8 --- /dev/null +++ b/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineReader.scala @@ -0,0 +1,143 @@ +/** NSC -- new Scala compiler + * + * Copyright 2005-2015 LAMP/EPFL + * @author Stepan Koltsov + * @author Adriaan Moors + */ + +package scala.tools.nsc.interpreter.jline + +import java.util.{Collection => JCollection, List => JList} + +import _root_.jline.{console => jconsole} +import jconsole.completer.{Completer, ArgumentCompleter} +import jconsole.history.{History => JHistory} + + +import scala.tools.nsc.interpreter +import scala.tools.nsc.interpreter.Completion +import scala.tools.nsc.interpreter.Completion.Candidates +import scala.tools.nsc.interpreter.session.History + +/** + * Reads from the console using JLine. + * + * Eagerly instantiates all relevant JLine classes, so that we can detect linkage errors on `new JLineReader` and retry. 
+ */ +class InteractiveReader(completer: () => Completion) extends interpreter.InteractiveReader { + val interactive = true + + val history: History = new JLineHistory.JLineFileHistory() + + private val consoleReader = { + val reader = new JLineConsoleReader() + + reader setPaginationEnabled interpreter.`package`.isPaged + + // ASAP + reader setExpandEvents false + + reader setHistory history.asInstanceOf[JHistory] + + reader + } + + private[this] var _completion: Completion = interpreter.NoCompletion + def completion: Completion = _completion + + override def postInit() = { + _completion = completer() + + consoleReader.initCompletion(completion) + } + + def reset() = consoleReader.getTerminal().reset() + def redrawLine() = consoleReader.redrawLineAndFlush() + def readOneLine(prompt: String) = consoleReader.readLine(prompt) + def readOneKey(prompt: String) = consoleReader.readOneKey(prompt) +} + +// implements a jline interface +private class JLineConsoleReader extends jconsole.ConsoleReader with interpreter.VariColumnTabulator { + val isAcross = interpreter.`package`.isAcross + val marginSize = 3 + + def width = getTerminal.getWidth() + def height = getTerminal.getHeight() + + private def morePrompt = "--More--" + + private def emulateMore(): Int = { + val key = readOneKey(morePrompt) + try key match { + case '\r' | '\n' => 1 + case 'q' => -1 + case _ => height - 1 + } + finally { + eraseLine() + // TODO: still not quite managing to erase --More-- and get + // back to a scala prompt without another keypress. + if (key == 'q') { + putString(getPrompt()) + redrawLine() + flush() + } + } + } + + override def printColumns(items: JCollection[_ <: CharSequence]): Unit = { + import scala.tools.nsc.interpreter.javaCharSeqCollectionToScala + printColumns_(items: List[String]) + } + + private def printColumns_(items: List[String]): Unit = if (items exists (_ != "")) { + val grouped = tabulate(items) + var linesLeft = if (isPaginationEnabled()) height - 1 else Int.MaxValue + grouped foreach { xs => + println(xs.mkString) + linesLeft -= 1 + if (linesLeft <= 0) { + linesLeft = emulateMore() + if (linesLeft < 0) + return + } + } + } + + def readOneKey(prompt: String) = { + this.print(prompt) + this.flush() + this.readCharacter() + } + + def eraseLine() = resetPromptLine("", "", 0) + + def redrawLineAndFlush(): Unit = { + flush(); drawLine(); flush() + } + + // A hook for running code after the repl is done initializing. 
+ def initCompletion(completion: Completion): Unit = { + this setBellEnabled false + + if (completion ne interpreter.NoCompletion) { + val jlineCompleter = new ArgumentCompleter(new JLineDelimiter, + new Completer { + val tc = completion.completer() + def complete(_buf: String, cursor: Int, candidates: JList[CharSequence]): Int = { + val buf = if (_buf == null) "" else _buf + val Candidates(newCursor, newCandidates) = tc.complete(buf, cursor) + newCandidates foreach (candidates add _) + newCursor + } + } + ) + + jlineCompleter setStrict false + + this addCompleter jlineCompleter + this setAutoprintThreshold 400 // max completion candidates without warning + } + } +} diff --git a/src/repl/scala/tools/nsc/interpreter/ILoop.scala b/src/repl/scala/tools/nsc/interpreter/ILoop.scala index 3ce9668b97..a3047ccc8e 100644 --- a/src/repl/scala/tools/nsc/interpreter/ILoop.scala +++ b/src/repl/scala/tools/nsc/interpreter/ILoop.scala @@ -26,6 +26,8 @@ import scala.concurrent.{ ExecutionContext, Await, Future, future } import ExecutionContext.Implicits._ import java.io.{ BufferedReader, FileReader } +import scala.util.{Try, Success, Failure} + /** The Scala interactive shell. It provides a read-eval-print loop * around the Interpreter class. * After instantiation, clients should call the main() method. @@ -860,18 +862,36 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) * with SimpleReader. */ def chooseReader(settings: Settings): InteractiveReader = { - def mkJLineReader(completer: () => Completion): InteractiveReader = - try new jline.JLineReader(completer) - catch { - case ex@(_: Exception | _: NoClassDefFoundError) => - Console.println(f"Failed to created JLineReader: ${ex}%nFalling back to SimpleReader.") - SimpleReader() - } - if (settings.Xnojline || Properties.isEmacsShell) SimpleReader() else { - if (settings.noCompletion) mkJLineReader(() => NoCompletion) - else mkJLineReader(() => new JLineCompletion(intp)) + type Completer = () => Completion + type ReaderMaker = Completer => InteractiveReader + + def instantiate(className: String): ReaderMaker = completer => { + if (settings.debug) Console.println(s"Trying to instantiate a InteractiveReader from $className") + Class.forName(className).getConstructor(classOf[Completer]). + newInstance(completer). 
+ asInstanceOf[InteractiveReader] + } + + def mkReader(maker: ReaderMaker) = + if (settings.noCompletion) maker(() => NoCompletion) + else maker(() => new JLineCompletion(intp)) // JLineCompletion is a misnomer -- it's not tied to jline + + def internalClass(kind: String) = s"scala.tools.nsc.interpreter.$kind.InteractiveReader" + val readerClasses = sys.props.get("scala.repl.reader").toStream ++ Stream(internalClass("jline"), internalClass("jline_embedded")) + val readers = readerClasses map (cls => Try { mkReader(instantiate(cls)) }) + + val reader = (readers collect { case Success(reader) => reader } headOption) getOrElse SimpleReader() + + if (settings.debug) { + val readerDiags = (readerClasses, readers).zipped map { + case (cls, Failure(e)) => s" - $cls --> " + e.getStackTrace.mkString(e.toString+"\n\t", "\n\t","\n") + case (cls, Success(_)) => s" - $cls OK" + } + Console.println(s"All InteractiveReaders tried: ${readerDiags.mkString("\n","\n","\n")}") + } + reader } } diff --git a/src/repl/scala/tools/nsc/interpreter/jline/FileBackedHistory.scala b/src/repl/scala/tools/nsc/interpreter/jline/FileBackedHistory.scala deleted file mode 100644 index b6c9792ec0..0000000000 --- a/src/repl/scala/tools/nsc/interpreter/jline/FileBackedHistory.scala +++ /dev/null @@ -1,93 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2015 LAMP/EPFL - * @author Paul Phillips - */ - -package scala.tools.nsc.interpreter.jline - -import _root_.jline.console.history.PersistentHistory - - -import scala.tools.nsc.interpreter -import scala.tools.nsc.io.{File, Path} - -/** TODO: file locking. - */ -trait FileBackedHistory extends JLineHistory with PersistentHistory { - def maxSize: Int - - protected lazy val historyFile: File = FileBackedHistory.defaultFile - private var isPersistent = true - - locally { - load() - } - - def withoutSaving[T](op: => T): T = { - val saved = isPersistent - isPersistent = false - try op - finally isPersistent = saved - } - - def addLineToFile(item: CharSequence): Unit = { - if (isPersistent) - append(item + "\n") - } - - /** Overwrites the history file with the current memory. */ - protected def sync(): Unit = { - val lines = asStrings map (_ + "\n") - historyFile.writeAll(lines: _*) - } - - /** Append one or more lines to the history file. */ - protected def append(lines: String*): Unit = { - historyFile.appendAll(lines: _*) - } - - def load(): Unit = { - if (!historyFile.canRead) - historyFile.createFile() - - val lines: IndexedSeq[String] = { - try historyFile.lines().toIndexedSeq - catch { - // It seems that control characters in the history file combined - // with the default codec can lead to nio spewing exceptions. Rather - // than abandon hope we'll try to read it as ISO-8859-1 - case _: Exception => - try historyFile.lines("ISO-8859-1").toIndexedSeq - catch { - case _: Exception => Vector() - } - } - } - - interpreter.repldbg("Loading " + lines.size + " into history.") - - // avoid writing to the history file - withoutSaving(lines takeRight maxSize foreach add) - // truncate the history file if it's too big. 
- if (lines.size > maxSize) { - interpreter.repldbg("File exceeds maximum size: truncating to " + maxSize + " entries.") - sync() - } - moveToEnd() - } - - def flush(): Unit = () - - def purge(): Unit = historyFile.truncate() -} - -object FileBackedHistory { - // val ContinuationChar = '\003' - // val ContinuationNL: String = Array('\003', '\n').mkString - - import scala.tools.nsc.Properties.userHome - - def defaultFileName = ".scala_history" - - def defaultFile: File = File(Path(userHome) / defaultFileName) -} diff --git a/src/repl/scala/tools/nsc/interpreter/jline/JLineDelimiter.scala b/src/repl/scala/tools/nsc/interpreter/jline/JLineDelimiter.scala deleted file mode 100644 index c18a9809a0..0000000000 --- a/src/repl/scala/tools/nsc/interpreter/jline/JLineDelimiter.scala +++ /dev/null @@ -1,25 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips - */ - -package scala.tools.nsc.interpreter.jline - -import scala.tools.nsc.interpreter - -import _root_.jline.console.completer.ArgumentCompleter.{ ArgumentDelimiter, ArgumentList } - -// implements a jline interface -class JLineDelimiter extends ArgumentDelimiter { - def toJLine(args: List[String], cursor: Int) = args match { - case Nil => new ArgumentList(new Array[String](0), 0, 0, cursor) - case xs => new ArgumentList(xs.toArray, xs.size - 1, xs.last.length, cursor) - } - - def delimit(buffer: CharSequence, cursor: Int) = { - val p = interpreter.Parsed(buffer.toString, cursor) - toJLine(p.args, cursor) - } - - def isDelimiter(buffer: CharSequence, cursor: Int) = interpreter.Parsed(buffer.toString, cursor).isDelimiter -} diff --git a/src/repl/scala/tools/nsc/interpreter/jline/JLineHistory.scala b/src/repl/scala/tools/nsc/interpreter/jline/JLineHistory.scala deleted file mode 100644 index 1f6a1f7022..0000000000 --- a/src/repl/scala/tools/nsc/interpreter/jline/JLineHistory.scala +++ /dev/null @@ -1,77 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips - */ - -package scala.tools.nsc.interpreter.jline - -import java.util.{Iterator => JIterator, ListIterator => JListIterator} - -import _root_.jline.{console => jconsole} -import jconsole.history.History.{Entry => JEntry} -import jconsole.history.{History => JHistory} - -import scala.tools.nsc.interpreter -import scala.tools.nsc.interpreter.session.{History, SimpleHistory} - - -/** A straight scalification of the jline interface which mixes - * in the sparse jline-independent one too. 
- */ -trait JLineHistory extends JHistory with History { - def size: Int - def isEmpty: Boolean - def index: Int - def clear(): Unit - def get(index: Int): CharSequence - def add(line: CharSequence): Unit - def replace(item: CharSequence): Unit - - def entries(index: Int): JListIterator[JEntry] - def entries(): JListIterator[JEntry] - def iterator: JIterator[JEntry] - - def current(): CharSequence - def previous(): Boolean - def next(): Boolean - def moveToFirst(): Boolean - def moveToLast(): Boolean - def moveTo(index: Int): Boolean - def moveToEnd(): Unit - - override def historicize(text: String): Boolean = { - text.lines foreach add - moveToEnd() - true - } -} - -object JLineHistory { - class JLineFileHistory extends SimpleHistory with FileBackedHistory { - override def add(item: CharSequence): Unit = { - if (!isEmpty && last == item) - interpreter.repldbg("Ignoring duplicate entry '" + item + "'") - else { - super.add(item) - addLineToFile(item) - } - } - override def toString = "History(size = " + size + ", index = " + index + ")" - - import scala.collection.JavaConverters._ - - override def asStrings(from: Int, to: Int): List[String] = - entries(from).asScala.take(to - from).map(_.value.toString).toList - - case class Entry(index: Int, value: CharSequence) extends JEntry { - override def toString = value.toString - } - - private def toEntries(): Seq[JEntry] = buf.zipWithIndex map { case (x, i) => Entry(i, x)} - def entries(idx: Int): JListIterator[JEntry] = toEntries().asJava.listIterator(idx) - def entries(): JListIterator[JEntry] = toEntries().asJava.listIterator() - def iterator: JIterator[JEntry] = toEntries().iterator.asJava - } - - def apply(): History = try new JLineFileHistory catch { case x: Exception => new SimpleHistory() } -} diff --git a/src/repl/scala/tools/nsc/interpreter/jline/JLineReader.scala b/src/repl/scala/tools/nsc/interpreter/jline/JLineReader.scala deleted file mode 100644 index 414868a7e5..0000000000 --- a/src/repl/scala/tools/nsc/interpreter/jline/JLineReader.scala +++ /dev/null @@ -1,143 +0,0 @@ -/** NSC -- new Scala compiler - * - * Copyright 2005-2015 LAMP/EPFL - * @author Stepan Koltsov - * @author Adriaan Moors - */ - -package scala.tools.nsc.interpreter.jline - -import java.util.{Collection => JCollection, List => JList} - -import _root_.jline.{console => jconsole} -import jconsole.completer.{Completer, ArgumentCompleter} -import jconsole.history.{History => JHistory} - - -import scala.tools.nsc.interpreter -import scala.tools.nsc.interpreter.Completion -import scala.tools.nsc.interpreter.Completion.Candidates -import scala.tools.nsc.interpreter.session.History - -/** - * Reads from the console using JLine. - * - * Eagerly instantiates all relevant JLine classes, so that we can detect linkage errors on `new JLineReader` and retry. 
- */ -class JLineReader(completer: () => Completion) extends interpreter.InteractiveReader { - val interactive = true - - val history: History = new JLineHistory.JLineFileHistory() - - private val consoleReader = { - val reader = new JLineConsoleReader() - - reader setPaginationEnabled interpreter.`package`.isPaged - - // ASAP - reader setExpandEvents false - - reader setHistory history.asInstanceOf[JHistory] - - reader - } - - private[this] var _completion: Completion = interpreter.NoCompletion - def completion: Completion = _completion - - override def postInit() = { - _completion = completer() - - consoleReader.initCompletion(completion) - } - - def reset() = consoleReader.getTerminal().reset() - def redrawLine() = consoleReader.redrawLineAndFlush() - def readOneLine(prompt: String) = consoleReader.readLine(prompt) - def readOneKey(prompt: String) = consoleReader.readOneKey(prompt) -} - -// implements a jline interface -private class JLineConsoleReader extends jconsole.ConsoleReader with interpreter.VariColumnTabulator { - val isAcross = interpreter.`package`.isAcross - val marginSize = 3 - - def width = getTerminal.getWidth() - def height = getTerminal.getHeight() - - private def morePrompt = "--More--" - - private def emulateMore(): Int = { - val key = readOneKey(morePrompt) - try key match { - case '\r' | '\n' => 1 - case 'q' => -1 - case _ => height - 1 - } - finally { - eraseLine() - // TODO: still not quite managing to erase --More-- and get - // back to a scala prompt without another keypress. - if (key == 'q') { - putString(getPrompt()) - redrawLine() - flush() - } - } - } - - override def printColumns(items: JCollection[_ <: CharSequence]): Unit = { - import scala.tools.nsc.interpreter.javaCharSeqCollectionToScala - printColumns_(items: List[String]) - } - - private def printColumns_(items: List[String]): Unit = if (items exists (_ != "")) { - val grouped = tabulate(items) - var linesLeft = if (isPaginationEnabled()) height - 1 else Int.MaxValue - grouped foreach { xs => - println(xs.mkString) - linesLeft -= 1 - if (linesLeft <= 0) { - linesLeft = emulateMore() - if (linesLeft < 0) - return - } - } - } - - def readOneKey(prompt: String) = { - this.print(prompt) - this.flush() - this.readCharacter() - } - - def eraseLine() = resetPromptLine("", "", 0) - - def redrawLineAndFlush(): Unit = { - flush(); drawLine(); flush() - } - - // A hook for running code after the repl is done initializing. - def initCompletion(completion: Completion): Unit = { - this setBellEnabled false - - if (completion ne interpreter.NoCompletion) { - val jlineCompleter = new ArgumentCompleter(new JLineDelimiter, - new Completer { - val tc = completion.completer() - def complete(_buf: String, cursor: Int, candidates: JList[CharSequence]): Int = { - val buf = if (_buf == null) "" else _buf - val Candidates(newCursor, newCandidates) = tc.complete(buf, cursor) - newCandidates foreach (candidates add _) - newCursor - } - } - ) - - jlineCompleter setStrict false - - this addCompleter jlineCompleter - this setAutoprintThreshold 400 // max completion candidates without warning - } - } -} -- cgit v1.2.3 From bb4b79c5d1f6bffc2ad6e8466be2dce6a44c0fcb Mon Sep 17 00:00:00 2001 From: Rex Kerr Date: Fri, 30 Jan 2015 18:05:24 -0800 Subject: SI-8930 - Vector updated, +:, and :+ slow when typed as Seq[A] Vector was intercepting only the IndexedSeq CanBuildFrom to quickly generate new vectors. Now it intercepts immutable.Seq and collection.Seq as well. 
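For illustration only (not part of the patch), the kind of call site this change speeds up -- a `Vector` reached through a `Seq`-typed reference:

```scala
// Hypothetical demo: appending/prepending through a Seq-typed reference.
// The result is a Vector either way (the generic CanBuildFrom delegates to the
// source collection's builder); the point of the change is that these calls now
// take Vector's fast append/prepend path instead of copying element by element.
object VectorAsSeqDemo {
  def main(args: Array[String]): Unit = {
    val v: scala.collection.immutable.Seq[Int] = Vector(1, 2, 3)
    val appended  = v :+ 4    // resolves immutable.Seq's default CanBuildFrom
    val prepended = 0 +: v
    assert(appended.isInstanceOf[Vector[_]] && prepended.isInstanceOf[Vector[_]])
  }
}
```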
There are other possibilities (collection.IndexedSeq), but they will probably arise rarely, and to avoid an absurdly long set of checks we would need a marker trait (that is not binary compatible). --- src/library/scala/collection/immutable/Vector.scala | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/src/library/scala/collection/immutable/Vector.scala b/src/library/scala/collection/immutable/Vector.scala index 47a623a616..46d5d0c69c 100644 --- a/src/library/scala/collection/immutable/Vector.scala +++ b/src/library/scala/collection/immutable/Vector.scala @@ -132,19 +132,25 @@ override def companion: GenericCompanion[Vector] = Vector throw new IndexOutOfBoundsException(index.toString) } - + // If we have a default builder, there are faster ways to perform some operations + @inline private[this] def isDefaultCBF[A, B, That](bf: CanBuildFrom[Vector[A], B, That]): Boolean = + (bf eq IndexedSeq.ReusableCBF) || (bf eq collection.immutable.Seq.ReusableCBF) || (bf eq collection.Seq.ReusableCBF) + // SeqLike api override def updated[B >: A, That](index: Int, elem: B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That = - if (bf eq IndexedSeq.ReusableCBF) updateAt(index, elem).asInstanceOf[That] // just ignore bf + if (isDefaultCBF[A, B, That](bf)) + updateAt(index, elem).asInstanceOf[That] // ignore bf--it will just give a Vector, and slowly else super.updated(index, elem)(bf) override def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That = - if (bf eq IndexedSeq.ReusableCBF) appendFront(elem).asInstanceOf[That] // just ignore bf + if (isDefaultCBF[A, B, That](bf)) + appendFront(elem).asInstanceOf[That] // ignore bf--it will just give a Vector, and slowly else super.+:(elem)(bf) override def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That = - if (bf eq IndexedSeq.ReusableCBF) appendBack(elem).asInstanceOf[That] // just ignore bf + if (isDefaultCBF(bf)) + appendBack(elem).asInstanceOf[That] // ignore bf--it will just give a Vector, and slowly else super.:+(elem)(bf) override def take(n: Int): Vector[A] = { @@ -211,7 +217,8 @@ override def companion: GenericCompanion[Vector] = Vector // concat (suboptimal but avoids worst performance gotchas) override def ++[B >: A, That](that: GenTraversableOnce[B])(implicit bf: CanBuildFrom[Vector[A], B, That]): That = { - if (bf eq IndexedSeq.ReusableCBF) { + if (isDefaultCBF(bf)) { + // We are sure we will create a Vector, so let's do it efficiently import Vector.{Log2ConcatFaster, TinyAppendFaster} if (that.isEmpty) this.asInstanceOf[That] else { -- cgit v1.2.3 From 1a9ffaa895e37c141561783c8596810c26d69d6a Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Wed, 17 Jun 2015 13:10:48 -0700 Subject: SI-9206 BooleanProp if set and not untrue Previously, handy `sys.BooleanProp.keyExists` ignored the property value. While trying not to make any real estate puns, this commit will let it go false if a value is supplied that is not true in the usual Java sense. But what is truth? Allows `scala -Dscala.color=off`, for example. 
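For illustration only (the real implementation is in the `BooleanProp.keyExists` diff below), a minimal model of the new truth test; the helper name `isTrueValued` is made up for the sketch:

```scala
// Sketch of the intended semantics: a keyExists property is true iff the key
// is set and its value is either empty or "true", compared case-insensitively.
object KeyExistsDemo {
  def isTrueValued(value: Option[String]): Boolean =
    value.exists(s => s == "" || s.equalsIgnoreCase("true"))

  def main(args: Array[String]): Unit = {
    assert(isTrueValued(Some("")))       // -Dscala.color
    assert(isTrueValued(Some("TRUE")))   // -Dscala.color=TRUE
    assert(!isTrueValued(Some("off")))   // -Dscala.color=off
    assert(!isTrueValued(None))          // property not set at all
  }
}
```

So `-Dscala.color` and `-Dscala.color=true` still count as true, while `-Dscala.color=off` (or any value other than "" or "true") turns the property off.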
--- src/library/scala/sys/BooleanProp.scala | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/library/scala/sys/BooleanProp.scala b/src/library/scala/sys/BooleanProp.scala index 74b0a9077b..e5e4668edb 100644 --- a/src/library/scala/sys/BooleanProp.scala +++ b/src/library/scala/sys/BooleanProp.scala @@ -63,12 +63,13 @@ object BooleanProp { def valueIsTrue[T](key: String): BooleanProp = new BooleanPropImpl(key, _.toLowerCase == "true") /** As an alternative, this method creates a BooleanProp which is true - * if the key exists in the map. This way -Dfoo.bar is enough to be - * considered true. + * if the key exists in the map and is not assigned a value other than "true", + * compared case-insensitively, or the empty string. This way -Dmy.property + * results in a true-valued property, but -Dmy.property=false does not. * * @return A BooleanProp with a liberal truth policy */ - def keyExists[T](key: String): BooleanProp = new BooleanPropImpl(key, _ => true) + def keyExists[T](key: String): BooleanProp = new BooleanPropImpl(key, s => s == "" || s.equalsIgnoreCase("true")) /** A constant true or false property which ignores all method calls. */ -- cgit v1.2.3 From aa98d9a8c19ca27d85b62d1eccfc868440dc9ab2 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Wed, 17 Jun 2015 13:24:53 -0700 Subject: SI-9206 REPL prompt is more easily configured The scala shell prompt can be provided as either a system property or in compiler.properties. The prompt string is taken as a format string with one argument that is the version string. ``` $ scala -Dscala.repl.prompt="%nScala %s> " Welcome to Scala version 2.11.7-20150616-093756-43a56fb5a1 (Java HotSpot(TM) 64-Bit Server VM, Java 1.8.0_45). Type in expressions to have them evaluated. Type :help for more information. Scala 2.11.7-20150616-093756-43a56fb5a1> 42 res0: Int = 42 Scala 2.11.7-20150616-093756-43a56fb5a1> :quit ``` --- src/repl/scala/tools/nsc/interpreter/ILoop.scala | 22 +++++++++++----------- .../scala/tools/nsc/interpreter/ReplProps.scala | 10 +++++++++- 2 files changed, 20 insertions(+), 12 deletions(-) diff --git a/src/repl/scala/tools/nsc/interpreter/ILoop.scala b/src/repl/scala/tools/nsc/interpreter/ILoop.scala index 4221126caa..11c843248a 100644 --- a/src/repl/scala/tools/nsc/interpreter/ILoop.scala +++ b/src/repl/scala/tools/nsc/interpreter/ILoop.scala @@ -197,10 +197,8 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) echo("%d %s".format(index + offset, line)) } - private val currentPrompt = Properties.shellPromptString - /** Prompt to print when awaiting input */ - def prompt = currentPrompt + def prompt = replProps.prompt import LoopCommand.{ cmd, nullary } @@ -410,14 +408,8 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) } private def readOneLine() = { - import scala.io.AnsiColor.{ MAGENTA, RESET } out.flush() - in readLine ( - if (replProps.colorOk) - MAGENTA + prompt + RESET - else - prompt - ) + in readLine prompt } /** The main read-eval-print loop for the repl. 
It calls @@ -776,6 +768,14 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) private object paste extends Pasted { val ContinueString = " | " val PromptString = "scala> " + val testPrompt = PromptString.trim + val testOurPrompt = prompt.trim + val testBoth = testPrompt != testOurPrompt + + def isPrompt(line: String) = { + val text = line.trim + text == testOurPrompt || (testBoth && text == testPrompt) + } def interpret(line: String): Unit = { echo(line.trim) @@ -785,7 +785,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) def transcript(start: String) = { echo("\n// Detected repl transcript paste: ctrl-D to finish.\n") - apply(Iterator(start) ++ readWhile(_.trim != PromptString.trim)) + apply(Iterator(start) ++ readWhile(!isPrompt(_))) } } import paste.{ ContinueString, PromptString } diff --git a/src/repl/scala/tools/nsc/interpreter/ReplProps.scala b/src/repl/scala/tools/nsc/interpreter/ReplProps.scala index 8c4faf7278..19f66e98a2 100644 --- a/src/repl/scala/tools/nsc/interpreter/ReplProps.scala +++ b/src/repl/scala/tools/nsc/interpreter/ReplProps.scala @@ -11,11 +11,19 @@ import Prop._ class ReplProps { private def bool(name: String) = BooleanProp.keyExists(name) - private def int(name: String) = IntProp(name) + private def int(name: String) = Prop[Int](name) // This property is used in TypeDebugging. Let's recycle it. val colorOk = bool("scala.color") + // Handy system prop for shell prompt, or else pick it up from compiler.properties + val prompt = { + import scala.io.AnsiColor.{ MAGENTA, RESET } + val p = Prop[String]("scala.repl.prompt").option getOrElse Properties.shellPromptString + val q = String.format(p, Properties.versionNumberString) + if (colorOk) s"$MAGENTA$q$RESET" else q + } + val info = bool("scala.repl.info") val debug = bool("scala.repl.debug") val trace = bool("scala.repl.trace") -- cgit v1.2.3 From c3aca109e95e2259d9909f8457a1422c5c995940 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Wed, 17 Jun 2015 14:47:13 -0700 Subject: SI-9206 Fix REPL code indentation To make code in error messages line up with the original line of code, templated code is indented by the width of the prompt. Use the raw prompt (without ANSI escapes or newlines) to determine the indentation. Also, indent only once per line. 
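A rough sketch (illustrative names, not from the patch) of how the indent width is meant to be derived: expand the prompt format string with the version, take its last line, and measure it before any color escapes are added.

```scala
// Illustrative only: roughly how the patch derives the indent width from the
// raw prompt. The default format "%nscala> " expands to a line break plus
// "scala> ", so the width is that of "scala> " and error carets line up with
// the code echoed after the prompt.
object PromptIndentDemo {
  def indentWidth(promptFormat: String, version: String): Int = {
    val expanded = promptFormat format version   // "%n" becomes a real line break
    expanded.split("\r?\n").last.length          // width of the prompt's last line
  }

  def main(args: Array[String]): Unit = {
    val width = indentWidth("%nscala> ", "2.11.7")  // default prompt, sample version
    assert(width == "scala> ".length)
    println(" " * width + "^")                      // caret indented by the prompt width
  }
}
```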
--- src/compiler/scala/tools/nsc/Properties.scala | 2 +- .../scala/tools/nsc/interpreter/Formatting.scala | 27 +++--- src/repl/scala/tools/nsc/interpreter/ILoop.scala | 9 +- src/repl/scala/tools/nsc/interpreter/IMain.scala | 19 +++-- .../scala/tools/nsc/interpreter/ReplProps.scala | 6 +- test/files/jvm/interpreter.check | 18 ++-- test/files/run/constrained-types.check | 8 +- test/files/run/kind-repl-command.check | 6 +- test/files/run/reify-repl-fail-gracefully.check | 6 +- test/files/run/reify_newimpl_22.check | 6 +- test/files/run/reify_newimpl_23.check | 2 +- test/files/run/reify_newimpl_25.check | 6 +- test/files/run/reify_newimpl_26.check | 2 +- test/files/run/repl-bare-expr.check | 36 ++++---- test/files/run/repl-colon-type.check | 4 +- test/files/run/repl-parens.check | 36 ++++---- test/files/run/repl-paste-2.check | 2 +- test/files/run/repl-reset.check | 24 +++--- test/files/run/repl-trim-stack-trace.scala | 6 +- test/files/run/t4542.check | 2 +- test/files/run/t4594-repl-settings.scala | 2 +- test/files/run/t5655.check | 12 +-- test/files/run/t7319.check | 18 ++-- test/files/run/t7747-repl.check | 96 +++++++++++----------- test/files/run/t9170.scala | 24 +++--- test/files/run/t9206.scala | 26 ++++++ test/files/run/xMigration.check | 18 ++-- 27 files changed, 223 insertions(+), 200 deletions(-) create mode 100644 test/files/run/t9206.scala diff --git a/src/compiler/scala/tools/nsc/Properties.scala b/src/compiler/scala/tools/nsc/Properties.scala index 9f160e2485..ca7d8776d4 100644 --- a/src/compiler/scala/tools/nsc/Properties.scala +++ b/src/compiler/scala/tools/nsc/Properties.scala @@ -13,7 +13,7 @@ object Properties extends scala.util.PropertiesTrait { // settings based on jar properties, falling back to System prefixed by "scala." def residentPromptString = scalaPropOrElse("resident.prompt", "\nnsc> ") - def shellPromptString = scalaPropOrElse("shell.prompt", "\nscala> ") + def shellPromptString = scalaPropOrElse("shell.prompt", "%nscala> ") // message to display at EOF (which by default ends with // a newline so as not to break the user's terminal) def shellInterruptedString = scalaPropOrElse("shell.interrupted", f":quit$lineSeparator") diff --git a/src/repl/scala/tools/nsc/interpreter/Formatting.scala b/src/repl/scala/tools/nsc/interpreter/Formatting.scala index 43e653edfd..844997429c 100644 --- a/src/repl/scala/tools/nsc/interpreter/Formatting.scala +++ b/src/repl/scala/tools/nsc/interpreter/Formatting.scala @@ -8,28 +8,25 @@ package interpreter import util.stringFromWriter -trait Formatting { - def prompt: String +class Formatting(indent: Int) { - def spaces(code: String): String = { + private val indentation = " " * indent + + private def indenting(code: String): Boolean = { /** Heuristic to avoid indenting and thereby corrupting """-strings and XML literals. */ val tokens = List("\"\"\"", "") val noIndent = (code contains "\n") && (tokens exists code.contains) - if (noIndent) "" - else prompt drop 1 map (_ => ' ') + !noIndent } /** Indent some code by the width of the scala> prompt. * This way, compiler error messages read better. 
*/ - def indentCode(code: String) = { - val indent = spaces(code) - stringFromWriter(str => - for (line <- code.lines) { - str print indent - str print (line + "\n") - str.flush() - } - ) - } + def indentCode(code: String) = stringFromWriter(str => + for (line <- code.lines) { + if (indenting(code)) str print indentation + str println line + str.flush() + } + ) } diff --git a/src/repl/scala/tools/nsc/interpreter/ILoop.scala b/src/repl/scala/tools/nsc/interpreter/ILoop.scala index 11c843248a..89061730f6 100644 --- a/src/repl/scala/tools/nsc/interpreter/ILoop.scala +++ b/src/repl/scala/tools/nsc/interpreter/ILoop.scala @@ -109,11 +109,10 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) } class ILoopInterpreter extends IMain(settings, out) { - outer => - - override lazy val formatting = new Formatting { - def prompt = ILoop.this.prompt - } + // the expanded prompt but without color escapes and without leading newline, for purposes of indenting + override lazy val formatting: Formatting = new Formatting( + (replProps.promptString format Properties.versionNumberString).lines.toList.last.length + ) override protected def parentClassLoader = settings.explicitParentLoader.getOrElse( classOf[ILoop].getClassLoader ) } diff --git a/src/repl/scala/tools/nsc/interpreter/IMain.scala b/src/repl/scala/tools/nsc/interpreter/IMain.scala index e355d9f864..2550a5dc57 100644 --- a/src/repl/scala/tools/nsc/interpreter/IMain.scala +++ b/src/repl/scala/tools/nsc/interpreter/IMain.scala @@ -112,12 +112,13 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set def this(factory: ScriptEngineFactory) = this(factory, new Settings()) def this() = this(new Settings()) - lazy val formatting: Formatting = new Formatting { - val prompt = Properties.shellPromptString - } + // the expanded prompt but without color escapes and without leading newline, for purposes of indenting + lazy val formatting: Formatting = new Formatting( + (replProps.promptString format Properties.versionNumberString).lines.toList.last.length + ) lazy val reporter: ReplReporter = new ReplReporter(this) - import formatting._ + import formatting.indentCode import reporter.{ printMessage, printUntruncatedMessage } // This exists mostly because using the reporter too early leads to deadlock. 
@@ -468,7 +469,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set } private def requestFromLine(line: String, synthetic: Boolean): Either[IR.Result, Request] = { - val content = indentCode(line) + val content = line //indentCode(line) val trees = parse(content) match { case parse.Incomplete => return Left(IR.Incomplete) case parse.Error => return Left(IR.Error) @@ -909,10 +910,10 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set else List("def %s = %s".format("$line", tquoted(originalLine)), "def %s = Nil".format("$trees")) } def preamble = s""" - |$preambleHeader - |%s%s%s - """.stripMargin.format(lineRep.readName, envLines.map(" " + _ + ";\n").mkString, - importsPreamble, indentCode(toCompute)) + |${preambleHeader format lineRep.readName} + |${envLines mkString (" ", ";\n ", ";\n")} + |$importsPreamble + |${indentCode(toCompute)}""".stripMargin val generate = (m: MemberHandler) => m extraCodeToEvaluate Request.this diff --git a/src/repl/scala/tools/nsc/interpreter/ReplProps.scala b/src/repl/scala/tools/nsc/interpreter/ReplProps.scala index 19f66e98a2..945129a868 100644 --- a/src/repl/scala/tools/nsc/interpreter/ReplProps.scala +++ b/src/repl/scala/tools/nsc/interpreter/ReplProps.scala @@ -17,11 +17,11 @@ class ReplProps { val colorOk = bool("scala.color") // Handy system prop for shell prompt, or else pick it up from compiler.properties + val promptString = Prop[String]("scala.repl.prompt").option getOrElse Properties.shellPromptString val prompt = { import scala.io.AnsiColor.{ MAGENTA, RESET } - val p = Prop[String]("scala.repl.prompt").option getOrElse Properties.shellPromptString - val q = String.format(p, Properties.versionNumberString) - if (colorOk) s"$MAGENTA$q$RESET" else q + val p = promptString format Properties.versionNumberString + if (colorOk) s"$MAGENTA$p$RESET" else p } val info = bool("scala.repl.info") diff --git a/test/files/jvm/interpreter.check b/test/files/jvm/interpreter.check index d03edb638c..9e875235c7 100644 --- a/test/files/jvm/interpreter.check +++ b/test/files/jvm/interpreter.check @@ -32,7 +32,7 @@ scala> val four: anotherint = 4 four: anotherint = 4 scala> val bogus: anotherint = "hello" -:8: error: type mismatch; +:11: error: type mismatch; found : String("hello") required: anotherint (which expands to) Int @@ -280,13 +280,13 @@ scala> // both of the following should abort immediately: scala> def x => y => z :1: error: '=' expected but '=>' found. - def x => y => z - ^ +def x => y => z + ^ scala> [1,2,3] :1: error: illegal start of definition - [1,2,3] - ^ +[1,2,3] +^ scala> @@ -355,7 +355,7 @@ defined class Term scala> def f(e: Exp) = e match { // non-exhaustive warning here case _:Fact => 3 } -:18: warning: match may not be exhaustive. +:21: warning: match may not be exhaustive. 
It would fail on the following inputs: Exp(), Term() def f(e: Exp) = e match { // non-exhaustive warning here ^ @@ -365,6 +365,6 @@ scala> :quit plusOne: (x: Int)Int res0: Int = 6 res0: String = after reset -:8: error: not found: value plusOne - plusOne(5) // should be undefined now - ^ +:11: error: not found: value plusOne + plusOne(5) // should be undefined now + ^ diff --git a/test/files/run/constrained-types.check b/test/files/run/constrained-types.check index 89a08d5ccb..6dbf8088c9 100644 --- a/test/files/run/constrained-types.check +++ b/test/files/run/constrained-types.check @@ -135,16 +135,16 @@ y: String = hello scala> scala> val x = 3 : Int @Annot(e+f+g+h) // should have a graceful error message -:8: error: not found: value e +:11: error: not found: value e val x = 3 : Int @Annot(e+f+g+h) // should have a graceful error message ^ -:8: error: not found: value f +:11: error: not found: value f val x = 3 : Int @Annot(e+f+g+h) // should have a graceful error message ^ -:8: error: not found: value g +:11: error: not found: value g val x = 3 : Int @Annot(e+f+g+h) // should have a graceful error message ^ -:8: error: not found: value h +:11: error: not found: value h val x = 3 : Int @Annot(e+f+g+h) // should have a graceful error message ^ diff --git a/test/files/run/kind-repl-command.check b/test/files/run/kind-repl-command.check index 586b2710e1..1853213555 100644 --- a/test/files/run/kind-repl-command.check +++ b/test/files/run/kind-repl-command.check @@ -21,8 +21,8 @@ scala> :k new { def empty = false } AnyRef{def empty: Boolean}'s kind is A scala> :k Nonexisting -:8: error: not found: value Nonexisting - Nonexisting - ^ +:11: error: not found: value Nonexisting + Nonexisting + ^ scala> :quit diff --git a/test/files/run/reify-repl-fail-gracefully.check b/test/files/run/reify-repl-fail-gracefully.check index c9e69744d6..eac4d25869 100644 --- a/test/files/run/reify-repl-fail-gracefully.check +++ b/test/files/run/reify-repl-fail-gracefully.check @@ -10,8 +10,8 @@ import scala.reflect.runtime.universe._ scala> scala> reify -:12: error: too few argument lists for macro invocation - reify - ^ +:15: error: too few argument lists for macro invocation + reify + ^ scala> :quit diff --git a/test/files/run/reify_newimpl_22.check b/test/files/run/reify_newimpl_22.check index 952f384a1c..24334df92c 100644 --- a/test/files/run/reify_newimpl_22.check +++ b/test/files/run/reify_newimpl_22.check @@ -17,9 +17,9 @@ scala> { } println(code.eval) } -:15: free term: Ident(TermName("x")) defined by res0 in :14:21 - val code = reify { - ^ +:18: free term: Ident(TermName("x")) defined by res0 in :17:14 + val code = reify { + ^ 2 scala> :quit diff --git a/test/files/run/reify_newimpl_23.check b/test/files/run/reify_newimpl_23.check index b7e9bfdfbc..f8379958db 100644 --- a/test/files/run/reify_newimpl_23.check +++ b/test/files/run/reify_newimpl_23.check @@ -16,7 +16,7 @@ scala> def foo[T]{ } println(code.eval) } -:13: free type: Ident(TypeName("T")) defined by foo in :12:16 +:16: free type: Ident(TypeName("T")) defined by foo in :15:16 val code = reify { ^ foo: [T]=> Unit diff --git a/test/files/run/reify_newimpl_25.check b/test/files/run/reify_newimpl_25.check index 4f36ba10ee..f9a5d7b578 100644 --- a/test/files/run/reify_newimpl_25.check +++ b/test/files/run/reify_newimpl_25.check @@ -7,9 +7,9 @@ scala> { val tt = implicitly[TypeTag[x.type]] println(tt) } -:11: free term: Ident(TermName("x")) defined by res0 in :10:21 - val tt = implicitly[TypeTag[x.type]] - ^ +:14: free term: Ident(TermName("x")) defined 
by res0 in :13:14 + val tt = implicitly[TypeTag[x.type]] + ^ TypeTag[x.type] scala> :quit diff --git a/test/files/run/reify_newimpl_26.check b/test/files/run/reify_newimpl_26.check index 681b862795..bd77d3d707 100644 --- a/test/files/run/reify_newimpl_26.check +++ b/test/files/run/reify_newimpl_26.check @@ -6,7 +6,7 @@ scala> def foo[T]{ val tt = implicitly[WeakTypeTag[List[T]]] println(tt) } -:9: free type: Ident(TypeName("T")) defined by foo in :7:16 +:12: free type: Ident(TypeName("T")) defined by foo in :10:16 val tt = implicitly[WeakTypeTag[List[T]]] ^ foo: [T]=> Unit diff --git a/test/files/run/repl-bare-expr.check b/test/files/run/repl-bare-expr.check index 07cf23412f..38ad7e818d 100644 --- a/test/files/run/repl-bare-expr.check +++ b/test/files/run/repl-bare-expr.check @@ -2,33 +2,33 @@ Type in expressions to have them evaluated. Type :help for more information. scala> 2 ; 3 -:7: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses - 2 ;; - ^ +:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses + 2 ;; + ^ res0: Int = 3 scala> { 2 ; 3 } -:8: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses - { 2 ; 3 } - ^ +:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses + { 2 ; 3 } + ^ res1: Int = 3 scala> 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { 1 + 2 + 3 } ; bippy+88+11 -:7: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses - 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { - ^ -:7: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses - 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { - ^ -:7: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses - 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { - ^ -:7: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses - 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { - ^ +:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses + 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { + ^ +:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses + 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { + ^ +:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses + 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { + ^ +:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses + 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { + ^ defined object Cow defined class Moo bippy: Int diff --git a/test/files/run/repl-colon-type.check b/test/files/run/repl-colon-type.check index 9898027c1d..fa33af3beb 100644 --- a/test/files/run/repl-colon-type.check +++ 
b/test/files/run/repl-colon-type.check @@ -3,8 +3,8 @@ Type :help for more information. scala> :type List[1, 2, 3] :1: error: identifier expected but integer literal found. - List[1, 2, 3] - ^ +List[1, 2, 3] + ^ scala> :type List(1, 2, 3) List[Int] diff --git a/test/files/run/repl-parens.check b/test/files/run/repl-parens.check index 74d15ff93c..756a063b68 100644 --- a/test/files/run/repl-parens.check +++ b/test/files/run/repl-parens.check @@ -20,12 +20,12 @@ scala> ( (2 + 2 ) ) res5: Int = 4 scala> 5 ; ( (2 + 2 ) ) ; ((5)) -:7: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses - 5 ; ( (2 + 2 ) ) ;; - ^ -:7: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses - 5 ; ( (2 + 2 ) ) ;; - ^ +:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses + 5 ; ( (2 + 2 ) ) ;; + ^ +:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses + 5 ; ( (2 + 2 ) ) ;; + ^ res6: Int = 5 scala> (((2 + 2)), ((2 + 2))) @@ -40,18 +40,18 @@ res9: String = 4423 scala> scala> 55 ; ((2 + 2)) ; (1, 2, 3) -:7: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses - 55 ; ((2 + 2)) ;; - ^ -:7: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses - 55 ; ((2 + 2)) ;; - ^ +:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses + 55 ; ((2 + 2)) ;; + ^ +:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses + 55 ; ((2 + 2)) ;; + ^ res10: (Int, Int, Int) = (1,2,3) scala> 55 ; (x: Int) => x + 1 ; () => ((5)) -:7: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses - 55 ; (x: Int) => x + 1 ;; - ^ +:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses + 55 ; (x: Int) => x + 1 ;; + ^ res11: () => Int = scala> @@ -60,9 +60,9 @@ scala> () => 5 res12: () => Int = scala> 55 ; () => 5 -:7: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses - 55 ;; - ^ +:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses + 55 ;; + ^ res13: () => Int = scala> () => { class X ; new X } diff --git a/test/files/run/repl-paste-2.check b/test/files/run/repl-paste-2.check index 6ea8e2f419..bc80615107 100644 --- a/test/files/run/repl-paste-2.check +++ b/test/files/run/repl-paste-2.check @@ -44,7 +44,7 @@ scala> res5 + res6 res1: Int = 690 scala> val x = dingus -:7: error: not found: value dingus +:10: error: not found: value dingus val x = dingus ^ diff --git a/test/files/run/repl-reset.check b/test/files/run/repl-reset.check index cd7893bbc3..a2d88fd445 100644 --- a/test/files/run/repl-reset.check +++ b/test/files/run/repl-reset.check @@ -30,23 +30,23 @@ Forgetting all expression results and named terms: $intp, BippyBungus, x1, x2, x Forgetting defined types: BippyBungus scala> x1 + x2 + x3 -:8: error: not found: value x1 - x1 + x2 + x3 - ^ -:8: error: not found: value x2 - x1 + x2 + x3 - ^ -:8: error: not found: value x3 - x1 + x2 + x3 - ^ +:11: error: not found: value x1 + x1 + x2 + x3 + ^ +:11: error: not found: value x2 + x1 + x2 + x3 + ^ +:11: error: not found: value x3 + x1 + x2 + x3 + ^ scala> val x1 = 4 x1: Int = 4 
scala> new BippyBungus -:8: error: not found: type BippyBungus - new BippyBungus - ^ +:11: error: not found: type BippyBungus + new BippyBungus + ^ scala> class BippyBungus() { def f = 5 } defined class BippyBungus diff --git a/test/files/run/repl-trim-stack-trace.scala b/test/files/run/repl-trim-stack-trace.scala index a53ce3b3e4..b8c1668691 100644 --- a/test/files/run/repl-trim-stack-trace.scala +++ b/test/files/run/repl-trim-stack-trace.scala @@ -13,7 +13,7 @@ f: Nothing scala> f java.lang.Exception: Uh-oh - at .f(:7) + at .f(:10) ... 69 elided scala> def f = throw new Exception("") @@ -21,7 +21,7 @@ f: Nothing scala> f java.lang.Exception: - at .f(:7) + at .f(:10) ... 69 elided scala> def f = throw new Exception @@ -29,7 +29,7 @@ f: Nothing scala> f java.lang.Exception - at .f(:7) + at .f(:10) ... 69 elided scala> :quit""" diff --git a/test/files/run/t4542.check b/test/files/run/t4542.check index f7716dc2f0..f7aad3deb2 100644 --- a/test/files/run/t4542.check +++ b/test/files/run/t4542.check @@ -7,7 +7,7 @@ scala> @deprecated("foooo", "ReplTest version 1.0-FINAL") class Foo() { defined class Foo scala> val f = new Foo -:8: warning: class Foo is deprecated: foooo +:11: warning: class Foo is deprecated: foooo val f = new Foo ^ f: Foo = Bippy diff --git a/test/files/run/t4594-repl-settings.scala b/test/files/run/t4594-repl-settings.scala index db5dc19866..1b883983cf 100644 --- a/test/files/run/t4594-repl-settings.scala +++ b/test/files/run/t4594-repl-settings.scala @@ -17,7 +17,7 @@ object Test extends SessionTest { |scala> :settings -deprecation | |scala> def b = depp - |:8: warning: method depp is deprecated: Please don't do that. + |:11: warning: method depp is deprecated: Please don't do that. | def b = depp | ^ |b: String diff --git a/test/files/run/t5655.check b/test/files/run/t5655.check index 4bbc54b641..e8375c3e90 100644 --- a/test/files/run/t5655.check +++ b/test/files/run/t5655.check @@ -8,19 +8,19 @@ scala> import x._ import x._ scala> x -:12: error: reference to x is ambiguous; +:15: error: reference to x is ambiguous; it is imported twice in the same scope by import x._ and import x - x - ^ + x + ^ scala> x -:12: error: reference to x is ambiguous; +:15: error: reference to x is ambiguous; it is imported twice in the same scope by import x._ and import x - x - ^ + x + ^ scala> :quit diff --git a/test/files/run/t7319.check b/test/files/run/t7319.check index e35cfc90c0..885136b432 100644 --- a/test/files/run/t7319.check +++ b/test/files/run/t7319.check @@ -17,25 +17,25 @@ warning: there was one feature warning; re-run with -feature for details convert: [F[X <: F[X]]](builder: F[_ <: F[_]])Int scala> convert(Some[Int](0)) -:12: error: no type parameters for method convert: (builder: F[_ <: F[_]])Int exist so that it can be applied to arguments (Some[Int]) +:15: error: no type parameters for method convert: (builder: F[_ <: F[_]])Int exist so that it can be applied to arguments (Some[Int]) --- because --- argument expression's type is not compatible with formal parameter type; found : Some[Int] required: ?F[_$1] forSome { type _$1 <: ?F[_$2] forSome { type _$2 } } - convert(Some[Int](0)) - ^ -:12: error: type mismatch; + convert(Some[Int](0)) + ^ +:15: error: type mismatch; found : Some[Int] required: F[_ <: F[_]] - convert(Some[Int](0)) - ^ + convert(Some[Int](0)) + ^ scala> Range(1,2).toArray: Seq[_] -:11: error: polymorphic expression cannot be instantiated to expected type; +:14: error: polymorphic expression cannot be instantiated to expected type; found : [B >: Int]Array[B] 
required: Seq[_] - Range(1,2).toArray: Seq[_] - ^ + Range(1,2).toArray: Seq[_] + ^ scala> 0 res2: Int = 0 diff --git a/test/files/run/t7747-repl.check b/test/files/run/t7747-repl.check index 5f436ba6b1..98e3d3b821 100644 --- a/test/files/run/t7747-repl.check +++ b/test/files/run/t7747-repl.check @@ -17,33 +17,33 @@ scala> val z = x * y z: Int = 156 scala> 2 ; 3 -:7: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses - 2 ;; - ^ +:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses + 2 ;; + ^ res0: Int = 3 scala> { 2 ; 3 } -:8: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses - { 2 ; 3 } - ^ +:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses + { 2 ; 3 } + ^ res1: Int = 3 scala> 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { 1 + 2 + 3 } ; bippy+88+11 -:7: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses - 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { - ^ -:7: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses - 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { - ^ -:7: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses - 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { - ^ -:7: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses - 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { - ^ +:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses + 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { + ^ +:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses + 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { + ^ +:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses + 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { + ^ +:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses + 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { + ^ defined object Cow defined class Moo bippy: Int @@ -83,12 +83,12 @@ scala> ( (2 + 2 ) ) res10: Int = 4 scala> 5 ; ( (2 + 2 ) ) ; ((5)) -:7: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses - 5 ; ( (2 + 2 ) ) ;; - ^ -:7: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses - 5 ; ( (2 + 2 ) ) ;; - ^ +:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses + 5 ; ( (2 + 2 ) ) ;; + ^ +:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses + 5 ; ( (2 + 2 ) ) ;; + ^ res11: Int = 5 scala> (((2 + 2)), ((2 + 2))) @@ -103,18 +103,18 @@ res14: String = 4423 scala> 
scala> 55 ; ((2 + 2)) ; (1, 2, 3) -:7: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses - 55 ; ((2 + 2)) ;; - ^ -:7: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses - 55 ; ((2 + 2)) ;; - ^ +:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses + 55 ; ((2 + 2)) ;; + ^ +:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses + 55 ; ((2 + 2)) ;; + ^ res15: (Int, Int, Int) = (1,2,3) scala> 55 ; (x: Int) => x + 1 ; () => ((5)) -:9: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses - 55 ; (x: Int) => x + 1 ;; - ^ +:12: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses + 55 ; (x: Int) => x + 1 ;; + ^ res16: () => Int = scala> @@ -123,9 +123,9 @@ scala> () => 5 res17: () => Int = scala> 55 ; () => 5 -:7: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses - 55 ;; - ^ +:10: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses + 55 ;; + ^ res18: () => Int = scala> () => { class X ; new X } @@ -211,23 +211,23 @@ Forgetting all expression results and named terms: $intp, BippyBungus, Bovine, C Forgetting defined types: BippyBungus, Moo, Ruminant scala> x1 + x2 + x3 -:8: error: not found: value x1 - x1 + x2 + x3 - ^ -:8: error: not found: value x2 - x1 + x2 + x3 - ^ -:8: error: not found: value x3 - x1 + x2 + x3 - ^ +:11: error: not found: value x1 + x1 + x2 + x3 + ^ +:11: error: not found: value x2 + x1 + x2 + x3 + ^ +:11: error: not found: value x3 + x1 + x2 + x3 + ^ scala> val x1 = 4 x1: Int = 4 scala> new BippyBungus -:8: error: not found: type BippyBungus - new BippyBungus - ^ +:11: error: not found: type BippyBungus + new BippyBungus + ^ scala> class BippyBungus() { def f = 5 } defined class BippyBungus diff --git a/test/files/run/t9170.scala b/test/files/run/t9170.scala index 25a0e84581..67a58d6803 100644 --- a/test/files/run/t9170.scala +++ b/test/files/run/t9170.scala @@ -10,17 +10,17 @@ object Test extends SessionTest { Type :help for more information. 
scala> object Y { def f[A](a: => A) = 1 ; def f[A](a: => Either[Exception, A]) = 2 } -:7: error: double definition: -def f[A](a: => A): Int at line 7 and -def f[A](a: => Either[Exception,A]): Int at line 7 +:10: error: double definition: +def f[A](a: => A): Int at line 10 and +def f[A](a: => Either[Exception,A]): Int at line 10 have same type after erasure: (a: Function0)Int object Y { def f[A](a: => A) = 1 ; def f[A](a: => Either[Exception, A]) = 2 } ^ scala> object Y { def f[A](a: => A) = 1 ; def f[A](a: => Either[Exception, A]) = 2 } -:7: error: double definition: -def f[A](a: => A): Int at line 7 and -def f[A](a: => Either[Exception,A]): Int at line 7 +:10: error: double definition: +def f[A](a: => A): Int at line 10 and +def f[A](a: => Either[Exception,A]): Int at line 10 have same type after erasure: (a: Function0)Int object Y { def f[A](a: => A) = 1 ; def f[A](a: => Either[Exception, A]) = 2 } ^ @@ -29,9 +29,9 @@ scala> object Y { | def f[A](a: => A) = 1 | def f[A](a: => Either[Exception, A]) = 2 | } -:9: error: double definition: -def f[A](a: => A): Int at line 8 and -def f[A](a: => Either[Exception,A]): Int at line 9 +:12: error: double definition: +def f[A](a: => A): Int at line 11 and +def f[A](a: => Either[Exception,A]): Int at line 12 have same type after erasure: (a: Function0)Int def f[A](a: => Either[Exception, A]) = 2 ^ @@ -46,9 +46,9 @@ object Y { // Exiting paste mode, now interpreting. -:9: error: double definition: -def f[A](a: => A): Int at line 8 and -def f[A](a: => Either[Exception,A]): Int at line 9 +:12: error: double definition: +def f[A](a: => A): Int at line 11 and +def f[A](a: => Either[Exception,A]): Int at line 12 have same type after erasure: (a: Function0)Int def f[A](a: => Either[Exception, A]) = 2 ^ diff --git a/test/files/run/t9206.scala b/test/files/run/t9206.scala new file mode 100644 index 0000000000..c0484d9217 --- /dev/null +++ b/test/files/run/t9206.scala @@ -0,0 +1,26 @@ + +import scala.tools.partest.SessionTest + +object Test extends SessionTest { +//Welcome to Scala version 2.11.6 (Java HotSpot(TM) 64-Bit Server VM, Java 1.8.0_40). + def session = + s"""|Type in expressions to have them evaluated. + |Type :help for more information. + | + |scala> val i: Int = "foo" + |:10: error: type mismatch; + | found : String("foo") + | required: Int + | val i: Int = "foo" + | ^ + | + |scala> { val j = 42 ; val i: Int = "foo" + j } + |:11: error: type mismatch; + | found : String + | required: Int + | { val j = 42 ; val i: Int = "foo" + j } + | ^ + | + |scala> :quit""" + +} diff --git a/test/files/run/xMigration.check b/test/files/run/xMigration.check index 378f7bb6c3..304132a848 100644 --- a/test/files/run/xMigration.check +++ b/test/files/run/xMigration.check @@ -12,10 +12,10 @@ res1: Iterable[String] = MapLike(eis) scala> :setting -Xmigration:any scala> Map(1 -> "eis").values // warn -:8: warning: method values in trait MapLike has changed semantics in version 2.8.0: +:11: warning: method values in trait MapLike has changed semantics in version 2.8.0: `values` returns `Iterable[B]` rather than `Iterator[B]`. 
- Map(1 -> "eis").values // warn - ^ + Map(1 -> "eis").values // warn + ^ res2: Iterable[String] = MapLike(eis) scala> :setting -Xmigration:2.8 @@ -26,10 +26,10 @@ res3: Iterable[String] = MapLike(eis) scala> :setting -Xmigration:2.7 scala> Map(1 -> "eis").values // warn -:8: warning: method values in trait MapLike has changed semantics in version 2.8.0: +:11: warning: method values in trait MapLike has changed semantics in version 2.8.0: `values` returns `Iterable[B]` rather than `Iterator[B]`. - Map(1 -> "eis").values // warn - ^ + Map(1 -> "eis").values // warn + ^ res4: Iterable[String] = MapLike(eis) scala> :setting -Xmigration:2.11 @@ -40,10 +40,10 @@ res5: Iterable[String] = MapLike(eis) scala> :setting -Xmigration // same as :any scala> Map(1 -> "eis").values // warn -:8: warning: method values in trait MapLike has changed semantics in version 2.8.0: +:11: warning: method values in trait MapLike has changed semantics in version 2.8.0: `values` returns `Iterable[B]` rather than `Iterator[B]`. - Map(1 -> "eis").values // warn - ^ + Map(1 -> "eis").values // warn + ^ res6: Iterable[String] = MapLike(eis) scala> :quit -- cgit v1.2.3 From a9053f429983cc322a2fd2eca8fd0188036c7378 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Wed, 17 Jun 2015 23:25:39 -0700 Subject: SI-9206 Verbose REPL prompt for info mode Only for exactly `-Dscala.repl.info`, include the complete version number string in the REPL prompt. One could imagine this is the mode for posting snippets to stackoverflow. --- src/repl/scala/tools/nsc/interpreter/ReplProps.scala | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/src/repl/scala/tools/nsc/interpreter/ReplProps.scala b/src/repl/scala/tools/nsc/interpreter/ReplProps.scala index 945129a868..df65e9974d 100644 --- a/src/repl/scala/tools/nsc/interpreter/ReplProps.scala +++ b/src/repl/scala/tools/nsc/interpreter/ReplProps.scala @@ -6,6 +6,7 @@ package scala.tools.nsc package interpreter +import Properties.shellPromptString import scala.sys._ import Prop._ @@ -16,19 +17,19 @@ class ReplProps { // This property is used in TypeDebugging. Let's recycle it. val colorOk = bool("scala.color") + val info = bool("scala.repl.info") + val debug = bool("scala.repl.debug") + val trace = bool("scala.repl.trace") + val power = bool("scala.repl.power") + // Handy system prop for shell prompt, or else pick it up from compiler.properties - val promptString = Prop[String]("scala.repl.prompt").option getOrElse Properties.shellPromptString + val promptString = Prop[String]("scala.repl.prompt").option getOrElse (if (info) "%nscala %s> " else shellPromptString) val prompt = { import scala.io.AnsiColor.{ MAGENTA, RESET } val p = promptString format Properties.versionNumberString if (colorOk) s"$MAGENTA$p$RESET" else p } - val info = bool("scala.repl.info") - val debug = bool("scala.repl.debug") - val trace = bool("scala.repl.trace") - val power = bool("scala.repl.power") - /** CSV of paged,across to enable pagination or `-x` style * columns, "across" instead of down the column. Since * pagination turns off columnar output, these flags are -- cgit v1.2.3 From 63812c18b23de60039fd3267e4806449ea679972 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Fri, 19 Jun 2015 20:28:40 +0200 Subject: SI-9359 Fix InnerClass entry flags for nested Java enums The access flags in InnerClass entries for nested Java enums were basically completely off. A first step is to use the recently introduced backend method `javaClassfileFlags`, which is now moved to BCodeAsmCommon. 
See its doc for an explanation. Then the flags of the enum class symbol were off. An enum is - final if none of its values has a class body - abstract if it has an abstract method (https://docs.oracle.com/javase/specs/jls/se7/html/jls-8.html#jls-8.9) When using the ClassfileParser: - ENUM was never added. I guess that's just an oversight. - ABSTRACT (together with SEALED) was always added. This is to enable exhaustiveness checking, see 3f7b8b5. This is a hack and we have to go through the class members in the backend to find out if the enum actually has the `ACC_ABSTRACT` flag or not. When using the JavaParser: - FINAL was never added. - ABSTRACT was never added. This commit fixes all of the above and tests cases (Java enum read from the classfile and from source). --- .../tools/nsc/backend/jvm/BCodeAsmCommon.scala | 52 ++++++++++++++++++++++ .../scala/tools/nsc/backend/jvm/BTypes.scala | 13 +----- .../tools/nsc/backend/jvm/BTypesFromSymbols.scala | 29 ------------ .../scala/tools/nsc/backend/jvm/GenASM.scala | 8 +--- .../scala/tools/nsc/javac/JavaParsers.scala | 24 ++++++++-- .../nsc/symtab/classfile/ClassfileParser.scala | 2 + .../reflect/internal/ClassfileConstants.scala | 13 +++--- test/files/run/t9359.check | 18 ++++++++ test/files/run/t9359/A_1.java | 19 ++++++++ test/files/run/t9359/B_2.java | 19 ++++++++ test/files/run/t9359/Test_2.scala | 28 ++++++++++++ 11 files changed, 169 insertions(+), 56 deletions(-) create mode 100644 test/files/run/t9359.check create mode 100644 test/files/run/t9359/A_1.java create mode 100644 test/files/run/t9359/B_2.java create mode 100644 test/files/run/t9359/Test_2.scala diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeAsmCommon.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeAsmCommon.scala index eadc404bee..dec5adc9aa 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeAsmCommon.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeAsmCommon.scala @@ -9,6 +9,7 @@ package backend.jvm import scala.tools.nsc.Global import scala.tools.nsc.backend.jvm.BTypes.{InternalName, MethodInlineInfo, InlineInfo} import BackendReporting.ClassSymbolInfoFailureSI9111 +import scala.tools.asm /** * This trait contains code shared between GenBCode and GenASM that depends on types defined in @@ -228,6 +229,44 @@ final class BCodeAsmCommon[G <: Global](val global: G) { sym.isPackageClass || sym.isModuleClass && isOriginallyStaticOwner(sym.originalOwner) } + /** + * Reconstruct the classfile flags from a Java defined class symbol. + * + * The implementation of this method is slightly different that `javaFlags` in BTypesFromSymbols. + * The javaFlags method is primarily used to map Scala symbol flags to sensible classfile flags + * that are used in the generated classfiles. For example, all classes emitted by the Scala + * compiler have ACC_PUBLIC. + * + * When building a [[ClassBType]] from a Java class symbol, the flags in the type's `info` have + * to correspond exactly to the flags in the classfile. For example, if the class is package + * protected (i.e., it doesn't have the ACC_PUBLIC flag), this needs to be reflected in the + * ClassBType. For example, the inliner needs the correct flags for access checks. 
+ * + * Class flags are listed here: + * https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-4.html#jvms-4.1-200-E.1 + */ + def javaClassfileFlags(classSym: Symbol): Int = { + assert(classSym.isJava, s"Expected Java class symbol, got ${classSym.fullName}") + import asm.Opcodes._ + def enumFlags = ACC_ENUM | { + // Java enums have the `ACC_ABSTRACT` flag if they have a deferred method. + // We cannot trust `hasAbstractFlag`: the ClassfileParser adds `ABSTRACT` and `SEALED` to all + // Java enums for exhaustiveness checking. + val hasAbstractMethod = classSym.info.decls.exists(s => s.isMethod && s.isDeferred) + if (hasAbstractMethod) ACC_ABSTRACT else 0 + } + GenBCode.mkFlags( + if (classSym.isPublic) ACC_PUBLIC else 0, + if (classSym.isFinal) ACC_FINAL else 0, + // see the link above. javac does the same: ACC_SUPER for all classes, but not interfaces. + if (classSym.isInterface) ACC_INTERFACE else ACC_SUPER, + // for Java enums, we cannot trust `hasAbstractFlag` (see comment in enumFlags) + if (!classSym.hasEnumFlag && classSym.hasAbstractFlag) ACC_ABSTRACT else 0, + if (classSym.isArtifact) ACC_SYNTHETIC else 0, + if (classSym.hasEnumFlag) enumFlags else 0 + ) + } + /** * The member classes of a class symbol. Note that the result of this method depends on the * current phase, for example, after lambdalift, all local classes become member of the enclosing @@ -399,3 +438,16 @@ final class BCodeAsmCommon[G <: Global](val global: G) { InlineInfo(traitSelfType, isEffectivelyFinal, methodInlineInfos, warning) } } + +object BCodeAsmCommon { + /** + * Valid flags for InnerClass attribute entry. + * See http://docs.oracle.com/javase/specs/jvms/se8/html/jvms-4.html#jvms-4.7.6 + */ + val INNER_CLASSES_FLAGS = { + asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_PRIVATE | asm.Opcodes.ACC_PROTECTED | + asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_FINAL | asm.Opcodes.ACC_INTERFACE | + asm.Opcodes.ACC_ABSTRACT | asm.Opcodes.ACC_SYNTHETIC | asm.Opcodes.ACC_ANNOTATION | + asm.Opcodes.ACC_ENUM + } +} diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala index e61190bf3a..176292669c 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala @@ -898,7 +898,7 @@ abstract class BTypes { // the static flag in the InnerClass table has a special meaning, see InnerClass comment i.flags & ~Opcodes.ACC_STATIC, if (isStaticNestedClass) Opcodes.ACC_STATIC else 0 - ) & ClassBType.INNER_CLASSES_FLAGS + ) & BCodeAsmCommon.INNER_CLASSES_FLAGS ) }) @@ -987,17 +987,6 @@ abstract class BTypes { } object ClassBType { - /** - * Valid flags for InnerClass attribute entry. - * See http://docs.oracle.com/javase/specs/jvms/se8/html/jvms-4.html#jvms-4.7.6 - */ - private val INNER_CLASSES_FLAGS = { - asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_PRIVATE | asm.Opcodes.ACC_PROTECTED | - asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_FINAL | asm.Opcodes.ACC_INTERFACE | - asm.Opcodes.ACC_ABSTRACT | asm.Opcodes.ACC_SYNTHETIC | asm.Opcodes.ACC_ANNOTATION | - asm.Opcodes.ACC_ENUM - } - // Primitive classes have no super class. A ClassBType for those is only created when // they are actually being compiled (e.g., when compiling scala/Boolean.scala). 
private val hasNoSuper = Set( diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala index fffb9286b8..93734d3935 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala @@ -213,35 +213,6 @@ class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { assert(!primitiveTypeMap.contains(sym) || isCompilingPrimitive, sym) } - /** - * Reconstruct the classfile flags from a Java defined class symbol. - * - * The implementation of this method is slightly different that [[javaFlags]]. The javaFlags - * method is primarily used to map Scala symbol flags to sensible classfile flags that are used - * in the generated classfiles. For example, all classes emitted by the Scala compiler have - * ACC_PUBLIC. - * - * When building a [[ClassBType]] from a Java class symbol, the flags in the type's `info` have - * to correspond exactly to the flags in the classfile. For example, if the class is package - * protected (i.e., it doesn't have the ACC_PUBLIC flag), this needs to be reflected in the - * ClassBType. For example, the inliner needs the correct flags for access checks. - * - * Class flags are listed here: - * https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-4.html#jvms-4.1-200-E.1 - */ - private def javaClassfileFlags(classSym: Symbol): Int = { - assert(classSym.isJava, s"Expected Java class symbol, got ${classSym.fullName}") - import asm.Opcodes._ - GenBCode.mkFlags( - if (classSym.isPublic) ACC_PUBLIC else 0, - if (classSym.isFinal) ACC_FINAL else 0, - if (classSym.isInterface) ACC_INTERFACE else ACC_SUPER, // see the link above. javac does the same: ACC_SUPER for all classes, but not interfaces. - if (classSym.hasAbstractFlag) ACC_ABSTRACT else 0, - if (classSym.isArtifact) ACC_SYNTHETIC else 0, - if (classSym.hasEnumFlag) ACC_ENUM else 0 - ) - } - private def setClassInfo(classSym: Symbol, classBType: ClassBType): ClassBType = { val superClassSym = if (classSym.isImplClass) ObjectClass else classSym.superClass assert( diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala index 76af40b330..71686fd9d7 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala @@ -479,10 +479,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters { self => val CLASS_CONSTRUCTOR_NAME = "" val INSTANCE_CONSTRUCTOR_NAME = "" - val INNER_CLASSES_FLAGS = - (asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_PRIVATE | asm.Opcodes.ACC_PROTECTED | - asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_INTERFACE | asm.Opcodes.ACC_ABSTRACT | asm.Opcodes.ACC_FINAL) - // ----------------------------------------------------------------------------------------- // factory methods // ----------------------------------------------------------------------------------------- @@ -756,9 +752,9 @@ abstract class GenASM extends SubComponent with BytecodeWriters { self => val flagsWithFinal: Int = mkFlags( // See comment in BTypes, when is a class marked static in the InnerClass table. 
if (isOriginallyStaticOwner(innerSym.originalOwner)) asm.Opcodes.ACC_STATIC else 0, - javaFlags(innerSym), + (if (innerSym.isJava) javaClassfileFlags(innerSym) else javaFlags(innerSym)) & ~asm.Opcodes.ACC_STATIC, if(isDeprecated(innerSym)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo-access flag - ) & (INNER_CLASSES_FLAGS | asm.Opcodes.ACC_DEPRECATED) + ) & (BCodeAsmCommon.INNER_CLASSES_FLAGS | asm.Opcodes.ACC_DEPRECATED) val flags = if (innerSym.isModuleClass) flagsWithFinal & ~asm.Opcodes.ACC_FINAL else flagsWithFinal // For SI-5676, object overriding. val jname = javaName(innerSym) // never null val oname = outerName(innerSym) // null when method-enclosed diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala index d34c14be0f..9708cba281 100644 --- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala +++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala @@ -761,9 +761,13 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { val interfaces = interfacesOpt() accept(LBRACE) val buf = new ListBuffer[Tree] + var enumIsFinal = true def parseEnumConsts() { if (in.token != RBRACE && in.token != SEMI && in.token != EOF) { - buf += enumConst(enumType) + val (const, hasClassBody) = enumConst(enumType) + buf += const + // if any of the enum constants has a class body, the enum class is not final (JLS 8.9.) + enumIsFinal &&= !hasClassBody if (in.token == COMMA) { in.nextToken() parseEnumConsts() @@ -793,15 +797,25 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { accept(RBRACE) val superclazz = AppliedTypeTree(javaLangDot(tpnme.Enum), List(enumType)) + val finalFlag = if (enumIsFinal) Flags.FINAL else 0l + val abstractFlag = { + // javac adds `ACC_ABSTRACT` to enum classes with deferred members + val hasAbstractMember = body exists { + case d: DefDef => d.mods.isDeferred + case _ => false + } + if (hasAbstractMember) Flags.ABSTRACT else 0l + } addCompanionObject(consts ::: statics ::: predefs, atPos(pos) { - ClassDef(mods | Flags.ENUM, name, List(), + ClassDef(mods | Flags.ENUM | finalFlag | abstractFlag, name, List(), makeTemplate(superclazz :: interfaces, body)) }) } - def enumConst(enumType: Tree) = { + def enumConst(enumType: Tree): (ValDef, Boolean) = { annotations() - atPos(in.currentPos) { + var hasClassBody = false + val res = atPos(in.currentPos) { val name = ident() if (in.token == LPAREN) { // skip arguments @@ -809,12 +823,14 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { accept(RPAREN) } if (in.token == LBRACE) { + hasClassBody = true // skip classbody skipAhead() accept(RBRACE) } ValDef(Modifiers(Flags.ENUM | Flags.STABLE | Flags.JAVA | Flags.STATIC), name.toTermName, enumType, blankExpr) } + (res, hasClassBody) } def typeDecl(mods: Modifiers): List[Tree] = in.token match { diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index 518a402230..660028eab8 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -539,6 +539,8 @@ abstract class ClassfileParser { devWarning(s"no linked class for java enum $sym in ${sym.owner}. A referencing class file might be missing an InnerClasses entry.") case linked => if (!linked.isSealed) + // Marking the enum class SEALED | ABSTRACT enables exhaustiveness checking. 
+ // This is a bit of a hack and requires excluding the ABSTRACT flag in the backend, see method javaClassfileFlags. linked setFlag (SEALED | ABSTRACT) linked addChild sym } diff --git a/src/reflect/scala/reflect/internal/ClassfileConstants.scala b/src/reflect/scala/reflect/internal/ClassfileConstants.scala index e0a6757d34..53241fb15b 100644 --- a/src/reflect/scala/reflect/internal/ClassfileConstants.scala +++ b/src/reflect/scala/reflect/internal/ClassfileConstants.scala @@ -344,10 +344,12 @@ object ClassfileConstants { case JAVA_ACC_STATIC => STATIC case JAVA_ACC_ABSTRACT => if (isAnnotation) 0L else if (isClass) ABSTRACT else DEFERRED case JAVA_ACC_INTERFACE => if (isAnnotation) 0L else TRAIT | INTERFACE | ABSTRACT + case JAVA_ACC_ENUM => ENUM case _ => 0L } - private def translateFlags(jflags: Int, baseFlags: Long, isAnnotation: Boolean, isClass: Boolean): Long = { - def translateFlag0(jflags: Int): Long = translateFlag(jflags, isAnnotation, isClass) + private def translateFlags(jflags: Int, baseFlags: Long, isClass: Boolean): Long = { + val isAnnot = isAnnotation(jflags) + def translateFlag0(jflags: Int): Long = translateFlag(jflags, isAnnot, isClass) var res: Long = JAVA | baseFlags /* fast, elegant, maintainable, pick any two... */ res |= translateFlag0(jflags & JAVA_ACC_PRIVATE) @@ -357,17 +359,18 @@ object ClassfileConstants { res |= translateFlag0(jflags & JAVA_ACC_STATIC) res |= translateFlag0(jflags & JAVA_ACC_ABSTRACT) res |= translateFlag0(jflags & JAVA_ACC_INTERFACE) + res |= translateFlag0(jflags & JAVA_ACC_ENUM) res } def classFlags(jflags: Int): Long = { - translateFlags(jflags, 0, isAnnotation(jflags), isClass = true) + translateFlags(jflags, 0, isClass = true) } def fieldFlags(jflags: Int): Long = { - translateFlags(jflags, if ((jflags & JAVA_ACC_FINAL) == 0) MUTABLE else 0 , isAnnotation(jflags), isClass = false) + translateFlags(jflags, if ((jflags & JAVA_ACC_FINAL) == 0) MUTABLE else 0 , isClass = false) } def methodFlags(jflags: Int): Long = { - translateFlags(jflags, if ((jflags & JAVA_ACC_BRIDGE) != 0) BRIDGE | ARTIFACT else 0, isAnnotation(jflags), isClass = false) + translateFlags(jflags, if ((jflags & JAVA_ACC_BRIDGE) != 0) BRIDGE | ARTIFACT else 0, isClass = false) } } object FlagTranslation extends FlagTranslation { } diff --git a/test/files/run/t9359.check b/test/files/run/t9359.check new file mode 100644 index 0000000000..8dcfe4f60a --- /dev/null +++ b/test/files/run/t9359.check @@ -0,0 +1,18 @@ + // access flags 0x4009 + public static enum INNERCLASS A_1$A1N A_1 A1N + + // access flags 0x4409 + public static abstract enum INNERCLASS A_1$A1N_ABSTRACT A_1 A1N_ABSTRACT + + // access flags 0x4019 + public final static enum INNERCLASS A_1$A1N_FINAL A_1 A1N_FINAL + + // access flags 0x4009 + public static enum INNERCLASS B_2$A1N B_2 A1N + + // access flags 0x4409 + public static abstract enum INNERCLASS B_2$A1N_ABSTRACT B_2 A1N_ABSTRACT + + // access flags 0x4019 + public final static enum INNERCLASS B_2$A1N_FINAL B_2 A1N_FINAL + diff --git a/test/files/run/t9359/A_1.java b/test/files/run/t9359/A_1.java new file mode 100644 index 0000000000..3ac82ed55f --- /dev/null +++ b/test/files/run/t9359/A_1.java @@ -0,0 +1,19 @@ +public class A_1 { + // nested final + public static enum A1N_FINAL { + A1N_FINAL_VAL + } + + // nested, non-final + public enum A1N { + A1N_VAL { } // value has a body, so a class extending A1N is generated + } + + // nested, non-final, abstract + public enum A1N_ABSTRACT { + A1N_ABSTRACT_VAL { + void foo() { return; } + }; + abstract void foo(); 
// abstract member makes the enum class abstract + } +} diff --git a/test/files/run/t9359/B_2.java b/test/files/run/t9359/B_2.java new file mode 100644 index 0000000000..d824facda9 --- /dev/null +++ b/test/files/run/t9359/B_2.java @@ -0,0 +1,19 @@ +public class B_2 { + // nested final + public enum A1N_FINAL { + A1N_FINAL_VAL + } + + // nested, non-final + public enum A1N { + A1N_VAL { } // value has a body, so a class extending A1N is generated + } + + // nested, non-final, abstract + public enum A1N_ABSTRACT { + A1N_ABSTRACT_VAL { + void foo() { return; } + }; + abstract void foo(); // abstract member makes the enum class abstract + } +} diff --git a/test/files/run/t9359/Test_2.scala b/test/files/run/t9359/Test_2.scala new file mode 100644 index 0000000000..869c51b619 --- /dev/null +++ b/test/files/run/t9359/Test_2.scala @@ -0,0 +1,28 @@ +import scala.tools.partest.BytecodeTest +import scala.tools.asm +import asm.tree.{ClassNode, InnerClassNode} +import asm.{Opcodes => Flags} +import scala.collection.JavaConverters._ + +class C { + def f1: A_1.A1N_FINAL = A_1.A1N_FINAL.A1N_FINAL_VAL + def f2: A_1.A1N = A_1.A1N.A1N_VAL + def f3: A_1.A1N_ABSTRACT = A_1.A1N_ABSTRACT.A1N_ABSTRACT_VAL + + def f4: B_2.A1N_FINAL = B_2.A1N_FINAL.A1N_FINAL_VAL + def f5: B_2.A1N = B_2.A1N.A1N_VAL + def f6: B_2.A1N_ABSTRACT = B_2.A1N_ABSTRACT.A1N_ABSTRACT_VAL +} + +object Test extends BytecodeTest { + def tost(n: InnerClassNode) = { + val t = new asm.util.Textifier + t.visitInnerClass(n.name, n.outerName, n.innerName, n.access) + t.getText.get(0); + } + def show(): Unit = { + for (n <- loadClassNode("C").innerClasses.asScala.toList.sortBy(_.name)) { + println(tost(n)) + } + } +} -- cgit v1.2.3 From 5c7eefcabf92010dc730ed1a57160cbe6a3c0eaf Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Fri, 19 Jun 2015 15:26:33 -0700 Subject: Pin to non-crashy redcarpet --- Gemfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Gemfile b/Gemfile index 6921f792c3..b74696e550 100644 --- a/Gemfile +++ b/Gemfile @@ -4,4 +4,4 @@ source "https://rubygems.org" gem "jekyll", "2.5.3" gem "rouge" # gem 's3_website' -# gem 'redcarpet' +gem "redcarpet", "3.2.3" -- cgit v1.2.3 From 958e6259baf0ea303f6cee468be35b18107ffd41 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Sat, 20 Jun 2015 08:44:57 +0200 Subject: Fix spurious test failure under -Ybackend:GenBCode --- test/files/run/t7582.check | 5 ----- test/files/run/t7582/InlineHolder.scala | 3 +++ test/files/run/t7582b.check | 5 ----- test/files/run/t7582b/InlineHolder.scala | 3 +++ 4 files changed, 6 insertions(+), 10 deletions(-) diff --git a/test/files/run/t7582.check b/test/files/run/t7582.check index 2a11210000..0cfbf08886 100644 --- a/test/files/run/t7582.check +++ b/test/files/run/t7582.check @@ -1,6 +1 @@ -#partest !-Ybackend:GenBCode -warning: there was one inliner warning; re-run with -Yinline-warnings for details -#partest -Ybackend:GenBCode -warning: there was one inliner warning; re-run with -Yopt-warnings for details -#partest 2 diff --git a/test/files/run/t7582/InlineHolder.scala b/test/files/run/t7582/InlineHolder.scala index a18b9effaa..3cbf233ce1 100644 --- a/test/files/run/t7582/InlineHolder.scala +++ b/test/files/run/t7582/InlineHolder.scala @@ -1,3 +1,6 @@ +/* + * filter: inliner warning; re-run with + */ package p1 { object InlineHolder { @inline def inlinable = p1.PackageProtectedJava.protectedMethod() + 1 diff --git a/test/files/run/t7582b.check b/test/files/run/t7582b.check index 2a11210000..0cfbf08886 100644 --- a/test/files/run/t7582b.check +++ 
b/test/files/run/t7582b.check @@ -1,6 +1 @@ -#partest !-Ybackend:GenBCode -warning: there was one inliner warning; re-run with -Yinline-warnings for details -#partest -Ybackend:GenBCode -warning: there was one inliner warning; re-run with -Yopt-warnings for details -#partest 2 diff --git a/test/files/run/t7582b/InlineHolder.scala b/test/files/run/t7582b/InlineHolder.scala index a18b9effaa..3cbf233ce1 100644 --- a/test/files/run/t7582b/InlineHolder.scala +++ b/test/files/run/t7582b/InlineHolder.scala @@ -1,3 +1,6 @@ +/* + * filter: inliner warning; re-run with + */ package p1 { object InlineHolder { @inline def inlinable = p1.PackageProtectedJava.protectedMethod() + 1 -- cgit v1.2.3 From 3bfafbcf5903fc5c43190f1738c50bbbe0ca7d2b Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Sat, 20 Jun 2015 13:52:34 -0700 Subject: SI-9206 Accept paste with custom prompt But sans test. --- .../scala/tools/partest/ReplTest.scala | 12 +++++++----- src/repl/scala/tools/nsc/interpreter/ILoop.scala | 20 ++++++++------------ src/repl/scala/tools/nsc/interpreter/Pasted.scala | 20 +++++++++++++------- test/files/run/t9206.scala | 13 +++++++++++++ 4 files changed, 41 insertions(+), 24 deletions(-) diff --git a/src/partest-extras/scala/tools/partest/ReplTest.scala b/src/partest-extras/scala/tools/partest/ReplTest.scala index 5b65d6ab9b..1fde2370d3 100644 --- a/src/partest-extras/scala/tools/partest/ReplTest.scala +++ b/src/partest-extras/scala/tools/partest/ReplTest.scala @@ -75,18 +75,20 @@ abstract class SessionTest extends ReplTest { * Retain user input: prompt lines and continuations, without the prefix; or pasted text plus ctl-D. */ import SessionTest._ - override final def code = input findAllMatchIn (expected mkString ("", "\n", "\n")) map { - case input(null, null, prompted) => + lazy val pasted = input(prompt) + override final def code = pasted findAllMatchIn (expected mkString ("", "\n", "\n")) map { + case pasted(null, null, prompted) => def continued(m: Match): Option[String] = m match { case margin(text) => Some(text) case _ => None } margin.replaceSomeIn(prompted, continued) - case input(cmd, pasted, null) => + case pasted(cmd, pasted, null) => cmd + pasted + "\u0004" } mkString - final def prompt = "scala> " + // Just the last line of the interactive prompt + def prompt = "scala> " /** Default test is to compare expected and actual output and emit the diff on a failed comparison. 
*/ override def show() = { @@ -98,7 +100,7 @@ abstract class SessionTest extends ReplTest { } object SessionTest { // \R for line break is Java 8, \v for vertical space might suffice - val input = """(?m)^scala> (:pa.*\u000A)// Entering paste mode.*\u000A\u000A((?:.*\u000A)*)\u000A// Exiting paste mode.*\u000A|^scala> (.*\u000A(?:\s*\| .*\u000A)*)""".r + def input(prompt: String) = s"""(?m)^$prompt(:pa.*\u000A)// Entering paste mode.*\u000A\u000A((?:.*\u000A)*)\u000A// Exiting paste mode.*\u000A|^scala> (.*\u000A(?:\\s*\\| .*\u000A)*)""".r val margin = """(?m)^\s*\| (.*)$""".r } diff --git a/src/repl/scala/tools/nsc/interpreter/ILoop.scala b/src/repl/scala/tools/nsc/interpreter/ILoop.scala index 89061730f6..992cafb9c6 100644 --- a/src/repl/scala/tools/nsc/interpreter/ILoop.scala +++ b/src/repl/scala/tools/nsc/interpreter/ILoop.scala @@ -765,16 +765,13 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) } private object paste extends Pasted { + import scala.util.matching.Regex.quote val ContinueString = " | " - val PromptString = "scala> " - val testPrompt = PromptString.trim - val testOurPrompt = prompt.trim - val testBoth = testPrompt != testOurPrompt + val PromptString = prompt.lines.toList.last + val anyPrompt = s"""\\s*(?:${quote(PromptString.trim)}|${quote(AltPromptString.trim)})\\s*""".r - def isPrompt(line: String) = { - val text = line.trim - text == testOurPrompt || (testBoth && text == testPrompt) - } + def isPrompted(line: String) = matchesPrompt(line) + def isPromptOnly(line: String) = line match { case anyPrompt() => true ; case _ => false } def interpret(line: String): Unit = { echo(line.trim) @@ -784,10 +781,9 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) def transcript(start: String) = { echo("\n// Detected repl transcript paste: ctrl-D to finish.\n") - apply(Iterator(start) ++ readWhile(!isPrompt(_))) + apply(Iterator(start) ++ readWhile(!isPromptOnly(_))) } } - import paste.{ ContinueString, PromptString } /** Interpret expressions starting with the first line. * Read lines until a complete compilation unit is available @@ -809,7 +805,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) echo("You typed two blank lines. Starting a new command.") None } - else in.readLine(ContinueString) match { + else in.readLine(paste.ContinueString) match { case null => // we know compilation is going to fail since we're at EOF and the // parser thinks the input is still incomplete, but since this is @@ -833,7 +829,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) * and avoid the interpreter, as it's likely not valid scala code. */ if (code == "") None - else if (!paste.running && code.trim.startsWith(PromptString)) { + else if (!paste.running && paste.isPrompted(code)) { paste.transcript(code) None } diff --git a/src/repl/scala/tools/nsc/interpreter/Pasted.scala b/src/repl/scala/tools/nsc/interpreter/Pasted.scala index f5db3d9e3a..1600f32946 100644 --- a/src/repl/scala/tools/nsc/interpreter/Pasted.scala +++ b/src/repl/scala/tools/nsc/interpreter/Pasted.scala @@ -16,17 +16,21 @@ package interpreter * the same result. 
*/ abstract class Pasted { + def interpret(line: String): Unit def ContinueString: String def PromptString: String - def interpret(line: String): Unit + def AltPromptString: String = "scala> " + + private val testBoth = PromptString != AltPromptString + private val spacey = " \t".toSet - def matchesPrompt(line: String) = matchesString(line, PromptString) + def matchesPrompt(line: String) = matchesString(line, PromptString) || testBoth && matchesString(line, AltPromptString) def matchesContinue(line: String) = matchesString(line, ContinueString) def running = isRunning private def matchesString(line: String, target: String): Boolean = ( (line startsWith target) || - (line.nonEmpty && " \t".toSet(line.head) && matchesString(line.tail, target)) + (line.nonEmpty && spacey(line.head) && matchesString(line.tail, target)) ) private def stripString(line: String, target: String) = line indexOf target match { case -1 => line @@ -39,7 +43,9 @@ abstract class Pasted { private class PasteAnalyzer(val lines: List[String]) { val referenced = lines flatMap (resReference findAllIn _.trim.stripPrefix("res")) toSet - val cmds = lines reduceLeft append split PromptString filterNot (_.trim == "") toList + val ActualPromptString = lines find matchesPrompt map (s => + if (matchesString(s, PromptString)) PromptString else AltPromptString) getOrElse PromptString + val cmds = lines reduceLeft append split ActualPromptString filterNot (_.trim == "") toList /** If it's a prompt or continuation line, strip the formatting bits and * assemble the code. Otherwise ship it off to be analyzed for res references @@ -67,10 +73,10 @@ abstract class Pasted { */ def fixResRefs(code: String, line: String) = line match { case resCreation(resName) if referenced(resName) => - code.lastIndexOf(PromptString) match { + code.lastIndexOf(ActualPromptString) match { case -1 => code case idx => - val (str1, str2) = code splitAt (idx + PromptString.length) + val (str1, str2) = code splitAt (idx + ActualPromptString.length) str2 match { case resAssign(`resName`) => code case _ => "%sval %s = { %s }".format(str1, resName, str2) @@ -82,7 +88,7 @@ abstract class Pasted { def run() { println("// Replaying %d commands from transcript.\n" format cmds.size) cmds foreach { cmd => - print(PromptString) + print(ActualPromptString) interpret(cmd) } } diff --git a/test/files/run/t9206.scala b/test/files/run/t9206.scala index c0484d9217..2942d0f73e 100644 --- a/test/files/run/t9206.scala +++ b/test/files/run/t9206.scala @@ -2,6 +2,7 @@ import scala.tools.partest.SessionTest object Test extends SessionTest { + //override def prompt = "XXX> " //Welcome to Scala version 2.11.6 (Java HotSpot(TM) 64-Bit Server VM, Java 1.8.0_40). def session = s"""|Type in expressions to have them evaluated. @@ -22,5 +23,17 @@ object Test extends SessionTest { | ^ | |scala> :quit""" + /* + |XXX> XXX> def f = 42 + | + |// Detected repl transcript paste: ctrl-D to finish. + | + |// Replaying 1 commands from transcript. + | + |XXX> def f = 42 + |f: Int + | + |XXX> :quit""" + */ } -- cgit v1.2.3 From 7968421bd6515eeb88fb420bae3ff3bc23e5876d Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Sun, 21 Jun 2015 11:13:30 -0700 Subject: SI-9206 Local refactor to save eyesight We talk about bit rot but not about how dust accumulates on code that hasn't been swept since the last time the furniture was moved around. 
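A minimal, self-contained sketch of the prompt-detection idea in the SI-9206 changes above (not the actual `Pasted`/`ILoop` code): a pasted line counts as prompted if, after leading blanks, it starts with either the configured prompt or the stock `scala> ` fallback. The object name is invented, and the custom prompt `XXX> ` is only borrowed from the commented-out hint in t9206.scala.

```scala
// Sketch only: mirrors the matchesPrompt/AltPromptString idea, not the real implementation.
object PromptSketch {
  val customPrompt  = "XXX> "   // hypothetical value of -Dscala.repl.prompt
  val defaultPrompt = "scala> " // the stock REPL prompt, always accepted as a fallback

  // A line "starts with" a prompt if it does so after any leading spaces or tabs.
  private def startsWithPrompt(line: String, prompt: String): Boolean =
    line.dropWhile(c => c == ' ' || c == '\t').startsWith(prompt)

  def matchesPrompt(line: String): Boolean =
    startsWithPrompt(line, customPrompt) || startsWithPrompt(line, defaultPrompt)

  def main(args: Array[String]): Unit = {
    assert(matchesPrompt("scala> val x = 1"))   // default prompt
    assert(matchesPrompt("  XXX> def f = 42"))  // custom prompt with leading spaces
    assert(!matchesPrompt("val x = 1"))         // plain code, not a transcript line
    println("prompt detection sketch OK")
  }
}
```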
--- src/repl/scala/tools/nsc/interpreter/ILoop.scala | 79 +++++++++++------------ src/repl/scala/tools/nsc/interpreter/Pasted.scala | 2 +- 2 files changed, 39 insertions(+), 42 deletions(-) diff --git a/src/repl/scala/tools/nsc/interpreter/ILoop.scala b/src/repl/scala/tools/nsc/interpreter/ILoop.scala index 992cafb9c6..bf4d107215 100644 --- a/src/repl/scala/tools/nsc/interpreter/ILoop.scala +++ b/src/repl/scala/tools/nsc/interpreter/ILoop.scala @@ -783,8 +783,16 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) echo("\n// Detected repl transcript paste: ctrl-D to finish.\n") apply(Iterator(start) ++ readWhile(!isPromptOnly(_))) } + + def unapply(line: String): Boolean = isPrompted(line) + } + + private object invocation { + def unapply(line: String): Boolean = Completion.looksLikeInvocation(line) } + private val lineComment = """\s*//.*""".r // all comment + /** Interpret expressions starting with the first line. * Read lines until a complete compilation unit is available * or until a syntax error has been seen. If a full unit is @@ -795,53 +803,42 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) // signal completion non-completion input has been received in.completion.resetVerbosity() - def reallyInterpret = { - val reallyResult = intp.interpret(code) - (reallyResult, reallyResult match { - case IR.Error => None - case IR.Success => Some(code) - case IR.Incomplete => - if (in.interactive && code.endsWith("\n\n")) { - echo("You typed two blank lines. Starting a new command.") + def reallyInterpret = intp.interpret(code) match { + case IR.Error => None + case IR.Success => Some(code) + case IR.Incomplete if in.interactive && code.endsWith("\n\n") => + echo("You typed two blank lines. Starting a new command.") + None + case IR.Incomplete => + in.readLine(paste.ContinueString) match { + case null => + // we know compilation is going to fail since we're at EOF and the + // parser thinks the input is still incomplete, but since this is + // a file being read non-interactively we want to fail. So we send + // it straight to the compiler for the nice error message. + intp.compileString(code) None - } - else in.readLine(paste.ContinueString) match { - case null => - // we know compilation is going to fail since we're at EOF and the - // parser thinks the input is still incomplete, but since this is - // a file being read non-interactively we want to fail. So we send - // it straight to the compiler for the nice error message. - intp.compileString(code) - None - - case line => interpretStartingWith(code + "\n" + line) - } - }) + + case line => interpretStartingWith(code + "\n" + line) + } } - /** Here we place ourselves between the user and the interpreter and examine - * the input they are ostensibly submitting. We intervene in several cases: + /* Here we place ourselves between the user and the interpreter and examine + * the input they are ostensibly submitting. We intervene in several cases: * - * 1) If the line starts with "scala> " it is assumed to be an interpreter paste. - * 2) If the line starts with "." (but not ".." or "./") it is treated as an invocation - * on the previous result. - * 3) If the Completion object's execute returns Some(_), we inject that value - * and avoid the interpreter, as it's likely not valid scala code. + * 1) If the line starts with "scala> " it is assumed to be an interpreter paste. + * 2) If the line starts with "." (but not ".." or "./") it is treated as an invocation + * on the previous result. 
+ * 3) If the Completion object's execute returns Some(_), we inject that value + * and avoid the interpreter, as it's likely not valid scala code. */ - if (code == "") None - else if (!paste.running && paste.isPrompted(code)) { - paste.transcript(code) - None - } - else if (Completion.looksLikeInvocation(code) && intp.mostRecentVar != "") { - interpretStartingWith(intp.mostRecentVar + code) + code match { + case "" => None + case lineComment() => None // line comment, do nothing + case paste() if !paste.running => paste.transcript(code) ; None + case invocation() if intp.mostRecentVar != "" => interpretStartingWith(intp.mostRecentVar + code) + case _ => reallyInterpret } - else if (code.trim startsWith "//") { - // line comment, do nothing - None - } - else - reallyInterpret._2 } // runs :load `file` on any files passed via -i diff --git a/src/repl/scala/tools/nsc/interpreter/Pasted.scala b/src/repl/scala/tools/nsc/interpreter/Pasted.scala index 1600f32946..5f388eb15b 100644 --- a/src/repl/scala/tools/nsc/interpreter/Pasted.scala +++ b/src/repl/scala/tools/nsc/interpreter/Pasted.scala @@ -85,7 +85,7 @@ abstract class Pasted { case _ => code } - def run() { + def run(): Unit = { println("// Replaying %d commands from transcript.\n" format cmds.size) cmds foreach { cmd => print(ActualPromptString) -- cgit v1.2.3 From 2530699c103460c0e8388179aa91db6cedb89ecc Mon Sep 17 00:00:00 2001 From: Janek Bogucki Date: Sun, 21 Jun 2015 22:44:30 +0100 Subject: Fix 36 typos (d-f) --- src/compiler/scala/reflect/quasiquotes/Holes.scala | 2 +- src/compiler/scala/tools/nsc/PhaseAssembly.scala | 2 +- src/compiler/scala/tools/nsc/backend/icode/GenICode.scala | 4 ++-- .../scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala | 2 +- src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala | 2 +- src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala | 2 +- src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala | 2 +- src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala | 4 ++-- src/compiler/scala/tools/nsc/settings/ScalaSettings.scala | 2 +- src/compiler/scala/tools/nsc/transform/Delambdafy.scala | 2 +- src/compiler/scala/tools/nsc/transform/patmat/Logic.scala | 2 +- src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala | 2 +- src/library/scala/collection/immutable/Stream.scala | 2 +- src/reflect/scala/reflect/internal/ReificationSupport.scala | 2 +- .../scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala | 2 +- src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala | 2 +- test/files/jvm/unreachable/Test.scala | 2 +- test/files/neg/t3995.scala | 2 +- test/files/neg/t8237-default.scala | 2 +- test/files/neg/t8463.scala | 2 +- test/files/pos/SI-4012-b.scala | 2 +- test/files/pos/delambdafy-patterns.scala | 2 +- test/files/pos/t7200b.scala | 2 +- test/files/run/deeps.scala | 2 +- test/files/run/finally.scala | 2 +- test/files/run/iq.scala | 2 +- test/files/run/names-defaults.scala | 2 +- test/files/run/nullable-lazyvals.scala | 2 +- test/files/run/t6240-universe-code-gen.scala | 4 ++-- test/files/run/t8601-closure-elim.scala | 2 +- test/files/run/t8708_b/Test_2.scala | 2 +- test/files/specialized/constant_lambda.scala | 2 +- test/scaladoc/run/groups.scala | 2 +- 33 files changed, 36 insertions(+), 36 deletions(-) diff --git a/src/compiler/scala/reflect/quasiquotes/Holes.scala b/src/compiler/scala/reflect/quasiquotes/Holes.scala index 6fa6b9b37a..47084fc317 100644 --- a/src/compiler/scala/reflect/quasiquotes/Holes.scala +++ 
b/src/compiler/scala/reflect/quasiquotes/Holes.scala @@ -151,7 +151,7 @@ trait Holes { self: Quasiquotes => else None } - /** Map high-rank unquotee onto an expression that eveluates as a list of given rank. + /** Map high-rank unquotee onto an expression that evaluates as a list of given rank. * * All possible combinations of representations are given in the table below: * diff --git a/src/compiler/scala/tools/nsc/PhaseAssembly.scala b/src/compiler/scala/tools/nsc/PhaseAssembly.scala index 4b32aab5ee..ef9818c62d 100644 --- a/src/compiler/scala/tools/nsc/PhaseAssembly.scala +++ b/src/compiler/scala/tools/nsc/PhaseAssembly.scala @@ -226,7 +226,7 @@ trait PhaseAssembly { } /** Given the phases set, will build a dependency graph from the phases set - * Using the aux. method of the DependencyGraph to create nodes and egdes. + * Using the aux. method of the DependencyGraph to create nodes and edges. */ private def phasesSetToDepGraph(phsSet: mutable.HashSet[SubComponent]): DependencyGraph = { val graph = new DependencyGraph() diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala index 137954b52d..3e23291e92 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala @@ -1022,7 +1022,7 @@ abstract class GenICode extends SubComponent { tree match { case Literal(Constant(null)) if generatedType == NullReference && expectedType != UNIT => // literal null on the stack (as opposed to a boxed null, see SI-8233), - // we can bypass `adapt` which would otherwise emitt a redundant [DROP, CONSTANT(null)] + // we can bypass `adapt` which would otherwise emit a redundant [DROP, CONSTANT(null)] // except one case: when expected type is UNIT (unboxed) where we need to emit just a DROP case _ => adapt(generatedType, expectedType, resCtx, tree.pos) @@ -2108,7 +2108,7 @@ abstract class GenICode extends SubComponent { /** * Represent a label in the current method code. In order * to support forward jumps, labels can be created without - * having a deisgnated target block. They can later be attached + * having a designated target block. They can later be attached * by calling `anchor`. */ class Label(val symbol: Symbol) { diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala index 058b6a161d..64c9901a3e 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala @@ -387,7 +387,7 @@ abstract class TypeFlowAnalysis { Moreover, it's often the case that the last CALL_METHOD of interest ("of interest" equates to "being tracked in `isOnWatchlist`) isn't the last instruction on the block. There are cases where the typeflows computed past this `lastInstruction` are needed, and cases when they aren't. - The reasoning behind this decsision is described in `populatePerimeter()`. All `blockTransfer()` needs to do (in order to know at which instruction it can stop) + The reasoning behind this decision is described in `populatePerimeter()`. All `blockTransfer()` needs to do (in order to know at which instruction it can stop) is querying `isOnPerimeter`. Upon visiting a CALL_METHOD that's an inlining candidate, the relevant pieces of information about the pre-instruction typestack are collected for future use. 
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala index a2fd22d24c..0f67852804 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala @@ -90,7 +90,7 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { override def getCurrentCUnit(): CompilationUnit = { cunit } - /* ---------------- helper utils for generating classes and fiels ---------------- */ + /* ---------------- helper utils for generating classes and fields ---------------- */ def genPlainClass(cd: ClassDef) { assert(cnode == null, "GenBCode detected nested methods.") diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala index fffb9286b8..356af36455 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala @@ -322,7 +322,7 @@ class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { val javaCompatMembers = { if (linkedClass != NoSymbol && isTopLevelModuleClass(linkedClass)) // phase travel to exitingPickler: this makes sure that memberClassesForInnerClassTable only sees member - // classes, not local classes of the companion module (E in the exmaple) that were lifted by lambdalift. + // classes, not local classes of the companion module (E in the example) that were lifted by lambdalift. exitingPickler(memberClassesForInnerClassTable(linkedClass)) else Nil diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala index 607b7145d6..dbf19744fa 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala @@ -127,7 +127,7 @@ class ByteCodeRepository(val classPath: ClassFileLookup[AbstractFile], val isJav case Nil => Left(failedClasses) } - // In a MethodInsnNode, the `owner` field may be an array descriptor, for exmple when invoking `clone`. We don't have a method node to return in this case. + // In a MethodInsnNode, the `owner` field may be an array descriptor, for example when invoking `clone`. We don't have a method node to return in this case. if (ownerInternalNameOrArrayDescriptor.charAt(0) == '[') Left(MethodNotFound(name, descriptor, ownerInternalNameOrArrayDescriptor, Nil)) else diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala index 5f51a94673..bd5bab28b5 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala @@ -262,7 +262,7 @@ object LocalOptImpls { * the same index, but distinct start / end ranges are different variables, they may have not the * same type or name. 
*/ - def removeUnusedLocalVariableNodes(method: MethodNode)(fistLocalIndex: Int = parametersSize(method), renumber: Int => Int = identity): Boolean = { + def removeUnusedLocalVariableNodes(method: MethodNode)(firstLocalIndex: Int = parametersSize(method), renumber: Int => Int = identity): Boolean = { def variableIsUsed(start: AbstractInsnNode, end: LabelNode, varIndex: Int): Boolean = { start != end && (start match { case v: VarInsnNode if v.`var` == varIndex => true @@ -276,7 +276,7 @@ object LocalOptImpls { val local = localsIter.next() val index = local.index // parameters and `this` (the lowest indices, starting at 0) are never removed or renumbered - if (index >= fistLocalIndex) { + if (index >= firstLocalIndex) { if (!variableIsUsed(local.start, local.end, index)) localsIter.remove() else if (renumber(index) != index) local.index = renumber(index) } diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index 953e43eaca..d3cdf69d30 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -76,7 +76,7 @@ trait ScalaSettings extends AbsScalaSettings val implicitConversions = Choice("implicitConversions", "Allow definition of implicit functions called views") val higherKinds = Choice("higherKinds", "Allow higher-kinded types") val existentials = Choice("existentials", "Existential types (besides wildcard types) can be written and inferred") - val macros = Choice("experimental.macros", "Allow macro defintion (besides implementation and application)") + val macros = Choice("experimental.macros", "Allow macro definition (besides implementation and application)") } val language = { val description = "Enable or disable language features" diff --git a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala index 55ab73028e..5a7f6c52da 100644 --- a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala +++ b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala @@ -444,7 +444,7 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre def adaptAndPostErase(tree: Tree, pt: Type): (Boolean, Tree) = { val (needsAdapt, adaptedTree) = adapt(tree, pt) val trans = postErasure.newTransformer(unit) - val postErasedTree = trans.atOwner(currentOwner)(trans.transform(adaptedTree)) // SI-8017 elimnates ErasedValueTypes + val postErasedTree = trans.atOwner(currentOwner)(trans.transform(adaptedTree)) // SI-8017 eliminates ErasedValueTypes (needsAdapt, postErasedTree) } diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala index 227c45b3a7..49a4990722 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala @@ -510,7 +510,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis { def propForEqualsTo(c: Const): Prop = {observed(); symForEqualsTo.getOrElse(c, False)} // [implementation NOTE: don't access until all potential equalities have been registered using registerEquality]p - /** the information needed to construct the boolean proposition that encods the equality proposition (V = C) + /** the information needed to construct the boolean proposition that encodes the equality proposition (V = C) * * that models a type test pattern `_: C` or constant pattern `C`, where the type test gives rise to a 
TypeConst C, * and the constant pattern yields a ValueConst C diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala index e1fe220556..e0fcc05de2 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala @@ -642,7 +642,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { } // override def apply - // debug.patmat("before fixerupper: "+ xTree) + // debug.patmat("before fixerUpper: "+ xTree) // currentRun.trackerFactory.snapshot() // debug.patmat("after fixerupper") // currentRun.trackerFactory.snapshot() diff --git a/src/library/scala/collection/immutable/Stream.scala b/src/library/scala/collection/immutable/Stream.scala index 7edd36dc22..17cf02cce6 100644 --- a/src/library/scala/collection/immutable/Stream.scala +++ b/src/library/scala/collection/immutable/Stream.scala @@ -153,7 +153,7 @@ import scala.language.implicitConversions * * - The fact that `tail` works at all is of interest. In the definition of * `fibs` we have an initial `(0, 1, Stream(...))` so `tail` is deterministic. - * If we deinfed `fibs` such that only `0` were concretely known then the act + * If we defined `fibs` such that only `0` were concretely known then the act * of determining `tail` would require the evaluation of `tail` which would * cause an infinite recursion and stack overflow. If we define a definition * where the tail is not initially computable then we're going to have an diff --git a/src/reflect/scala/reflect/internal/ReificationSupport.scala b/src/reflect/scala/reflect/internal/ReificationSupport.scala index eddfec82e7..d393a841b7 100644 --- a/src/reflect/scala/reflect/internal/ReificationSupport.scala +++ b/src/reflect/scala/reflect/internal/ReificationSupport.scala @@ -802,7 +802,7 @@ trait ReificationSupport { self: SymbolTable => require(enums.nonEmpty, "enumerators can't be empty") enums.head match { case SyntacticValFrom(_, _) => - case t => throw new IllegalArgumentException(s"$t is not a valid fist enumerator of for loop") + case t => throw new IllegalArgumentException(s"$t is not a valid first enumerator of for loop") } enums.tail.foreach { case SyntacticValEq(_, _) | SyntacticValFrom(_, _) | SyntacticFilter(_) => diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala index b541cf721b..320a8e23b2 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala +++ b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala @@ -383,7 +383,7 @@ class DotDiagramGenerator(settings: doc.Settings, dotRunner: DotRunner) extends if (dotId.count(_ == '|') == 1) { val Array(klass, id) = dotId.toString.split("\\|") /* Sometimes dot "forgets" to add the image -- that's very annoying, but it seems pretty random, and simple - * tests like excute 20K times and diff the output don't trigger the bug -- so it's up to us to place the image + * tests like execute 20K times and diff the output don't trigger the bug -- so it's up to us to place the image * back in the node */ val kind = getKind(klass) if (kind != "") diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala index 03d71f15a3..3cbcbc433e 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala +++ 
b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala @@ -478,7 +478,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { override lazy val comment = { def nonRootTemplate(sym: Symbol): Option[DocTemplateImpl] = if (sym eq RootPackage) None else findTemplateMaybe(sym) - /* Variable precendence order for implicitly added members: Take the variable defifinitions from ... + /* Variable precendence order for implicitly added members: Take the variable definitions from ... * 1. the target of the implicit conversion * 2. the definition template (owner) * 3. the current template diff --git a/test/files/jvm/unreachable/Test.scala b/test/files/jvm/unreachable/Test.scala index 3f520eb106..4c0fcb2ae8 100644 --- a/test/files/jvm/unreachable/Test.scala +++ b/test/files/jvm/unreachable/Test.scala @@ -6,7 +6,7 @@ import scala.collection.JavaConverters._ object Test extends BytecodeTest { def show: Unit = { val classNode = loadClassNode("Foo_1") - // Foo_1 is full of unreachable code which if not elimintated + // Foo_1 is full of unreachable code which if not eliminated // will result in NOPs as can be confirmed by adding -Ydisable-unreachable-prevention // to Foo_1.flags for (methodNode <- classNode.methods.asScala) { diff --git a/test/files/neg/t3995.scala b/test/files/neg/t3995.scala index b03617ac86..c79f2a5865 100644 --- a/test/files/neg/t3995.scala +++ b/test/files/neg/t3995.scala @@ -27,6 +27,6 @@ object Test { // can be accessed with unambiguous stable prefixes, the implicits infos // which are members of these companion objects." // - // The skolem is stable, but it doen't seem much good to us + // The skolem is stable, but it does not seem much good to us (new Lift).apply("") } diff --git a/test/files/neg/t8237-default.scala b/test/files/neg/t8237-default.scala index f695aa523f..a4370046bd 100644 --- a/test/files/neg/t8237-default.scala +++ b/test/files/neg/t8237-default.scala @@ -1,4 +1,4 @@ -// This test case was extracte from `names-defaults-neg.scala` +// This test case was extracted from `names-defaults-neg.scala` // It pinpoints an improvement an error message that results from // a type inference failure object Test extends App { diff --git a/test/files/neg/t8463.scala b/test/files/neg/t8463.scala index 7c954fd834..1337f8bece 100644 --- a/test/files/neg/t8463.scala +++ b/test/files/neg/t8463.scala @@ -7,7 +7,7 @@ object Test { /* If SI-8230 is fixed, and `viewExists` is changed to no longer leak ambiguity errors, you might expect the check file for this test to - change as folloes: + change as follows: @@ -1,18 +1,10 @@ -t8463.scala:5: error: no type parameters for method apply: (activity: diff --git a/test/files/pos/SI-4012-b.scala b/test/files/pos/SI-4012-b.scala index 6bc8592766..f6d84963e4 100644 --- a/test/files/pos/SI-4012-b.scala +++ b/test/files/pos/SI-4012-b.scala @@ -6,7 +6,7 @@ object Sub extends Super[Int] { // it is expected that super[Super].superb crashes, since // specialization does parent class rewiring, and the super // of Sub becomes Super$mcII$sp and not Super. But I consider - // this normal behavior -- if you want, I can modify duplicatiors + // this normal behavior -- if you want, I can modify duplicators // to make this work, but I consider it's best to keep this // let the user know Super is not the superclass anymore. 
// super[Super].superb - Vlad diff --git a/test/files/pos/delambdafy-patterns.scala b/test/files/pos/delambdafy-patterns.scala index 95d498629b..ca9eaa67e3 100644 --- a/test/files/pos/delambdafy-patterns.scala +++ b/test/files/pos/delambdafy-patterns.scala @@ -2,7 +2,7 @@ class DelambdafyPatterns { def bar: Unit = () def wildcardPatternInTryCatch: Unit => Unit = (x: Unit) => // patterns in try..catch are preserved so we need to be - // careful when it comes to free variable detction + // careful when it comes to free variable detection // in particular a is _not_ free variable, also the // `_` identifier has no symbol attached to it try bar catch { diff --git a/test/files/pos/t7200b.scala b/test/files/pos/t7200b.scala index 9d579c6ef9..59be898fd0 100644 --- a/test/files/pos/t7200b.scala +++ b/test/files/pos/t7200b.scala @@ -10,7 +10,7 @@ trait Foo { object O extends Foo { def coflatMap[A <: T](f: A) = { val f2 = coflatMap(f) // inferred in 2.9.2 / 2.10.0 as [Nothing] - f2.t // so this does't type check. + f2.t // so this fails to type check. f2 } } diff --git a/test/files/run/deeps.scala b/test/files/run/deeps.scala index 6049cc6024..1546112ed5 100644 --- a/test/files/run/deeps.scala +++ b/test/files/run/deeps.scala @@ -3,7 +3,7 @@ //############################################################################ //############################################################################ -// need to revisit array equqality +// need to revisit array equality object Test { def testEquals1 { diff --git a/test/files/run/finally.scala b/test/files/run/finally.scala index 2c01edaaef..b66354ca03 100644 --- a/test/files/run/finally.scala +++ b/test/files/run/finally.scala @@ -93,7 +93,7 @@ object Test extends App { } } - // nested finallies with return value + // nested finally blocks with return value def nestedFinalies: Int = try { try { diff --git a/test/files/run/iq.scala b/test/files/run/iq.scala index 1eb1d40e37..0ccf67a2e9 100644 --- a/test/files/run/iq.scala +++ b/test/files/run/iq.scala @@ -69,7 +69,7 @@ object iq { val (_, q7) = q6.dequeue //val q8 = q7 + 10 + 11 //deprecated val q8 = q7.enqueue(10).enqueue(11) - /* Test dequeu + /* Test dequeue * Expected: q8: Queue(2,3,4,5,6,7,8,9,10,11) */ Console.println("q8: " + q8) diff --git a/test/files/run/names-defaults.scala b/test/files/run/names-defaults.scala index b7ed490cbc..c364425ec9 100644 --- a/test/files/run/names-defaults.scala +++ b/test/files/run/names-defaults.scala @@ -86,7 +86,7 @@ object Test extends App { def f(a: Object) = "first" val f: String => String = a => "second" } - println(t5.f(new Sub1())) // firsst + println(t5.f(new Sub1())) // first println(t5.f("dfklj")) // second object t6 { diff --git a/test/files/run/nullable-lazyvals.scala b/test/files/run/nullable-lazyvals.scala index c201e74e75..be5d82f3a7 100644 --- a/test/files/run/nullable-lazyvals.scala +++ b/test/files/run/nullable-lazyvals.scala @@ -24,7 +24,7 @@ object Test extends App { // test that try-finally does not generated a liftedTry // helper. This would already fail the first part of the test, - // but this check will help diganose it (if the single access to a + // but this check will help diagnose it (if the single access to a // private field does not happen directly in the lazy val, it won't // be nulled). 
for (f <- foo.getClass.getDeclaredMethods) { diff --git a/test/files/run/t6240-universe-code-gen.scala b/test/files/run/t6240-universe-code-gen.scala index 9f7061ee1b..60e1f76b54 100644 --- a/test/files/run/t6240-universe-code-gen.scala +++ b/test/files/run/t6240-universe-code-gen.scala @@ -13,8 +13,8 @@ object Test extends App { (sym.isMethod && sym.asMethod.isLazy) || sym.isModule ) - val forcables = tp.members.sorted.filter(isLazyAccessorOrObject) - forcables.map { + val forceables = tp.members.sorted.filter(isLazyAccessorOrObject) + forceables.map { sym => val path = s"$prefix.${sym.name}" " " + ( diff --git a/test/files/run/t8601-closure-elim.scala b/test/files/run/t8601-closure-elim.scala index 2c5b03af77..ebeb16e0c7 100644 --- a/test/files/run/t8601-closure-elim.scala +++ b/test/files/run/t8601-closure-elim.scala @@ -11,7 +11,7 @@ object Test extends BytecodeTest { val classNode = loadClassNode("Foo") val methodNode = getMethod(classNode, "b") val ops = methodNode.instructions.iterator.asScala.map(_.getOpcode).toList - assert(!ops.contains(asm.Opcodes.NEW), ops)// should be allocation free if the closure is eliminiated + assert(!ops.contains(asm.Opcodes.NEW), ops)// should be allocation free if the closure is eliminated } test("b") } diff --git a/test/files/run/t8708_b/Test_2.scala b/test/files/run/t8708_b/Test_2.scala index c978490609..fae3c677ec 100644 --- a/test/files/run/t8708_b/Test_2.scala +++ b/test/files/run/t8708_b/Test_2.scala @@ -13,7 +13,7 @@ object Test extends DirectTest { val c = g.rootMirror.getRequiredClass("p.C") println(c.info.decls) val t = c.info.member(g.newTypeName("T")) - // this test ensrues that the dummy class symbol is not entered in the + // this test ensures that the dummy class symbol is not entered in the // scope of trait T during unpickling. println(t.info.decls) }) diff --git a/test/files/specialized/constant_lambda.scala b/test/files/specialized/constant_lambda.scala index bb9a97403e..7c5358ce10 100644 --- a/test/files/specialized/constant_lambda.scala +++ b/test/files/specialized/constant_lambda.scala @@ -1,4 +1,4 @@ -// during development of late delmabdafying there was a problem where +// during development of late delambdafying there was a problem where // specialization would undo some of the work done in uncurry if the body of the // lambda had a constant type. That would result in a compiler crash as // when the delambdafy phase got a tree shape it didn't understand diff --git a/test/scaladoc/run/groups.scala b/test/scaladoc/run/groups.scala index c9e4a8679b..ad5cca89b8 100644 --- a/test/scaladoc/run/groups.scala +++ b/test/scaladoc/run/groups.scala @@ -38,7 +38,7 @@ object Test extends ScaladocModelTest { * @groupdesc C Group C is introduced by B */ trait B { - /** baz descriptopn + /** baz description * @group C */ def baz = 3 } -- cgit v1.2.3 From a74bdc72efe0c59e7ae8269f00e0941fbe0c0c0c Mon Sep 17 00:00:00 2001 From: Simon Ochsenreither Date: Mon, 22 Jun 2015 02:02:42 +0200 Subject: Spec: Add lost references, cleanup --- spec/05-classes-and-objects.md | 26 ++--- spec/15-changelog.md | 260 ++++++++++++++++++++++------------------- 2 files changed, 154 insertions(+), 132 deletions(-) diff --git a/spec/05-classes-and-objects.md b/spec/05-classes-and-objects.md index 8be792d3cb..cff5442641 100644 --- a/spec/05-classes-and-objects.md +++ b/spec/05-classes-and-objects.md @@ -501,14 +501,13 @@ declaration in a template. 
Such members can be accessed only from within the directly enclosing template and its companion module or [companion class](#object-definitions). -The modifier can be _qualified_ with an identifier $C$ (e.g. -`private[$C$]`) that must denote a class or package -enclosing the definition. Members labeled with such a modifier are -accessible respectively only from code inside the package $C$ or only -from code inside the class $C$ and its +A `private` modifier can be _qualified_ with an identifier $C$ (e.g. +`private[$C$]`) that must denote a class or package enclosing the definition. +Members labeled with such a modifier are accessible respectively only from code +inside the package $C$ or only from code inside the class $C$ and its [companion module](#object-definitions). -An different form of qualification is `private[this]`. A member +A different form of qualification is `private[this]`. A member $M$ marked with this modifier is called _object-protected_; it can be accessed only from within the object in which it is defined. That is, a selection $p.M$ is only legal if the prefix is `this` or `$O$.this`, for some @@ -533,11 +532,10 @@ Protected members of a class can be accessed from within - all templates that have the defining class as a base class, - the companion module of any of those classes. -A `protected` modifier can be qualified with an -identifier $C$ (e.g. `protected[$C$]`) that must denote a -class or package enclosing the definition. Members labeled with such -a modifier are also accessible respectively from all code inside the -package $C$ or from all code inside the class $C$ and its +A `protected` modifier can be qualified with an identifier $C$ (e.g. +`protected[$C$]`) that must denote a class or package enclosing the definition. +Members labeled with such a modifier are also accessible respectively from all +code inside the package $C$ or from all code inside the class $C$ and its [companion module](#object-definitions). A protected identifier $x$ may be used as a member name in a selection @@ -744,7 +742,7 @@ which when applied to parameters conforming to types $\mathit{ps}$ initializes instances of type `$c$[$\mathit{tps}\,$]` by evaluating the template $t$. -###### Example +###### Example – `val` and `var` parameters The following example illustrates `val` and `var` parameters of a class `C`: ```scala @@ -753,7 +751,7 @@ val c = new C(1, "abc", List()) c.z = c.y :: c.z ``` -### Example Private Constructor +###### Example – Private Constructor The following class can be created only from its companion module. ```scala @@ -835,7 +833,7 @@ This defines a class `LinkedList` with three constructors. The second constructor constructs an singleton list, while the third one constructs a list with a given head and tail. -## Case Classes +### Case Classes ```ebnf TmplDef ::= `case' `class' ClassDef diff --git a/spec/15-changelog.md b/spec/15-changelog.md index 54310c921c..3c8739359a 100644 --- a/spec/15-changelog.md +++ b/spec/15-changelog.md @@ -122,8 +122,8 @@ when compiling with `-target:jvm-1.4`, a Scala type such as #### Changes to Case Classes -The Scala compiler generates now for every case class a companion -extractor object (). For instance, given the case class: +The Scala compiler generates a [companion extractor object for every case class] +(05-classes-and-objects.html#case-classes) now. 
For instance, given the case class: case class X(elem: String) @@ -152,8 +152,9 @@ _(30-Nov-2007)_ #### Mutable variables introduced by pattern binding -Mutable variables can now be introduced by a pattern matching definition -(), just like values can. Examples: +[Mutable variables can now be introduced by a pattern matching definition] +(04-basic-declarations-and-definitions.html#variable-declarations-and-definitions), +just like values can. Examples: var (x, y) = if (positive) (1, 2) else (-1, -3) var hd :: tl = mylist @@ -161,7 +162,7 @@ Mutable variables can now be introduced by a pattern matching definition #### Self-types Self types can now be introduced without defining an alias name for -`this` (). Example: +[`this`](05-classes-and-objects.html#templates). Example: class C { type T <: Trait @@ -175,8 +176,8 @@ _(27-July-2007)_ #### Existential types -It is now possible to define existential types (). An existential type -has the form `T forSome {Q}` where `Q` is a sequence of value and/or +It is now possible to define [existential types](03-types.html#existential-types). +An existential type has the form `T forSome {Q}` where `Q` is a sequence of value and/or type declarations. Given the class definitions class Ref[T] @@ -189,8 +190,9 @@ one may for example write the following existential types #### Lazy values -It is now possible to define lazy value declarations using the new -modifier `lazy` (). A `lazy` value definition evaluates its right hand +It is now possible to define lazy value declarations using the new modifier +[`lazy`](04-basic-declarations-and-definitions.html#value-declarations-and-definitions). +A `lazy` value definition evaluates its right hand side \\(e\\) the first time the value is accessed. Example: import compat.Platform._ @@ -203,8 +205,8 @@ side \\(e\\) the first time the value is accessed. Example: #### Structural types -It is now possible to declare structural types using type refinements -(). For example: +It is now possible to declare structural types using [type refinements] +(03-types.html#compound-types). For example: class File(name: String) { def getName(): String = name @@ -230,10 +232,13 @@ Changes in Version 2.5 _(02-May-2007)_ -#### Type constructor polymorphism[^1] +#### Type constructor polymorphism -Type parameters () and abstract type members () can now also abstract -over type constructors (). +_Implemented by Adriaan Moors_ + +[Type parameters](04-basic-declarations-and-definitions.html#type-parameters) +and abstract +[type members](04-basic-declarations-and-definitions.html#type-declarations-and-type-aliases) can now also abstract over [type constructors](03-types.html#type-constructors). This allows a more precise `Iterable` interface: @@ -254,8 +259,9 @@ over a certain structure (e.g., a `List`) will yield the same structure #### Early object initialization -It is now possible to initialize some fields of an object before any -parent constructors are called (). This is particularly useful for +[Early object initialization](05-classes-and-objects.html#early-definitions) +makes it possible to initialize some fields of an object before any +parent constructors are called. This is particularly useful for traits, which do not have normal constructor parameters. Example: trait Greeting { @@ -273,10 +279,11 @@ called. Therefore, field `msg` in class is properly initialized to . #### For-comprehensions, revised -The syntax of for-comprehensions has changed (). 
In the new syntax, -generators do not start with a anymore, but filters start with an (and -are called guards). A semicolon in front of a guard is optional. For -example: +The syntax of [for-comprehensions](06-expressions.html#for-comprehensions-and-for-loops) +has changed. +In the new syntax, generators do not start with a `val` anymore, but filters +start with an `if` (and are called guards). +A semicolon in front of a guard is optional. For example: for (val x <- List(1, 2, 3); x % 2 == 0) println(x) @@ -288,8 +295,9 @@ The old syntax is still available but will be deprecated in the future. #### Implicit anonymous functions -It is now possible to define anonymous functions using underscores in -parameter position (). For instance, the expressions in the left column +It is now possible to define [anonymous functions using underscores] +(06-expressions.html#placeholder-syntax-for-anonymous-functions) in +parameter position. For instance, the expressions in the left column are each function values which expand to the anonymous functions on their right. @@ -300,8 +308,8 @@ their right. _.map(f) x => x.map(f) _.map(_ + 1) x => x.map(y => y + 1) -As a special case (), a partially unapplied method is now designated - `m _`   instead of the previous notation  `&m`. +As a special case, a [partially unapplied method](06-expressions.html#method-values) +is now designated `m _`   instead of the previous notation  `&m`. The new notation will displace the special syntax forms `.m()` for abstracting over method receivers and `&m` for treating an unapplied @@ -310,8 +318,9 @@ still available, but they will be deprecated in the future. #### Pattern matching anonymous functions, refined -It is now possible to use case clauses to define a function value -directly for functions of arities greater than one (). Previously, only +It is now possible to use [case clauses to define a function value] +(08-pattern-matching.html#pattern-matching-anonymous-functions) +directly for functions of arities greater than one. Previously, only unary functions could be defined that way. Example: def scalarProduct(xs: Array[Double], ys: Array[Double]) = @@ -326,8 +335,9 @@ _(09-Mar-2007)_ #### Object-local private and protected -The `private` and `protected` modifiers now accept a `[this]` qualifier -(). A definition \\(M\\) which is labelled `private[this]` is private, +The `private` and `protected` modifiers now accept a +[`[this]` qualifier](05-classes-and-objects.html#modifiers). +A definition \\(M\\) which is labelled `private[this]` is private, and in addition can be accessed only from within the current object. That is, the only legal prefixes for \\(M\\) are `this` or `$C$.this`. Analogously, a definition \\(M\\) which is labelled `protected[this]` is @@ -348,16 +358,17 @@ Analogously, for any sequence of expressions or patterns \\(x_1 #### Access modifiers for primary constructors -The primary constructor of a class can now be marked or (). If such an -access modifier is given, it comes between the name of the class and its +The primary constructor of a class can now be marked [`private` or `protected`] +(05-classes-and-objects.html#class-definitions). +If such an access modifier is given, it comes between the name of the class and its value parameters. Example: class C[T] private (x: T) { ... } #### Annotations -The support for attributes has been extended and its syntax changed (). -Attributes are now called *annotations*. The syntax has +The support for attributes has been extended and its syntax changed. 
+Attributes are now called [*annotations*](11-annotations.html). The syntax has been changed to follow Java’s conventions, e.g. `@attribute` instead of `[attribute]`. The old syntax is still available but will be deprecated in the future. @@ -373,12 +384,13 @@ symbol is accessed. #### Decidable subtyping The implementation of subtyping has been changed to prevent infinite -recursions. Termination of subtyping is now ensured by a new restriction -of class graphs to be finitary (). +recursions. +[Termination of subtyping](05-classes-and-objects.html#inheritance-closure) +is now ensured by a new restriction of class graphs to be finitary. #### Case classes cannot be abstract -It is now explicitly ruled out that case classes can be abstract (). The +It is now explicitly ruled out that case classes can be abstract. The specification was silent on this point before, but did not explain how abstract case classes were treated. The Scala compiler allowed the idiom. @@ -386,19 +398,21 @@ idiom. #### New syntax for self aliases and self types It is now possible to give an explicit alias name and/or type for the -self reference (). For instance, in +[self reference](05-classes-and-objects.html#templates) `this`. For instance, in class C { self: D => ... } -the name is introduced as an alias for within and the self type () of is -assumed to be . This construct is introduced now in order to replace +the name `self` is introduced as an alias for `this` within `C` and the +[self type](05-classes-and-objects.html#class-definitions) of `C` is +assumed to be `D`. This construct is introduced now in order to replace eventually both the qualified this construct and the clause in Scala. #### Assignment Operators -It is now possible to combine operators with assignments (). Example: +It is now possible to [combine operators with assignments] +(06-expressions.html#assignment-operators). Example: var x: int = 0 x += 1 @@ -410,8 +424,9 @@ _(23-Jan-2007)_ #### Extractors -It is now possible to define patterns independently of case classes, -using methods in extractor objects (). Here is an example: +It is now possible to define patterns independently of case classes, using +`unapply` methods in [extractor objects](08-pattern-matching.html#extractor-patterns). +Here is an example: object Twice { def apply(x:Int): int = x*2 @@ -422,25 +437,26 @@ using methods in extractor objects (). Here is an example: In the example, `Twice` is an extractor object with two methods: -- The method is used to build even numbers. +- The `apply` method is used to build even numbers. -- The method is used to decompose an even number; it is in a sense the - reverse of . `unapply` methods return option types: for a match that - suceeds, for a match that fails. Pattern variables are returned as - the elements of . If there are several variables, they are grouped - in a tuple. +- The `unapply` method is used to decompose an even number; it is in a sense + the reverse of `apply`. `unapply` methods return option types: + `Some(...)` for a match that suceeds, `None` for a match that fails. + Pattern variables are returned as the elements of `Some`. + If there are several variables, they are grouped in a tuple. -In the second-to-last line, ’s method is used to construct a number . In -the last line, is tested against the pattern . This pattern succeeds for -even numbers and assigns to the variable one half of the number that was -tested. The pattern match makes use of the method of object . 
More -details on extractors can be found in the paper “Matching Objects with +In the second-to-last line, `Twice`’s method is used to construct a number `x`. +In the last line, `x` is tested against the pattern `Twice(n)`. +This pattern succeeds for even numbers and assigns to the variable `n` one half +of the number that was tested. +The pattern match makes use of the `unapply` method of object `Twice`. +More details on extractors can be found in the paper “Matching Objects with Patterns” by Emir, Odersky and Williams. #### Tuples -A new lightweight syntax for tuples has been introduced (). For any -sequence of types \\(T_1 , … , T_n\\), +A new [lightweight syntax for tuples](06-expressions.html#tuples) has been introduced. +For any sequence of types \\(T_1 , … , T_n\\), \\(\{T_1 , … , T_n \}\\) is a shorthand for `Tuple$n$[$T_1 , … , T_n$]`. @@ -451,8 +467,8 @@ Analogously, for any sequence of expressions or patterns \\(x_1, … , x_n\\), #### Infix operators of greater arities It is now possible to use methods which have more than one parameter as -infix operators (). In this case, all method arguments are written as a -normal parameter list in parentheses. Example: +[infix operators](06-expressions.html#infix-operations). In this case, all +method arguments are written as a normal parameter list in parentheses. Example: class C { def +(x: int, y: String) = ... @@ -462,9 +478,9 @@ normal parameter list in parentheses. Example: #### Deprecated attribute -A new standard attribute `deprecated` is available (11-annotations.html#deprecation-annotations). If a member -definition is marked with this attribute, any reference to the member -will cause a “deprecated” warning message to be emitted. +A new standard attribute [`deprecated`](11-annotations.html#deprecation-annotations) +is available. If a member definition is marked with this attribute, any +reference to the member will cause a “deprecated” warning message to be emitted. Changes in Version 2.3 ---------------------- @@ -473,20 +489,23 @@ _(23-Nov-2006)_ #### Procedures -A simplified syntax for functions returning `unit` has been introduced -(). Scala now allows the following shorthands: +A simplified syntax for [methods returning `unit`] +(04-basic-declarations-and-definitions.html#procedures) has been introduced. +Scala now allows the following shorthands: `def f(params)` \\(\mbox{for}\\) `def f(params): unit` `def f(params) { ... }` \\(\mbox{for}\\) `def f(params): unit = { ... }` #### Type Patterns -The syntax of types in patterns has been refined (). Scala now -distinguishes between type variables (starting with a lower case letter) -and types as type arguments in patterns. Type variables are bound in the -pattern. Other type arguments are, as in previous versions, erased. The -Scala compiler will now issue an “unchecked” warning at places where -type erasure might compromise type-safety. +The [syntax of types in patterns](08-pattern-matching.html#type-patterns) has +been refined. +Scala now distinguishes between type variables (starting with a lower case +letter) and types as type arguments in patterns. +Type variables are bound in the pattern. +Other type arguments are, as in previous versions, erased. +The Scala compiler will now issue an “unchecked” warning at places where type +erasure might compromise type-safety. #### Standard Types @@ -505,43 +524,43 @@ _(23-Aug-2006)_ #### Visibility Qualifier for protected -Protected members can now have a visibility qualifier (), e.g. -`protected[]`. 
In particular, one can now simulate package -protected access as in Java writing +Protected members can now have a visibility qualifier, e.g. +[`protected[]`](05-classes-and-objects.html#protected). +In particular, one can now simulate package protected access as in Java writing protected[P] def X ... -where would name the package containing . +where would name the package containing `X`. #### Relaxation of Private Acess -Private members of a class can now be referenced from the companion -module of the class and vice versa () +[Private members of a class](05-classes-and-objects.html#private) can now be +referenced from the companion module of the class and vice versa. #### Implicit Lookup -The lookup method for implicit definitions has been generalized (). When -searching for an implicit definition matching a type \\(T\\), now are -considered +The lookup method for [implicit definitions](07-implicits.html#implicit-parameters) +has been generalized. +When searching for an implicit definition matching a type \\(T\\), now are considered 1. all identifiers accessible without prefix, and 2. all members of companion modules of classes associated with \\(T\\). -(The second clause is more general than before). Here, a class is -*associated* with a type \\(T\\) if it is referenced by -some part of \\(T\\), or if it is a base class of some part of \\(T\\). +(The second clause is more general than before). Here, a class is _associated_ +with a type \\(T\\) if it is referenced by some part of \\(T\\), or if it is a +base class of some part of \\(T\\). For instance, to find implicit members corresponding to the type HashSet[List[Int], String] -one would now look in the companion modules (aka static parts) of , , , -and . Before, it was just the static part of . +one would now look in the companion modules (aka static parts) of `HashSet`, +`List`, `Int`, and `String`. Before, it was just the static part of . #### Tightened Pattern Match -A typed pattern match with a singleton type now tests whether the -selector value is reference-equal to p (). Example: +A typed [pattern match with a singleton type `p.type`](08-pattern-matching.html#type-patterns) +now tests whether the selector value is reference-equal to `p`. Example: val p = List(1, 2, 3) val q = List(1, 2) @@ -551,8 +570,8 @@ selector value is reference-equal to p (). Example: case _: q.type => Console.println("q") } -This will match the second case and hence will print “”. Before, the -singleton types were erased to , and therefore the first case would have +This will match the second case and hence will print “q”. Before, the +singleton types were erased to `List`, and therefore the first case would have matched, which is non-sensical. Changes in Version 2.1.7 @@ -562,8 +581,8 @@ _(19-Jul-2006)_ #### Multi-Line string literals -It is now possible to write multi-line string-literals enclosed in -triple quotes (). Example: +It is now possible to write [multi-line string-literals] +(01-lexical-syntax.html#string-literals) enclosed in triple quotes. Example: """this is a multi-line @@ -574,13 +593,14 @@ string literals. #### Closure Syntax -The syntax of closures has been slightly restricted (). The form +The syntax of [closures](06-expressions.html#anonymous-functions) +has been slightly restricted. The form x: T => E is valid only when enclosed in braces, i.e.  `{ x: T => E }`. 
The following is illegal, because it might be read as the value x typed with -the type T =\> E: +the type `T => E`: val f = x: T => E @@ -596,8 +616,9 @@ _(24-May-2006)_ #### Class Literals -There is a new syntax for class literals (): For any class type \\(C\\), -`classOf[$C$]` designates the run-time representation of \\(C\\). +There is a new syntax for [class literals](06-expressions.html#literals): +For any class type \\(C\\), `classOf[$C$]` designates the run-time +representation of \\(C\\). Changes in Version 2.0 ---------------------- @@ -612,14 +633,14 @@ changes. #### New Keywords The following three words are now reserved; they cannot be used as -identifiers () +[identifiers](01-lexical-syntax.html#identifiers): implicit match requires #### Newlines as Statement Separators -Newlines can now be used as statement separators in place of semicolons -() +[Newlines](http://www.scala-lang.org/files/archive/spec/2.11/) +can now be used as statement separators in place of semicolons. #### Syntax Restrictions @@ -649,8 +670,8 @@ is no longer supported. A `with` connective is only allowed following an class C extends AnyRef with M { ... } . -However, assuming `M` is a trait (see [sec:traits]), it is also legal to -write +However, assuming `M` is a [trait](05-classes-and-objects.html#traits), +it is also legal to write class C extends M { ... } @@ -665,7 +686,7 @@ where `S` is the superclass of `M`. The only form of regular expression pattern that is currently supported is a sequence pattern, which might end in a sequence wildcard . Example: - case List(1, 2, _*) => ... // will match all lists starting with \code{1,2}. + case List(1, 2, _*) => ... // will match all lists starting with 1, 2, ... It is at current not clear whether this is a permanent restriction. We are evaluating the possibility of re-introducing full regular expression @@ -686,7 +707,8 @@ old syntax is still available but is considered deprecated. #### For-comprehensions -For-comprehensions () now admit value and pattern definitions. Example: +[For-comprehensions](06-expressions.html#for-comprehensions-and-for-loops) +now admit value and pattern definitions. Example: for { val x <- List.range(1, 100) @@ -700,8 +722,9 @@ for-comprehension. #### Conversions -The rules for implicit conversions of methods to functions () have been -tightened. Previously, a parameterized method used as a value was always +The rules for [implicit conversions of methods to functions] +(06-expressions.html#method-conversions) have been tightened. +Previously, a parameterized method used as a value was always implicitly converted to a function. This could lead to unexpected results when method arguments where forgotten. Consider for instance the statement below: @@ -750,10 +773,11 @@ instance, the `show` expression above will now be expanded to show(x.toString()) . Scala version 2.0 also relaxes the rules of overriding with respect to -empty parameter lists. The revised definition of *matching -members* () makes it now possible to override a method with an +empty parameter lists. The revised definition of +[_matching members_](05-classes-and-objects.html#class-members) +makes it now possible to override a method with an explicit, but empty parameter list `()` with a parameterless method, and -*vice versa*. For instance, the following class definition +_vice versa_. For instance, the following class definition is now legal: class C { @@ -766,26 +790,29 @@ parameter list. 
#### Class Parameters -A class parameter may now be prefixed by `val` or `var` (). +A [class parameter](05-classes-and-objects.html#class-definitions) +may now be prefixed by `val` or `var`. #### Private Qualifiers Previously, Scala had three levels of visibility: *private*, *protected* and *public*. There was no way to restrict accesses to members -of the current package, as in Java. Scala 2 now defines access -qualifiers that let one express this level of visibility, among others. -In the definition +of the current package, as in Java. + +Scala 2 now defines [access qualifiers](05-classes-and-objects.html#modifiers) +that let one express this level of visibility, among others. In the definition private[C] def f(...) access to `f` is restricted to all code within the class or package `C` -(which must contain the definition of `f`) () +(which must contain the definition of `f`). #### Changes in the Mixin Model -The model which details mixin composition of classes has changed -significantly. The main differences are: +The model which details [mixin composition of classes] +(05-classes-and-objects.html#templates) has changed significantly. +The main differences are: 1. We now distinguish between *traits* that are used as mixin classes and normal classes. The syntax of traits has been @@ -802,22 +829,19 @@ significantly. The main differences are: different mixins. This makes method overloading in Scala conceptually the same as in Java. -The new mixin model is explained in more detail in . - #### Implicit Parameters Views in Scala 1.0 have been replaced by the more general concept of -implicit parameters () +[implicit parameters](07-implicits.html#implicit-parameters). #### Flexible Typing of Pattern Matching The new version of Scala implements more flexible typing rules when it -comes to pattern matching over heterogeneous class hierarchies (). A -*heterogeneous class hierarchy* is one where subclasses +comes to [pattern matching over heterogeneous class hierarchies] +(08-pattern-matching.html#pattern-matching-expressions). +A *heterogeneous class hierarchy* is one where subclasses inherit a common superclass with different parameter types. With the new rules in Scala version 2.0 one can perform pattern matches over such hierarchies with more precise typings that keep track of the information -gained by comparing the types of a selector and a matching pattern (). +gained by comparing the types of a selector and a matching pattern. This gives Scala capabilities analogous to guarded algebraic data types. 
- -[^1]: Implemented by Adriaan Moors -- cgit v1.2.3 From ebb933379fc7cb68b6cbba1cc59e08be8263ae91 Mon Sep 17 00:00:00 2001 From: Bruno Bieth Date: Mon, 22 Jun 2015 16:30:31 +0200 Subject: SI-9253 avoid IndexOutOfBoundsException in TypeMaps.correspondingTypeArgument --- src/reflect/scala/reflect/internal/tpe/TypeMaps.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala index c705ca7069..15a87200f1 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala @@ -561,7 +561,7 @@ private[internal] trait TypeMaps { | tparams ${rhsSym.typeParams map own_s mkString ", "} |""" - if (argIndex < 0) + if (!rhsArgs.isDefinedAt(argIndex)) abort(s"Something is wrong: cannot find $lhs in applied type $rhs\n" + explain) else { val targ = rhsArgs(argIndex) -- cgit v1.2.3 From ada9fa0b91ddb64f6d15f7616b455247cbcf2243 Mon Sep 17 00:00:00 2001 From: Janek Bogucki Date: Mon, 22 Jun 2015 23:32:33 +0100 Subject: Fix 25 typos (g-i) --- src/compiler/scala/reflect/quasiquotes/Reifiers.scala | 2 +- src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala | 2 +- src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala | 2 +- src/compiler/scala/tools/nsc/typechecker/Implicits.scala | 2 +- src/compiler/scala/tools/nsc/typechecker/Infer.scala | 2 +- src/compiler/scala/tools/nsc/util/DocStrings.scala | 2 +- src/library/scala/collection/generic/Sorted.scala | 2 +- src/reflect/scala/reflect/api/FlagSets.scala | 2 +- src/reflect/scala/reflect/api/Printers.scala | 2 +- src/reflect/scala/reflect/internal/util/StripMarginInterpolator.scala | 2 +- test/files/jvm/javaReflection/Test.scala | 2 +- test/files/jvm/protectedacc.scala | 2 +- test/files/neg/t8431.scala | 2 +- test/files/pos/t8947/Macro_1.scala | 2 +- test/files/run/blame_eye_triple_eee-double.check | 2 +- test/files/run/blame_eye_triple_eee-double.scala | 2 +- test/files/run/blame_eye_triple_eee-float.check | 2 +- test/files/run/blame_eye_triple_eee-float.scala | 2 +- test/files/run/names-defaults.scala | 2 +- test/files/run/t0631.scala | 2 +- test/files/run/t2526.scala | 2 +- test/files/run/t7817-tree-gen.scala | 2 +- test/files/scalacheck/quasiquotes/RuntimeErrorProps.scala | 2 +- test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala | 2 +- test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOpts.scala | 2 +- 25 files changed, 25 insertions(+), 25 deletions(-) diff --git a/src/compiler/scala/reflect/quasiquotes/Reifiers.scala b/src/compiler/scala/reflect/quasiquotes/Reifiers.scala index e753c9787a..8462debe21 100644 --- a/src/compiler/scala/reflect/quasiquotes/Reifiers.scala +++ b/src/compiler/scala/reflect/quasiquotes/Reifiers.scala @@ -322,7 +322,7 @@ trait Reifiers { self: Quasiquotes => * in the domain of the fill function; * * 2. fold the groups into a sequence of lists added together with ++ using - * fill reification for holeMap and fallback reification for non-holeMap. + * fill reification for holeMap and fallback reification for non-holeMap. 
* * Example: * diff --git a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala index 1b6631e7a4..8911a3a28c 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala @@ -378,7 +378,7 @@ abstract class DeadCodeElimination extends SubComponent { } else { i match { case NEW(REFERENCE(sym)) => - log(s"Eliminated instantation of $sym inside $m") + log(s"Eliminated instantiation of $sym inside $m") case STORE_LOCAL(l) if clobbers contains ((bb, idx)) => // if an unused instruction was a clobber of a used store to a reference or array type // then we'll replace it with the store of a null to make sure the reference is diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala index ea46116976..438a71061e 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala @@ -599,7 +599,7 @@ abstract class ICodeReader extends ClassfileParser { } case JVM.invokedynamic => // TODO, this is just a place holder. A real implementation must parse the class constant entry - debuglog("Found JVM invokedynamic instructionm, inserting place holder ICode INVOKE_DYNAMIC.") + debuglog("Found JVM invokedynamic instruction, inserting place holder ICode INVOKE_DYNAMIC.") containsInvokeDynamic = true val poolEntry = in.nextChar.toInt in.skip(2) diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index 5ecca5abce..80e06eb8fa 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -985,7 +985,7 @@ trait Implicits { if (implicitInfoss.forall(_.isEmpty)) SearchFailure else new ImplicitComputation(implicitInfoss, isLocalToCallsite) findBest() - /** Produce an implicict info map, i.e. a map from the class symbols C of all parts of this type to + /** Produce an implicit info map, i.e. a map from the class symbols C of all parts of this type to * the implicit infos in the companion objects of these class symbols C. * The parts of a type is the smallest set of types that contains * - the type itself diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index f9582a54ff..ea0a9bb243 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -934,7 +934,7 @@ trait Infer extends Checkable { def infer_s = map3(tparams, tvars, targs)((tparam, tvar, targ) => s"$tparam=$tvar/$targ") mkString "," printTyping(tree, s"infer expr instance from pt=$pt, $infer_s") - // SI-7899 infering by-name types is unsound. The correct behaviour is conditional because the hole is + // SI-7899 inferring by-name types is unsound. The correct behaviour is conditional because the hole is // exploited in Scalaz (Free.scala), as seen in: run/t7899-regression. 
def dropByNameIfStrict(tp: Type): Type = if (settings.inferByName) tp else dropByName(tp) def targsStrict = if (targs eq null) null else targs mapConserve dropByNameIfStrict diff --git a/src/compiler/scala/tools/nsc/util/DocStrings.scala b/src/compiler/scala/tools/nsc/util/DocStrings.scala index 352816803f..4ff7067a21 100755 --- a/src/compiler/scala/tools/nsc/util/DocStrings.scala +++ b/src/compiler/scala/tools/nsc/util/DocStrings.scala @@ -184,7 +184,7 @@ object DocStrings { extractSectionTag(str, section) -> section } - /** Extract the section tag, treating the section tag as an indentifier */ + /** Extract the section tag, treating the section tag as an identifier */ def extractSectionTag(str: String, section: (Int, Int)): String = str.substring(section._1, skipTag(str, section._1)) diff --git a/src/library/scala/collection/generic/Sorted.scala b/src/library/scala/collection/generic/Sorted.scala index a0b0e1318b..b2e63daaba 100644 --- a/src/library/scala/collection/generic/Sorted.scala +++ b/src/library/scala/collection/generic/Sorted.scala @@ -36,7 +36,7 @@ trait Sorted[K, +This <: Sorted[K, This]] { /** Creates a ranged projection of this collection. Any mutations in the * ranged projection will update this collection and vice versa. * - * Note: keys are not garuanteed to be consistent between this collection + * Note: keys are not guaranteed to be consistent between this collection * and the projection. This is the case for buffers where indexing is * relative to the projection. * diff --git a/src/reflect/scala/reflect/api/FlagSets.scala b/src/reflect/scala/reflect/api/FlagSets.scala index bcad84a3f0..d3294dad9b 100644 --- a/src/reflect/scala/reflect/api/FlagSets.scala +++ b/src/reflect/scala/reflect/api/FlagSets.scala @@ -48,7 +48,7 @@ import scala.language.implicitConversions * ''Of Note:'' This part of the Reflection API is being considered as a candidate for redesign. It is * quite possible that in future releases of the reflection API, flag sets could be replaced with something else. * - * For more details about `FlagSet`s and other aspects of Scala reflection, see the + * For more details about `FlagSet`s and other aspects of Scala reflection, see the * [[http://docs.scala-lang.org/overviews/reflection/overview.html Reflection Guide]] * * @group ReflectionAPI diff --git a/src/reflect/scala/reflect/api/Printers.scala b/src/reflect/scala/reflect/api/Printers.scala index 01b9759c70..c0abc5120c 100644 --- a/src/reflect/scala/reflect/api/Printers.scala +++ b/src/reflect/scala/reflect/api/Printers.scala @@ -130,7 +130,7 @@ import java.io.{ PrintWriter, StringWriter } * TermName("y")#2541#GET)) * }}} * - * For more details about `Printer`s and other aspects of Scala reflection, see the + * For more details about `Printer`s and other aspects of Scala reflection, see the * [[http://docs.scala-lang.org/overviews/reflection/overview.html Reflection Guide]] * * @group ReflectionAPI diff --git a/src/reflect/scala/reflect/internal/util/StripMarginInterpolator.scala b/src/reflect/scala/reflect/internal/util/StripMarginInterpolator.scala index e622e78d57..35858cdc78 100644 --- a/src/reflect/scala/reflect/internal/util/StripMarginInterpolator.scala +++ b/src/reflect/scala/reflect/internal/util/StripMarginInterpolator.scala @@ -13,7 +13,7 @@ trait StripMarginInterpolator { * The margin of each line is defined by whitespace leading up to a '|' character. * This margin is stripped '''before''' the arguments are interpolated into to string. 
* - * String escape sequences are '''not''' processed; this interpolater is designed to + * String escape sequences are '''not''' processed; this interpolator is designed to * be used with triple quoted Strings. * * {{{ diff --git a/test/files/jvm/javaReflection/Test.scala b/test/files/jvm/javaReflection/Test.scala index ae5a36eeb2..199399fec8 100644 --- a/test/files/jvm/javaReflection/Test.scala +++ b/test/files/jvm/javaReflection/Test.scala @@ -31,7 +31,7 @@ getSimpleName / getCanonicalName / isAnonymousClass / isLocalClass / isSynthetic These should be avoided, they yield unexpected results: - isAnonymousClass is always false. Scala-defined classes are never anonymous for Java - reflection. Java reflection insepects the class name to decide whether a class is + reflection. Java reflection inspects the class name to decide whether a class is anonymous, based on the name spec referenced above. Also, the implementation of "isAnonymousClass" calls "getSimpleName", which may throw. diff --git a/test/files/jvm/protectedacc.scala b/test/files/jvm/protectedacc.scala index 89e70b90d8..43d218fa89 100644 --- a/test/files/jvm/protectedacc.scala +++ b/test/files/jvm/protectedacc.scala @@ -74,7 +74,7 @@ package p { package b { import a._; - /** Test interraction with Scala inherited methods and currying. */ + /** Test interaction with Scala inherited methods and currying. */ class B extends A { class C { def m = { diff --git a/test/files/neg/t8431.scala b/test/files/neg/t8431.scala index 032a1f394d..bc45bb62ae 100644 --- a/test/files/neg/t8431.scala +++ b/test/files/neg/t8431.scala @@ -48,7 +48,7 @@ class TestExplicit { {val c1 = convert2(s); c1.combined} } -// These ones work before and after; infering G=Null doesn't need to contribute an undetermined type param. +// These ones work before and after; inferring G=Null doesn't need to contribute an undetermined type param. class Test3 { import C.{cbf, convert1, convert2} val s: Invariant[Null] = ??? diff --git a/test/files/pos/t8947/Macro_1.scala b/test/files/pos/t8947/Macro_1.scala index 4a5de3decb..ace389f339 100644 --- a/test/files/pos/t8947/Macro_1.scala +++ b/test/files/pos/t8947/Macro_1.scala @@ -35,7 +35,7 @@ object X { // symtab.EmptyTree.setAttachments(symtab.NoPosition) // } // - // To make this visible to the macro implementaiton, it will need to be compiled in an earlier stage, + // To make this visible to the macro implementation, it will need to be compiled in an earlier stage, // e.g a separate SBT sub-project. 
} diff --git a/test/files/run/blame_eye_triple_eee-double.check b/test/files/run/blame_eye_triple_eee-double.check index 5e46d91a8f..53eac99ecd 100644 --- a/test/files/run/blame_eye_triple_eee-double.check +++ b/test/files/run/blame_eye_triple_eee-double.check @@ -6,4 +6,4 @@ if (x != x) is good if (NaN != x) is good x matching was good NaN matching was good -loop with NaN was goood +loop with NaN was good diff --git a/test/files/run/blame_eye_triple_eee-double.scala b/test/files/run/blame_eye_triple_eee-double.scala index 1640aead40..4dcbfe7a7a 100644 --- a/test/files/run/blame_eye_triple_eee-double.scala +++ b/test/files/run/blame_eye_triple_eee-double.scala @@ -56,6 +56,6 @@ object Test extends App { else z = NaN i += 1 } - if (z.isNaN && i == 10) println("loop with NaN was goood") + if (z.isNaN && i == 10) println("loop with NaN was good") else println("loop with NaN was broken") } diff --git a/test/files/run/blame_eye_triple_eee-float.check b/test/files/run/blame_eye_triple_eee-float.check index 5e46d91a8f..53eac99ecd 100644 --- a/test/files/run/blame_eye_triple_eee-float.check +++ b/test/files/run/blame_eye_triple_eee-float.check @@ -6,4 +6,4 @@ if (x != x) is good if (NaN != x) is good x matching was good NaN matching was good -loop with NaN was goood +loop with NaN was good diff --git a/test/files/run/blame_eye_triple_eee-float.scala b/test/files/run/blame_eye_triple_eee-float.scala index 4deb9f3d60..bcc6b195d5 100644 --- a/test/files/run/blame_eye_triple_eee-float.scala +++ b/test/files/run/blame_eye_triple_eee-float.scala @@ -56,6 +56,6 @@ object Test extends App { else z = NaN i += 1 } - if (z.isNaN && i == 10) println("loop with NaN was goood") + if (z.isNaN && i == 10) println("loop with NaN was good") else println("loop with NaN was broken") } diff --git a/test/files/run/names-defaults.scala b/test/files/run/names-defaults.scala index c364425ec9..7fb4a04546 100644 --- a/test/files/run/names-defaults.scala +++ b/test/files/run/names-defaults.scala @@ -239,7 +239,7 @@ object Test extends App { // result type of default getters: parameter type, except if this one mentions any type // parameter, in which case the result type is inferred. examples: - // result type of default getter is "String => String". if it were infered, the compiler + // result type of default getter is "String => String". if it were inferred, the compiler // would put "Nothing => Nothing", which is useless def transform(s: String, f: String => String = identity _) = f(s) println(transform("my text")) diff --git a/test/files/run/t0631.scala b/test/files/run/t0631.scala index c401ed31cb..8d672574ec 100644 --- a/test/files/run/t0631.scala +++ b/test/files/run/t0631.scala @@ -11,6 +11,6 @@ object Test extends App { case class Bar(x: Foo) val b = new Bar(new Foo) - // this should not call Foo.equals, but simply compare object identiy of b + // this should not call Foo.equals, but simply compare object identity of b println(b == b) } diff --git a/test/files/run/t2526.scala b/test/files/run/t2526.scala index 53f3059135..9f3c48ec61 100644 --- a/test/files/run/t2526.scala +++ b/test/files/run/t2526.scala @@ -38,7 +38,7 @@ object Test { /* * Checks foreach of `actual` goes over all the elements in `expected` - * We duplicate the method above because there is no common inteface between Traversable and + * We duplicate the method above because there is no common interface between Traversable and * Iterator and we want to avoid converting between collections to ensure that we test what * we mean to test. 
*/ diff --git a/test/files/run/t7817-tree-gen.scala b/test/files/run/t7817-tree-gen.scala index a8317fda6e..094c0d6289 100644 --- a/test/files/run/t7817-tree-gen.scala +++ b/test/files/run/t7817-tree-gen.scala @@ -1,6 +1,6 @@ import scala.tools.partest._ -// Testing that `mkAttributedRef` doesn't incude the package object test.`package`, +// Testing that `mkAttributedRef` doesn't include the package object test.`package`, // under joint and separate compilation. package testSep { class C { object O } } diff --git a/test/files/scalacheck/quasiquotes/RuntimeErrorProps.scala b/test/files/scalacheck/quasiquotes/RuntimeErrorProps.scala index a3b6137f68..40fb42d63c 100644 --- a/test/files/scalacheck/quasiquotes/RuntimeErrorProps.scala +++ b/test/files/scalacheck/quasiquotes/RuntimeErrorProps.scala @@ -68,7 +68,7 @@ object RuntimeErrorProps extends QuasiquoteProperties("errors") { q"for(..$enums) 0" } - property("for inlalid enum") = testFails { + property("for invalid enum") = testFails { val enums = q"foo" :: Nil q"for(..$enums) 0" } diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala index b8c5f85c49..0309bb97cc 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala @@ -503,7 +503,7 @@ class InlinerTest extends ClearAfterClass { |class C extends T """.stripMargin val List(c, t, tClass) = compile(code) - // the static implementaiton method is inlined into the mixin, so there's no invocation in the mixin + // the static implementation method is inlined into the mixin, so there's no invocation in the mixin assertNoInvoke(getSingleMethod(c, "f")) } diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOpts.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOpts.scala index 1ce1b88ff2..5ef2458c0a 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOpts.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOpts.scala @@ -56,7 +56,7 @@ class MethodLevelOpts extends ClearAfterClass { } @Test - def inlineReturnInCachtNotTry(): Unit = { + def inlineReturnInCatchNotTry(): Unit = { val code = "def f: Int = return { try 1 catch { case _: Throwable => 2 } }" // cannot inline the IRETURN into the try block (because RETURN may throw IllegalMonitorState) val m = singleMethod(methodOptCompiler)(code) -- cgit v1.2.3