From 0998dee9921b094643f9722d3fcae42fba0bde41 Mon Sep 17 00:00:00 2001 From: Martijn Hoekstra Date: Sat, 5 Dec 2020 08:44:02 +0100 Subject: [PATCH 001/769] override diff on List for performance --- src/library/scala/collection/Seq.scala | 36 +++++++++----- .../collection/StrictOptimizedSeqOps.scala | 49 ++++++++++++------- .../scala/collection/immutable/List.scala | 30 ++++++++++++ .../collection/immutable/ListBenchmark.scala | 28 +++++++++++ 4 files changed, 112 insertions(+), 31 deletions(-) diff --git a/src/library/scala/collection/Seq.scala b/src/library/scala/collection/Seq.scala index cad0a3c2ad5e..4ece3ec8a6fd 100644 --- a/src/library/scala/collection/Seq.scala +++ b/src/library/scala/collection/Seq.scala @@ -857,12 +857,16 @@ trait SeqOps[+A, +CC[_], +C] extends Any def diff[B >: A](that: Seq[B]): C = { val occ = occCounts(that) fromSpecific(iterator.filter { x => - val ox = occ(x) // Avoid multiple map lookups - if (ox == 0) true - else { - occ(x) = ox - 1 - false + var include = false + occ.updateWith(x) { + case None => { + include = true + None + } + case Some(1) => None + case Some(n) => Some(n - 1) } + include }) } @@ -878,11 +882,16 @@ trait SeqOps[+A, +CC[_], +C] extends Any def intersect[B >: A](that: Seq[B]): C = { val occ = occCounts(that) fromSpecific(iterator.filter { x => - val ox = occ(x) // Avoid multiple map lookups - if (ox > 0) { - occ(x) = ox - 1 - true - } else false + var include = true + occ.updateWith(x) { + case None => { + include = false + None + } + case Some(1) => None + case Some(n) => Some(n - 1) + } + include }) } @@ -920,8 +929,11 @@ trait SeqOps[+A, +CC[_], +C] extends Any } protected[collection] def occCounts[B](sq: Seq[B]): mutable.Map[B, Int] = { - val occ = new mutable.HashMap[B, Int]().withDefaultValue(0) - for (y <- sq) occ(y) += 1 + val occ = new mutable.HashMap[B, Int]() + for (y <- sq) occ.updateWith(y) { + case None => Some(1) + case Some(n) => Some(n + 1) + } occ } diff --git a/src/library/scala/collection/StrictOptimizedSeqOps.scala b/src/library/scala/collection/StrictOptimizedSeqOps.scala index 73f89fa46897..396e53885081 100644 --- a/src/library/scala/collection/StrictOptimizedSeqOps.scala +++ b/src/library/scala/collection/StrictOptimizedSeqOps.scala @@ -75,27 +75,38 @@ trait StrictOptimizedSeqOps [+A, +CC[_], +C] b.result() } - override def diff[B >: A](that: Seq[B]): C = { - val occ = occCounts(that) - val b = newSpecificBuilder - for (x <- this) { - val ox = occ(x) // Avoid multiple map lookups - if (ox == 0) b += x - else occ(x) = ox - 1 + override def diff[B >: A](that: Seq[B]): C = + if (isEmpty || that.isEmpty) coll + else { + val occ = occCounts(that) + val b = newSpecificBuilder + for (x <- this) { + occ.updateWith(x) { + case None => { + b.addOne(x) + None + } + case Some(1) => None + case Some(n) => Some(n - 1) + } + } + b.result() } - b.result() - } - override def intersect[B >: A](that: Seq[B]): C = { - val occ = occCounts(that) - val b = newSpecificBuilder - for (x <- this) { - val ox = occ(x) // Avoid multiple map lookups - if (ox > 0) { - b += x - occ(x) = ox - 1 + override def intersect[B >: A](that: Seq[B]): C = + if (isEmpty || that.isEmpty) empty + else { + val occ = occCounts(that) + val b = newSpecificBuilder + for (x <- this) { + occ.updateWith(x) { + case None => None + case Some(n) => { + b.addOne(x) + if (n == 1) None else Some(n - 1) + } + } } + b.result() } - b.result() - } } diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala index 
4418a6353510..b0a5944716e3 100644 --- a/src/library/scala/collection/immutable/List.scala +++ b/src/library/scala/collection/immutable/List.scala @@ -614,6 +614,36 @@ sealed abstract class List[+A] } } + // Override for performance: traverse only as much as needed + // and share tail when nothing needs to be filtered out anymore + override def diff[B >: A](that: collection.Seq[B]): List[A] = { + if (that.isEmpty || this.isEmpty) this + else if (tail.isEmpty) if (that.contains(head)) Nil else this + else { + val occ = occCounts(that) + val b = new ListBuffer[A]() + @tailrec + def rec(remainder: List[A]): List[A] = { + if(occ.isEmpty) b.prependToList(remainder) + else remainder match { + case Nil => b.result() + case head :: next => { + occ.updateWith(head){ + case None => { + b.append(head) + None + } + case Some(1) => None + case Some(n) => Some(n - 1) + } + rec(next) + } + } + } + rec(this) + } + } + } // Internal code that mutates `next` _must_ call `Statics.releaseFence()` if either immediately, or diff --git a/test/benchmarks/src/main/scala/scala/collection/immutable/ListBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/immutable/ListBenchmark.scala index 8a44778fae12..c00b2d6be80d 100644 --- a/test/benchmarks/src/main/scala/scala/collection/immutable/ListBenchmark.scala +++ b/test/benchmarks/src/main/scala/scala/collection/immutable/ListBenchmark.scala @@ -24,6 +24,8 @@ class ListBenchmark { var mid: Content = _ var last: Content = _ var replacement: Content = _ + var firstHalf: List[Content] = _ + var lastHalf: List[Content] = _ @Setup(Level.Trial) def initKeys(): Unit = { @@ -31,6 +33,8 @@ class ListBenchmark { mid = Content(size / 2) last = Content(Math.max(0,size -1)) replacement = Content(size * 2 + 1) + firstHalf = values.take(size / 2) + lastHalf = values.drop(size / 2) } @Benchmark def filter_includeAll: Any = { @@ -86,4 +90,28 @@ class ListBenchmark { @Benchmark def partition_exc_last: Any = { values.partition(v => v.value != last.value) } + + @Benchmark def diff_single_mid: Any = { + values.diff(List(mid)) + } + + @Benchmark def diff_single_last: Any = { + values.diff(List(last)) + } + + @Benchmark def diff_notIncluded: Any = { + values.diff(List(Content(-1))) + } + + @Benchmark def diff_identical: Any = { + values.diff(values) + } + + @Benchmark def diff_first_half: Any = { + values.diff(firstHalf) + } + + @Benchmark def diff_last_half: Any = { + values.diff(lastHalf) + } } From 3daef23afe89dcf1d17f91f1da001e04d3c8fdd6 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Tue, 2 Feb 2021 22:54:42 -0800 Subject: [PATCH 002/769] Import from _root_ --- .../tools/nsc/typechecker/ContextErrors.scala | 3 --- .../scala/tools/nsc/typechecker/Namers.scala | 3 --- test/files/neg/t9125.check | 6 ++++++ test/files/neg/t9125.scala | 13 +++++++++++++ test/files/pos/t9125.scala | 14 ++++++++++++++ 5 files changed, 33 insertions(+), 6 deletions(-) create mode 100644 test/files/neg/t9125.check create mode 100644 test/files/neg/t9125.scala create mode 100644 test/files/pos/t9125.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index 57888bf6d3cc..04c5258561d6 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -1346,9 +1346,6 @@ trait ContextErrors { def ParentSealedInheritanceError(parent: Tree, psym: Symbol) = NormalTypeError(parent, "illegal inheritance from sealed " + psym ) - def 
RootImportError(tree: Tree) = - issueNormalTypeError(tree, "_root_ cannot be imported") - def SymbolValidationError(sym: Symbol, errKind: SymValidateErrors.Value): Unit = { val msg = errKind match { case ImplicitConstr => "`implicit` modifier not allowed for constructors" diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 94d46ff8aa6a..7d62d53ec77c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -1825,9 +1825,6 @@ trait Namers extends MethodSynthesis { val Import(expr, selectors) = imp val expr1 = typer.typedQualifier(expr) - if (expr1.symbol != null && expr1.symbol.isRootPackage) - RootImportError(imp) - if (expr1.isErrorTyped) ErrorType else { diff --git a/test/files/neg/t9125.check b/test/files/neg/t9125.check new file mode 100644 index 000000000000..cf58ee5c812a --- /dev/null +++ b/test/files/neg/t9125.check @@ -0,0 +1,6 @@ +t9125.scala:10: error: reference to p is ambiguous; +it is both defined in package q and imported subsequently by +import _root_.p + def f() = new p.C + ^ +1 error diff --git a/test/files/neg/t9125.scala b/test/files/neg/t9125.scala new file mode 100644 index 000000000000..78af55cb91e9 --- /dev/null +++ b/test/files/neg/t9125.scala @@ -0,0 +1,13 @@ + +package p { + class C +} + +package q { + object p { + class K { + import _root_.p + def f() = new p.C + } + } +} diff --git a/test/files/pos/t9125.scala b/test/files/pos/t9125.scala new file mode 100644 index 000000000000..f1d3e67f618f --- /dev/null +++ b/test/files/pos/t9125.scala @@ -0,0 +1,14 @@ + +package p { + class C +} + +package q { + package p { + class K { + import _root_.{p => pp} + def f() = new pp.C + def g() = new _root_.p.C + } + } +} From 70917e0ec2f35da33c95a3cb6ef7015ceeb7b17c Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Wed, 3 Feb 2021 09:00:58 -0800 Subject: [PATCH 003/769] User test case --- test/files/pos/t283.scala | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 test/files/pos/t283.scala diff --git a/test/files/pos/t283.scala b/test/files/pos/t283.scala new file mode 100644 index 000000000000..8691404db6ae --- /dev/null +++ b/test/files/pos/t283.scala @@ -0,0 +1,5 @@ + +import _root_._ // _root_.java._ is OK +object Test extends App { + println(java.util.Locale.getDefault().toString) // static call +} From f745f92a6ceb8bb813d4530eb058ad8e1995991c Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Sun, 15 Feb 2015 13:33:09 -0800 Subject: [PATCH 004/769] Catch any expression This commit takes an arbitrary expression to `catch`. The expression is required to conform to `Function[Throwable, ?]`. The previous transform was name-based. If the handler is a `PartialFunction`, it is invoked conditionally. More behavior tests for catch expression. 
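
A sketch of the resulting surface, mirroring the behaviour pinned down by the
new test/files/run/t5887.scala (`npe` throws a NullPointerException and `err`
throws an Error; both helpers, and `pf`/`f`, are taken from that test):

    val pf: PartialFunction[Throwable, Int] = { case _: NullPointerException => 42 }
    val f: Throwable => Int = pf

    try npe catch pf   // 42: pf is defined at NullPointerException, so it is invoked
    try npe catch f    // 42: a total function handler is always invoked
    try err catch pf   // pf is not defined at Error, so the Error is rethrown
    try err catch f    // f is total, so it is applied; pf's body throws a MatchError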
--- spec/06-expressions.md | 7 +++--- .../scala/tools/nsc/ast/parser/Parsers.scala | 16 ++++--------- .../tools/nsc/ast/parser/TreeBuilder.scala | 23 +++++++++++++++++++ .../scala/tools/nsc/typechecker/Typers.scala | 11 ++++++++- test/files/neg/t5887.check | 12 +++++++++- test/files/neg/t5887.scala | 23 +++++++++++++++++-- test/files/run/t5887.scala | 17 ++++++++++++++ 7 files changed, 90 insertions(+), 19 deletions(-) create mode 100644 test/files/run/t5887.scala diff --git a/spec/06-expressions.md b/spec/06-expressions.md index 905fa5bf4925..d9a23e909cce 100644 --- a/spec/06-expressions.md +++ b/spec/06-expressions.md @@ -1106,7 +1106,7 @@ Expr1 ::= ‘try’ Expr [‘catch’ Expr] [‘finally’ Expr] ``` A _try expression_ is of the form `try { ´b´ } catch ´h´` -where the handler ´h´ is a +where the handler ´h´ is usually a [pattern matching anonymous function](08-pattern-matching.html#pattern-matching-anonymous-functions) ```scala @@ -1120,11 +1120,12 @@ handler ´h´ is applied to the thrown exception. If the handler contains a case matching the thrown exception, the first such case is invoked. If the handler contains no case matching the thrown exception, the exception is -re-thrown. +re-thrown. More generally, if the handler is a `PartialFunction`, +it is applied only if it is defined at the given exception. Let ´\mathit{pt}´ be the expected type of the try expression. The block ´b´ is expected to conform to ´\mathit{pt}´. The handler ´h´ -is expected conform to type `scala.PartialFunction[scala.Throwable, ´\mathit{pt}\,´]`. +is expected conform to type `scala.Function[scala.Throwable, ´\mathit{pt}\,´]`. The type of the try expression is the [weak least upper bound](03-types.html#weak-conformance) of the type of ´b´ and the result type of ´h´. diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 4736df60916a..22c71d18b4a5 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -1546,22 +1546,14 @@ self => case TRY => def parseTry = atPos(in.skipToken()) { val body = expr() - def catchFromExpr() = List(makeCatchFromExpr(expr())) - val catches: List[CaseDef] = - if (in.token != CATCH) Nil - else { - in.nextToken() - if (in.token != LBRACE) catchFromExpr() - else inBracesOrNil { - if (in.token == CASE) caseClauses() - else catchFromExpr() - } - } + val handler: List[CaseDef] = + if (in.token == CATCH) { in.nextToken(); makeMatchFromExpr(expr()) } + else Nil val finalizer = in.token match { case FINALLY => in.nextToken() ; expr() case _ => EmptyTree } - Try(body, catches, finalizer) + Try(body, handler, finalizer) } parseTry case WHILE => diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala index ea7e9f1b0cc5..0a5be517d727 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala @@ -102,6 +102,17 @@ abstract class TreeBuilder { def makeCaseDef(pat: Tree, guard: Tree, rhs: Tree): CaseDef = CaseDef(gen.patvarTransformer.transform(pat), guard, rhs) + /** At parser, rejigger non-case catch expression. + * + * Match is eliminated by unwrapping. Other expression + * becomes a single CaseDef with empty pattern and + * expr tree as RHS. 
+ */ + def makeMatchFromExpr(catchExpr: Tree): List[CaseDef] = catchExpr match { + case Match(EmptyTree, cases) => cases + case _ => CaseDef(EmptyTree, EmptyTree, catchExpr) :: Nil + } + /** Creates tree representing: * { case x: Throwable => * val catchFn = catchExpr @@ -124,6 +135,18 @@ abstract class TreeBuilder { makeCaseDef(pat, EmptyTree, body) } + /** Creates tree representing: + * { case x: Throwable => catchExpr(x) } + */ + def makeCatchFromFunc(catchFn: Tree): CaseDef = { + val binder = freshTermName() + val pat = Bind(binder, Typed(Ident(nme.WILDCARD), Ident(tpnme.Throwable))) + val body = atPos(catchFn.pos.makeTransparent)(Block( + Apply(Select(catchFn, nme.apply), List(Ident(binder))), + )) + makeCaseDef(pat, EmptyTree, body) + } + /** Create a tree representing the function type (argtpes) => restpe */ def makeFunctionTypeTree(argtpes: List[Tree], restpe: Tree): Tree = gen.mkFunctionTypeTree(argtpes, restpe) diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 1cd70a05c87e..f22f198c60b3 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -5678,7 +5678,16 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def typedTry(tree: Try) = { val Try(block, catches, fin) = tree val block1 = typed(block, pt) - val catches1 = typedCases(catches, ThrowableTpe, pt) + val cases = catches match { + case CaseDef(EmptyTree, EmptyTree, catchExpr) :: Nil => + val e = typed(catchExpr, functionType(List(ThrowableTpe), pt)) + val catcher = + if (isPartialFunctionType(e.tpe)) treeBuilder.makeCatchFromExpr(e) + else treeBuilder.makeCatchFromFunc(e) + catcher :: Nil + case _ => catches + } + val catches1 = typedCases(cases, ThrowableTpe, pt) val fin1 = if (fin.isEmpty) fin else typed(fin, UnitTpe) def finish(ownType: Type) = treeCopy.Try(tree, block1, catches1, fin1) setType ownType diff --git a/test/files/neg/t5887.check b/test/files/neg/t5887.check index aec5beed78a2..a805798a380e 100644 --- a/test/files/neg/t5887.check +++ b/test/files/neg/t5887.check @@ -1,8 +1,18 @@ +t5887.scala:6: error: type mismatch; + found : Int(22) + required: Throwable => ? + def f = try ??? catch 22 + ^ t5887.scala:10: error: missing parameter type for expanded function The argument types of an anonymous function must be fully known. (SLS 8.5) Expected type was: ? def h = List("x") map (s => try { case _ => 7 }) ^ +t5887.scala:29: error: type mismatch; + found : TheOldCollegeTry.this.catcher.type + required: Throwable => Int + def noLongerAllower: Int = try 42 catch catcher + ^ t5887.scala:8: warning: A try without a catch or finally is equivalent to putting its body in a block; no exceptions are handled. def g = try 42 ^ @@ -10,4 +20,4 @@ t5887.scala:10: warning: A try without a catch or finally is equivalent to putti def h = List("x") map (s => try { case _ => 7 }) ^ 2 warnings -1 error +3 errors diff --git a/test/files/neg/t5887.scala b/test/files/neg/t5887.scala index d9493adc2e80..e8dc51f91af8 100644 --- a/test/files/neg/t5887.scala +++ b/test/files/neg/t5887.scala @@ -2,10 +2,29 @@ trait TheOldCollegeTry { // was: value isDefinedAt is not a member of Int - // now: required: PartialFunction[Throwable,?] - //def f = try ??? catch 22 + // now: required: Function[Throwable,?] + def f = try ??? catch 22 def g = try 42 def h = List("x") map (s => try { case _ => 7 }) + + def j = try ??? 
catch (_ => 42) + + import PartialFunction.fromFunction + + def recover(t: Throwable): Int = 42 + def k = try 27 catch fromFunction(recover) + def k2 = try 27 catch recover + + def parseErrorHandler[T]: PartialFunction[Throwable, T] = ??? + def pushBusy[T](body: => T): T = + try body + catch parseErrorHandler + + object catcher { + def isDefinedAt(x: Any) = true + def apply(x: Any) = 27 + } + def noLongerAllower: Int = try 42 catch catcher } diff --git a/test/files/run/t5887.scala b/test/files/run/t5887.scala new file mode 100644 index 000000000000..410eb7c11758 --- /dev/null +++ b/test/files/run/t5887.scala @@ -0,0 +1,17 @@ + +import scala.tools.testkit.AssertUtil.assertThrows + +object Test extends App { + def npe: Int = throw null + def err: Int = throw new Error() + + val pf: PartialFunction[Throwable, Int] = { case _: NullPointerException => 42 } + val f: Throwable => Int = pf + + assertThrows[NullPointerException](npe) + + assert(42 == (try npe catch pf)) + assert(42 == (try npe catch f)) + assertThrows[Error](try err catch pf) + assertThrows[MatchError](try err catch f) +} From aa708e378e85c431fe7b6574f2541eb09d680e97 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Tue, 22 Dec 2020 10:11:43 -0800 Subject: [PATCH 005/769] Warn on total function for catch handler --- .../scala/tools/nsc/typechecker/Typers.scala | 47 ++++++++++--------- test/files/neg/catch-all.check | 5 +- test/files/neg/catch-all.scala | 8 +++- test/files/neg/t5887.check | 8 +++- test/files/run/t5887.scala | 2 + 5 files changed, 46 insertions(+), 24 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index f22f198c60b3..6181c49c86d6 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -5652,30 +5652,32 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper treeCopy.Star(tree, typed(tree.elem, mode, pt)) setType makeFullyDefined(pt) } - def issueTryWarnings(tree: Try): Try = { - def checkForCatchAll(cdef: CaseDef): Unit = { - def unbound(t: Tree) = t.symbol == null || t.symbol == NoSymbol - def warn(name: Name) = { - val msg = s"This catches all Throwables. If this is really intended, use `case ${name.decoded} : Throwable` to clear this warning." - context.warning(cdef.pat.pos, msg, WarningCategory.Other) + + def typedTry(tree: Try) = { + def warn(pos: Position, name: Name) = { + val msg = s"This catches all Throwables. If this is really intended, use `case ${name.decoded} : Throwable` to clear this warning." 
+ context.warning(pos, msg, WarningCategory.Other) + } + def issueTryWarnings(tree: Try): Try = { + def checkForCatchAll(cdef: CaseDef): Unit = { + def unbound(t: Tree) = t.symbol == null || t.symbol == NoSymbol + if (cdef.guard.isEmpty) cdef.pat match { + case Bind(name, i @ Ident(_)) if unbound(i) => warn(cdef.pat.pos, name) + case i @ Ident(name) if unbound(i) => warn(cdef.pat.pos, name) + case _ => + } } - if (cdef.guard.isEmpty) cdef.pat match { - case Bind(name, i @ Ident(_)) if unbound(i) => warn(name) - case i @ Ident(name) if unbound(i) => warn(name) - case _ => + if (!isPastTyper) tree match { + case Try(_, Nil, fin) => + if (fin eq EmptyTree) + context.warning(tree.pos, "A try without a catch or finally is equivalent to putting its body in a block; no exceptions are handled.", WarningCategory.Other) + case Try(_, catches, _) => + catches foreach checkForCatchAll } + tree } - if (!isPastTyper) tree match { - case Try(_, Nil, fin) => - if (fin eq EmptyTree) - context.warning(tree.pos, "A try without a catch or finally is equivalent to putting its body in a block; no exceptions are handled.", WarningCategory.Other) - case Try(_, catches, _) => - catches foreach checkForCatchAll - } - tree - } - def typedTry(tree: Try) = { + val Try(block, catches, fin) = tree val block1 = typed(block, pt) val cases = catches match { @@ -5683,7 +5685,10 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val e = typed(catchExpr, functionType(List(ThrowableTpe), pt)) val catcher = if (isPartialFunctionType(e.tpe)) treeBuilder.makeCatchFromExpr(e) - else treeBuilder.makeCatchFromFunc(e) + else { + warn(e.pos, nme.WILDCARD) + treeBuilder.makeCatchFromFunc(e) + } catcher :: Nil case _ => catches } diff --git a/test/files/neg/catch-all.check b/test/files/neg/catch-all.check index ac20a14164ae..e56376138e0a 100644 --- a/test/files/neg/catch-all.check +++ b/test/files/neg/catch-all.check @@ -7,6 +7,9 @@ catch-all.scala:6: warning: This catches all Throwables. If this is really inten catch-all.scala:8: warning: This catches all Throwables. If this is really intended, use `case x : Throwable` to clear this warning. try { "warn" } catch { case _: RuntimeException => ; case x => } ^ +catch-all.scala:36: warning: This catches all Throwables. If this is really intended, use `case _ : Throwable` to clear this warning. + try "okay" catch discarder // warn total function + ^ error: No warnings can be incurred under -Werror. 
-3 warnings +4 warnings 1 error diff --git a/test/files/neg/catch-all.scala b/test/files/neg/catch-all.scala index c8308e1d8939..eb9f9b506716 100644 --- a/test/files/neg/catch-all.scala +++ b/test/files/neg/catch-all.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings +// scalac: -Werror // object CatchAll { try { "warn" } catch { case _ => } @@ -28,6 +28,12 @@ object CatchAll { try { "okay" } catch { case _ if "".isEmpty => } "okay" match { case _ => "" } + + val handler: PartialFunction[Throwable, String] = { case _ => "hello, world" } + val discarder = (_: Throwable) => "goodbye, cruel world" + + try "okay" catch handler + try "okay" catch discarder // warn total function } object T extends Throwable diff --git a/test/files/neg/t5887.check b/test/files/neg/t5887.check index a805798a380e..21bedc99d721 100644 --- a/test/files/neg/t5887.check +++ b/test/files/neg/t5887.check @@ -19,5 +19,11 @@ t5887.scala:8: warning: A try without a catch or finally is equivalent to puttin t5887.scala:10: warning: A try without a catch or finally is equivalent to putting its body in a block; no exceptions are handled. def h = List("x") map (s => try { case _ => 7 }) ^ -2 warnings +t5887.scala:12: warning: This catches all Throwables. If this is really intended, use `case _ : Throwable` to clear this warning. + def j = try ??? catch (_ => 42) + ^ +t5887.scala:18: warning: This catches all Throwables. If this is really intended, use `case _ : Throwable` to clear this warning. + def k2 = try 27 catch recover + ^ +4 warnings 3 errors diff --git a/test/files/run/t5887.scala b/test/files/run/t5887.scala index 410eb7c11758..34bf5fa8487a 100644 --- a/test/files/run/t5887.scala +++ b/test/files/run/t5887.scala @@ -1,6 +1,8 @@ import scala.tools.testkit.AssertUtil.assertThrows +import scala.annotation.nowarn +@nowarn("msg=This catches all Throwables.") object Test extends App { def npe: Int = throw null def err: Int = throw new Error() From a5c4802140dd6906ad16d54a625ef4336920ef9c Mon Sep 17 00:00:00 2001 From: Filipe Regadas Date: Thu, 10 Dec 2020 21:27:31 +0000 Subject: [PATCH 006/769] Add missing @SerialVersionUID on collections --- project/MimaFilters.scala | 3 +++ src/library/scala/collection/MapLike.scala | 1 + .../scala/collection/SortedMapLike.scala | 1 + .../scala/collection/convert/Wrappers.scala | 19 +++++++++++++++++++ .../scala/collection/immutable/BitSet.scala | 2 ++ .../scala/collection/immutable/HashMap.scala | 3 +++ .../scala/collection/immutable/HashSet.scala | 4 ++++ .../scala/collection/immutable/IntMap.scala | 3 +++ .../scala/collection/immutable/LongMap.scala | 3 +++ .../scala/collection/immutable/Map.scala | 1 + .../collection/immutable/NumericRange.scala | 3 +++ .../scala/collection/immutable/Range.scala | 1 + .../collection/immutable/SortedMap.scala | 1 + .../collection/mutable/ArrayBuilder.scala | 11 +++++++++++ .../collection/mutable/DefaultEntry.scala | 1 + .../collection/mutable/LinkedEntry.scala | 1 + .../collection/mutable/LinkedHashSet.scala | 1 + .../scala/collection/mutable/ListMap.scala | 1 + .../scala/collection/mutable/LongMap.scala | 1 + .../scala/collection/mutable/Map.scala | 1 + .../collection/mutable/PriorityQueue.scala | 2 ++ .../scala/collection/mutable/Queue.scala | 1 + .../collection/mutable/WeakHashMap.scala | 1 + .../collection/mutable/WrappedArray.scala | 10 ++++++++++ 24 files changed, 76 insertions(+) diff --git a/project/MimaFilters.scala b/project/MimaFilters.scala index 0a0a5996591b..cb83f1da446a 100644 --- a/project/MimaFilters.scala +++ 
b/project/MimaFilters.scala @@ -25,6 +25,9 @@ object MimaFilters extends AutoPlugin { // #9314 #9315 #9507 NewRedBlackTree is private[collection] ProblemFilters.exclude[Problem]("scala.collection.immutable.NewRedBlackTree*"), + + // #9166 add missing serialVersionUID + ProblemFilters.exclude[MissingFieldProblem]("*.serialVersionUID"), ) override val buildSettings = Seq( diff --git a/src/library/scala/collection/MapLike.scala b/src/library/scala/collection/MapLike.scala index 0711ab2a01e0..bd57e6f4f5a5 100644 --- a/src/library/scala/collection/MapLike.scala +++ b/src/library/scala/collection/MapLike.scala @@ -172,6 +172,7 @@ self => /** The implementation class of the set returned by `keySet`. */ + @SerialVersionUID(1589106351530299313L) protected class DefaultKeySet extends AbstractSet[K] with Set[K] with Serializable { def contains(key : K) = self.contains(key) def iterator = keysIterator diff --git a/src/library/scala/collection/SortedMapLike.scala b/src/library/scala/collection/SortedMapLike.scala index 1703985c890b..692aad7b9049 100644 --- a/src/library/scala/collection/SortedMapLike.scala +++ b/src/library/scala/collection/SortedMapLike.scala @@ -36,6 +36,7 @@ self => override def keySet : SortedSet[A] = new DefaultKeySortedSet + @SerialVersionUID(-38666158592954763L) protected class DefaultKeySortedSet extends super.DefaultKeySet with SortedSet[A] { implicit def ordering = self.ordering override def + (elem: A): SortedSet[A] = (SortedSet[A]() ++ this + elem) diff --git a/src/library/scala/collection/convert/Wrappers.scala b/src/library/scala/collection/convert/Wrappers.scala index 7e8970c9d60f..74322ed2a3fd 100644 --- a/src/library/scala/collection/convert/Wrappers.scala +++ b/src/library/scala/collection/convert/Wrappers.scala @@ -27,6 +27,7 @@ private[collection] trait Wrappers { override def isEmpty = underlying.isEmpty } + @SerialVersionUID(7914730360012802566L) case class IteratorWrapper[A](underlying: Iterator[A]) extends ju.Iterator[A] with ju.Enumeration[A] { def hasNext = underlying.hasNext def next() = underlying.next() @@ -39,23 +40,28 @@ private[collection] trait Wrappers { def asJava = new IteratorWrapper(underlying) } + @SerialVersionUID(-2624079708378729299L) case class JIteratorWrapper[A](underlying: ju.Iterator[A]) extends AbstractIterator[A] with Iterator[A] { def hasNext = underlying.hasNext def next() = underlying.next } + @SerialVersionUID(1480199642890917878L) case class JEnumerationWrapper[A](underlying: ju.Enumeration[A]) extends AbstractIterator[A] with Iterator[A] { def hasNext = underlying.hasMoreElements def next() = underlying.nextElement } + @SerialVersionUID(8702516763061989735L) case class IterableWrapper[A](underlying: Iterable[A]) extends ju.AbstractCollection[A] with IterableWrapperTrait[A] { } + @SerialVersionUID(4914368587801013118L) case class JIterableWrapper[A](underlying: jl.Iterable[A]) extends AbstractIterable[A] with Iterable[A] { def iterator = underlying.iterator def newBuilder[B] = new mutable.ArrayBuffer[B] } + @SerialVersionUID(-9156669203906593803L) case class JCollectionWrapper[A](underlying: ju.Collection[A]) extends AbstractIterable[A] with Iterable[A] { def iterator = underlying.iterator override def size = underlying.size @@ -63,10 +69,12 @@ private[collection] trait Wrappers { def newBuilder[B] = new mutable.ArrayBuffer[B] } + @SerialVersionUID(-2066086677605085135L) case class SeqWrapper[A](underlying: Seq[A]) extends ju.AbstractList[A] with IterableWrapperTrait[A] { def get(i: Int) = underlying(i) } + 
@SerialVersionUID(-3277343097189933650L) case class MutableSeqWrapper[A](underlying: mutable.Seq[A]) extends ju.AbstractList[A] with IterableWrapperTrait[A] { def get(i: Int) = underlying(i) override def set(i: Int, elem: A) = { @@ -76,6 +84,7 @@ private[collection] trait Wrappers { } } + @SerialVersionUID(2065310383330290590L) case class MutableBufferWrapper[A](underlying: mutable.Buffer[A]) extends ju.AbstractList[A] with IterableWrapperTrait[A] { def get(i: Int) = underlying(i) override def set(i: Int, elem: A) = { val p = underlying(i); underlying(i) = elem; p } @@ -83,6 +92,7 @@ private[collection] trait Wrappers { override def remove(i: Int) = underlying remove i } + @SerialVersionUID(-7340917072424655477L) case class JListWrapper[A](underlying: ju.List[A]) extends mutable.AbstractBuffer[A] with mutable.Buffer[A] { def length = underlying.size override def isEmpty = underlying.isEmpty @@ -132,6 +142,7 @@ private[collection] trait Wrappers { } } + @SerialVersionUID(-4801553198679985982L) case class MutableSetWrapper[A](underlying: mutable.Set[A]) extends SetWrapper[A](underlying) { override def add(elem: A) = { val sz = underlying.size @@ -144,6 +155,7 @@ private[collection] trait Wrappers { override def clear() = underlying.clear() } + @SerialVersionUID(-8813164664953372494L) case class JSetWrapper[A](underlying: ju.Set[A]) extends mutable.AbstractSet[A] with mutable.Set[A] with mutable.SetLike[A, JSetWrapper[A]] { override def size = underlying.size @@ -240,6 +252,7 @@ private[collection] trait Wrappers { } } + @SerialVersionUID(8668425014051911127L) case class MutableMapWrapper[A, B](underlying: mutable.Map[A, B]) extends MapWrapper[A, B](underlying) { override def put(k: A, v: B) = underlying.put(k, v) match { case Some(v1) => v1 @@ -300,10 +313,12 @@ private[collection] trait Wrappers { * This includes `get`, as `java.util.Map`'s API does not allow for an * atomic `get` when `null` values may be present. */ + @SerialVersionUID(5258955232187049103L) case class JMapWrapper[A, B](underlying : ju.Map[A, B]) extends mutable.AbstractMap[A, B] with JMapWrapperLike[A, B, JMapWrapper[A, B]] { override def empty = JMapWrapper(new ju.HashMap[A, B]) } + @SerialVersionUID(3929791676502269860L) class ConcurrentMapWrapper[A, B](override val underlying: concurrent.Map[A, B]) extends MutableMapWrapper[A, B](underlying) with juc.ConcurrentMap[A, B] { override def putIfAbsent(k: A, v: B) = underlying.putIfAbsent(k, v) match { @@ -330,6 +345,7 @@ private[collection] trait Wrappers { * access is supported; multi-element operations such as maps and filters * are not guaranteed to be atomic. 
*/ + @SerialVersionUID(-8245743033724996882L) case class JConcurrentMapWrapper[A, B](underlying: juc.ConcurrentMap[A, B]) extends mutable.AbstractMap[A, B] with JMapWrapperLike[A, B, JConcurrentMapWrapper[A, B]] with concurrent.Map[A, B] { override def get(k: A) = Option(underlying get k) @@ -345,6 +361,7 @@ private[collection] trait Wrappers { underlying.replace(k, oldvalue, newvalue) } + @SerialVersionUID(942915481780293390L) case class DictionaryWrapper[A, B](underlying: mutable.Map[A, B]) extends ju.Dictionary[A, B] { def size: Int = underlying.size def isEmpty: Boolean = underlying.isEmpty @@ -372,6 +389,7 @@ private[collection] trait Wrappers { } } + @SerialVersionUID(-5214182838863307389L) case class JDictionaryWrapper[A, B](underlying: ju.Dictionary[A, B]) extends mutable.AbstractMap[A, B] with mutable.Map[A, B] { override def size: Int = underlying.size @@ -391,6 +409,7 @@ private[collection] trait Wrappers { override def clear() = underlying.clear() } + @SerialVersionUID(1265445269473530406L) case class JPropertiesWrapper(underlying: ju.Properties) extends mutable.AbstractMap[String, String] with mutable.Map[String, String] with mutable.MapLike[String, String, JPropertiesWrapper] { diff --git a/src/library/scala/collection/immutable/BitSet.scala b/src/library/scala/collection/immutable/BitSet.scala index 5f1c705100bb..e3bd0cbfcffd 100644 --- a/src/library/scala/collection/immutable/BitSet.scala +++ b/src/library/scala/collection/immutable/BitSet.scala @@ -123,6 +123,7 @@ object BitSet extends BitSetFactory[BitSet] { else new BitSet1(elems - java.lang.Long.lowestOneBit(elems)) } + @SerialVersionUID(-860417644893387539L) class BitSet2(val elems0: Long, elems1: Long) extends BitSet { protected def nwords = 2 protected def word(idx: Int) = if (idx == 0) elems0 else if (idx == 1) elems1 else 0L @@ -150,6 +151,7 @@ object BitSet extends BitSetFactory[BitSet] { * implementation. Care needs to be taken not to modify the exposed * array. 
*/ + @SerialVersionUID(807040099560956194L) class BitSetN(val elems: Array[Long]) extends BitSet { protected def nwords = elems.length protected def word(idx: Int) = if (idx < nwords) elems(idx) else 0L diff --git a/src/library/scala/collection/immutable/HashMap.scala b/src/library/scala/collection/immutable/HashMap.scala index 32e0e97a04ac..87253ec6eaf7 100644 --- a/src/library/scala/collection/immutable/HashMap.scala +++ b/src/library/scala/collection/immutable/HashMap.scala @@ -347,6 +347,7 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int { } @deprecatedInheritance("This class will be made final in a future release.", "2.12.2") + @SerialVersionUID(4549809275616486327L) class HashMap1[A,+B](private[collection] val key: A, private[collection] val hash: Int, private[collection] val value: (B @uV), private[this] var kvOrNull: (A,B @uV)) extends HashMap[A,B] { override def size = 1 @@ -436,6 +437,7 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int { } } + @SerialVersionUID(-1917647429457579983L) private[collection] class HashMapCollision1[A, +B](private[collection] val hash: Int, val kvs: ListMap[A, B @uV]) extends HashMap[A, B @uV] { // assert(kvs.size > 1) @@ -551,6 +553,7 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int { } @deprecatedInheritance("This class will be made final in a future release.", "2.12.2") + @SerialVersionUID(834418348325321784L) class HashTrieMap[A, +B]( private[HashMap] var bitmap0: Int, private[HashMap] var elems0: Array[HashMap[A, B @uV]], diff --git a/src/library/scala/collection/immutable/HashSet.scala b/src/library/scala/collection/immutable/HashSet.scala index 61b2d641634b..808e56e2e7aa 100644 --- a/src/library/scala/collection/immutable/HashSet.scala +++ b/src/library/scala/collection/immutable/HashSet.scala @@ -259,8 +259,10 @@ object HashSet extends ImmutableSetFactory[HashSet] { /** * Common superclass of HashSet1 and HashSetCollision1, which are the two possible leaves of the Trie */ + @SerialVersionUID(-8788235040812980474L) private[HashSet] sealed abstract class LeafHashSet[A](private[HashSet] final val hash: Int) extends HashSet[A] + @SerialVersionUID(7828248784025959392L) class HashSet1[A](private[HashSet] val key: A, hash: Int) extends LeafHashSet[A](hash) { override def size = 1 @@ -333,6 +335,7 @@ object HashSet extends ImmutableSetFactory[HashSet] { override def foreach[U](f: A => U): Unit = f(key) } + @SerialVersionUID(-4499898620567995040L) private[immutable] class HashSetCollision1[A](hash: Int, val ks: ListSet[A], override val size: Int) extends LeafHashSet[A](hash) { override protected def get0(key: A, hash: Int, level: Int): Boolean = @@ -536,6 +539,7 @@ object HashSet extends ImmutableSetFactory[HashSet] { * elems: [a,b] * children: ---b----------------a----------- */ + @SerialVersionUID(-1260675327783828535L) class HashTrieSet[A](private[HashSet] var bitmap: Int, private[collection] var elems: Array[HashSet[A]], private[HashSet] var size0: Int) extends HashSet[A] { @inline override final def size = size0 diff --git a/src/library/scala/collection/immutable/IntMap.scala b/src/library/scala/collection/immutable/IntMap.scala index 7938747b372c..0cc63108e9f5 100644 --- a/src/library/scala/collection/immutable/IntMap.scala +++ b/src/library/scala/collection/immutable/IntMap.scala @@ -64,6 +64,7 @@ object IntMap { def apply[T](elems: (Int, T)*): IntMap[T] = elems.foldLeft(empty[T])((x, y) => x.updated(y._1, y._2)) + @SerialVersionUID(-9137650114085457282L) 
private[immutable] case object Nil extends IntMap[Nothing] { // Important! Without this equals method in place, an infinite // loop from Map.equals => size => pattern-match-on-Nil => equals @@ -76,11 +77,13 @@ object IntMap { } } + @SerialVersionUID(3302720273753906158L) private[immutable] case class Tip[+T](key: Int, value: T) extends IntMap[T]{ def withValue[S](s: S) = if (s.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this.asInstanceOf[IntMap.Tip[S]] else IntMap.Tip(key, s) } + @SerialVersionUID(-523093388545197183L) private[immutable] case class Bin[+T](prefix: Int, mask: Int, left: IntMap[T], right: IntMap[T]) extends IntMap[T] { def bin[S](left: IntMap[S], right: IntMap[S]): IntMap[S] = { if ((this.left eq left) && (this.right eq right)) this.asInstanceOf[IntMap.Bin[S]] diff --git a/src/library/scala/collection/immutable/LongMap.scala b/src/library/scala/collection/immutable/LongMap.scala index 2a5b89622b56..bef668f9c3f3 100644 --- a/src/library/scala/collection/immutable/LongMap.scala +++ b/src/library/scala/collection/immutable/LongMap.scala @@ -62,6 +62,7 @@ object LongMap { def apply[T](elems: (Long, T)*): LongMap[T] = elems.foldLeft(empty[T])((x, y) => x.updated(y._1, y._2)) +@SerialVersionUID(1224320979026293120L) private[immutable] case object Nil extends LongMap[Nothing] { // Important, don't remove this! See IntMap for explanation. override def equals(that : Any) = that match { @@ -71,11 +72,13 @@ object LongMap { } } +@SerialVersionUID(4938010434684160500L) private[immutable] case class Tip[+T](key: Long, value: T) extends LongMap[T] { def withValue[S](s: S) = if (s.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this.asInstanceOf[LongMap.Tip[S]] else LongMap.Tip(key, s) } +@SerialVersionUID(2433491195925361636L) private[immutable] case class Bin[+T](prefix: Long, mask: Long, left: LongMap[T], right: LongMap[T]) extends LongMap[T] { def bin[S](left: LongMap[S], right: LongMap[S]): LongMap[S] = { if ((this.left eq left) && (this.right eq right)) this.asInstanceOf[LongMap.Bin[S]] diff --git a/src/library/scala/collection/immutable/Map.scala b/src/library/scala/collection/immutable/Map.scala index cb34b761fc27..e1a5f9c31666 100644 --- a/src/library/scala/collection/immutable/Map.scala +++ b/src/library/scala/collection/immutable/Map.scala @@ -91,6 +91,7 @@ object Map extends ImmutableMapFactory[Map] { def empty[K, V]: Map[K, V] = EmptyMap.asInstanceOf[Map[K, V]] + @SerialVersionUID(-7464981207502461188L) class WithDefault[K, +V](underlying: Map[K, V], d: K => V) extends scala.collection.Map.WithDefault[K, V](underlying, d) with Map[K, V] { override def empty = new WithDefault(underlying.empty, d) override def updated[V1 >: V](key: K, value: V1): WithDefault[K, V1] = new WithDefault[K, V1](underlying.updated[V1](key, value), d) diff --git a/src/library/scala/collection/immutable/NumericRange.scala b/src/library/scala/collection/immutable/NumericRange.scala index d29c853f06b5..c14fb5ded778 100644 --- a/src/library/scala/collection/immutable/NumericRange.scala +++ b/src/library/scala/collection/immutable/NumericRange.scala @@ -39,6 +39,7 @@ package immutable * @define mayNotTerminateInf * @define willNotTerminateInf */ +@SerialVersionUID(-5580158174769432538L) abstract class NumericRange[T] (val start: T, val end: T, val step: T, val isInclusive: Boolean) (implicit num: Integral[T]) @@ -365,6 +366,7 @@ object NumericRange { } } + @SerialVersionUID(-5986512874781685419L) class Inclusive[T](start: T, end: T, step: T)(implicit num: Integral[T]) extends NumericRange(start, 
end, step, true) { def copy(start: T, end: T, step: T): Inclusive[T] = @@ -373,6 +375,7 @@ object NumericRange { def exclusive: Exclusive[T] = NumericRange(start, end, step) } + @SerialVersionUID(-7058074814271573640L) class Exclusive[T](start: T, end: T, step: T)(implicit num: Integral[T]) extends NumericRange(start, end, step, false) { def copy(start: T, end: T, step: T): Exclusive[T] = diff --git a/src/library/scala/collection/immutable/Range.scala b/src/library/scala/collection/immutable/Range.scala index 56b796656912..2d777b528761 100644 --- a/src/library/scala/collection/immutable/Range.scala +++ b/src/library/scala/collection/immutable/Range.scala @@ -439,6 +439,7 @@ object Range { def count(start: Int, end: Int, step: Int): Int = count(start, end, step, isInclusive = false) + @SerialVersionUID(4237131469519710909L) final class Inclusive(start: Int, end: Int, step: Int) extends Range(start, end, step) { // override def par = new ParRange(this) override def isInclusive = true diff --git a/src/library/scala/collection/immutable/SortedMap.scala b/src/library/scala/collection/immutable/SortedMap.scala index 59b1d833ab3a..e7b9b5722151 100644 --- a/src/library/scala/collection/immutable/SortedMap.scala +++ b/src/library/scala/collection/immutable/SortedMap.scala @@ -42,6 +42,7 @@ self => override def updated [B1 >: B](key: A, value: B1): SortedMap[A, B1] = this + ((key, value)) override def keySet: immutable.SortedSet[A] = new DefaultKeySortedSet + @SerialVersionUID(112809526508924148L) protected class DefaultKeySortedSet extends super.DefaultKeySortedSet with immutable.SortedSet[A] { override def + (elem: A): SortedSet[A] = if (this(elem)) this diff --git a/src/library/scala/collection/mutable/ArrayBuilder.scala b/src/library/scala/collection/mutable/ArrayBuilder.scala index 397a9d9eb2fd..bb342e1c2801 100644 --- a/src/library/scala/collection/mutable/ArrayBuilder.scala +++ b/src/library/scala/collection/mutable/ArrayBuilder.scala @@ -22,6 +22,7 @@ import scala.reflect.ClassTag * * @tparam T the type of the elements for the builder. */ +@SerialVersionUID(-4721309866680431208L) abstract class ArrayBuilder[T] extends ReusableBuilder[T, Array[T]] with Serializable /** A companion object for array builders. @@ -61,6 +62,7 @@ object ArrayBuilder { * * @tparam T type of elements for the array builder, subtype of `AnyRef` with a `ClassTag` context bound. */ + @SerialVersionUID(-8376727444766075941L) final class ofRef[T <: AnyRef : ClassTag] extends ArrayBuilder[T] { private var elems: Array[T] = _ @@ -126,6 +128,7 @@ object ArrayBuilder { } /** A class for array builders for arrays of `byte`s. It can be reused. */ + @SerialVersionUID(-3484148043254823366L) final class ofByte extends ArrayBuilder[Byte] { private var elems: Array[Byte] = _ @@ -191,6 +194,7 @@ object ArrayBuilder { } /** A class for array builders for arrays of `short`s. It can be reused. */ + @SerialVersionUID(3295904306819377609L) final class ofShort extends ArrayBuilder[Short] { private var elems: Array[Short] = _ @@ -256,6 +260,7 @@ object ArrayBuilder { } /** A class for array builders for arrays of `char`s. It can be reused. */ + @SerialVersionUID(-8284807600792805165L) final class ofChar extends ArrayBuilder[Char] { private var elems: Array[Char] = _ @@ -321,6 +326,7 @@ object ArrayBuilder { } /** A class for array builders for arrays of `int`s. It can be reused. 
*/ + @SerialVersionUID(-3033902589330485711L) final class ofInt extends ArrayBuilder[Int] { private var elems: Array[Int] = _ @@ -386,6 +392,7 @@ object ArrayBuilder { } /** A class for array builders for arrays of `long`s. It can be reused. */ + @SerialVersionUID(-4278005356053656861L) final class ofLong extends ArrayBuilder[Long] { private var elems: Array[Long] = _ @@ -451,6 +458,7 @@ object ArrayBuilder { } /** A class for array builders for arrays of `float`s. It can be reused. */ + @SerialVersionUID(-740775369715282824L) final class ofFloat extends ArrayBuilder[Float] { private var elems: Array[Float] = _ @@ -516,6 +524,7 @@ object ArrayBuilder { } /** A class for array builders for arrays of `double`s. It can be reused. */ + @SerialVersionUID(2549152794429074790L) final class ofDouble extends ArrayBuilder[Double] { private var elems: Array[Double] = _ @@ -581,6 +590,7 @@ object ArrayBuilder { } /** A class for array builders for arrays of `boolean`s. It can be reused. */ + @SerialVersionUID(-3574834070591819420L) class ofBoolean extends ArrayBuilder[Boolean] { private var elems: Array[Boolean] = _ @@ -646,6 +656,7 @@ object ArrayBuilder { } /** A class for array builders for arrays of `Unit` type. It can be reused. */ + @SerialVersionUID(1995804197797796249L) final class ofUnit extends ArrayBuilder[Unit] { private var size: Int = 0 diff --git a/src/library/scala/collection/mutable/DefaultEntry.scala b/src/library/scala/collection/mutable/DefaultEntry.scala index 6417b54ba41a..8c317e90949b 100644 --- a/src/library/scala/collection/mutable/DefaultEntry.scala +++ b/src/library/scala/collection/mutable/DefaultEntry.scala @@ -17,6 +17,7 @@ package mutable /** Class used internally for default map model. * @since 2.3 */ +@SerialVersionUID(-3856907690109104385L) final class DefaultEntry[A, B](val key: A, var value: B) extends HashEntry[A, DefaultEntry[A, B]] with Serializable { diff --git a/src/library/scala/collection/mutable/LinkedEntry.scala b/src/library/scala/collection/mutable/LinkedEntry.scala index 6828b51e366e..38c17806dbde 100644 --- a/src/library/scala/collection/mutable/LinkedEntry.scala +++ b/src/library/scala/collection/mutable/LinkedEntry.scala @@ -17,6 +17,7 @@ package mutable /** Class for the linked hash map entry, used internally. * @since 2.8 */ +@SerialVersionUID(-2671939643954900582L) final class LinkedEntry[A, B](val key: A, var value: B) extends HashEntry[A, LinkedEntry[A, B]] with Serializable { var earlier: LinkedEntry[A, B] = null diff --git a/src/library/scala/collection/mutable/LinkedHashSet.scala b/src/library/scala/collection/mutable/LinkedHashSet.scala index 369de0da1b41..af8f364f4a1e 100644 --- a/src/library/scala/collection/mutable/LinkedHashSet.scala +++ b/src/library/scala/collection/mutable/LinkedHashSet.scala @@ -143,6 +143,7 @@ object LinkedHashSet extends MutableSetFactory[LinkedHashSet] { /** Class for the linked hash set entry, used internally. 
* @since 2.10 */ + @SerialVersionUID(6056749505994053009L) private[scala] final class Entry[A](val key: A) extends HashEntry[A, Entry[A]] with Serializable { var earlier: Entry[A] = null var later: Entry[A] = null diff --git a/src/library/scala/collection/mutable/ListMap.scala b/src/library/scala/collection/mutable/ListMap.scala index 9857fae2c7f7..32473eeb2958 100644 --- a/src/library/scala/collection/mutable/ListMap.scala +++ b/src/library/scala/collection/mutable/ListMap.scala @@ -38,6 +38,7 @@ import annotation.tailrec * @define orderDependent * @define orderDependentFold */ +@SerialVersionUID(-3362098515407812442L) class ListMap[A, B] extends AbstractMap[A, B] with Map[A, B] diff --git a/src/library/scala/collection/mutable/LongMap.scala b/src/library/scala/collection/mutable/LongMap.scala index c5e898d6d3d6..35eefa1669af 100644 --- a/src/library/scala/collection/mutable/LongMap.scala +++ b/src/library/scala/collection/mutable/LongMap.scala @@ -36,6 +36,7 @@ import generic.CanBuildFrom * rapidly as 2^30 is approached. * */ +@SerialVersionUID(3311432836435989440L) final class LongMap[V] private[collection] (defaultEntry: Long => V, initialBufferSize: Int, initBlank: Boolean) extends AbstractMap[Long, V] with Map[Long, V] diff --git a/src/library/scala/collection/mutable/Map.scala b/src/library/scala/collection/mutable/Map.scala index ed84279c38dc..69185c1f1894 100644 --- a/src/library/scala/collection/mutable/Map.scala +++ b/src/library/scala/collection/mutable/Map.scala @@ -64,6 +64,7 @@ object Map extends MutableMapFactory[Map] { def empty[K, V]: Map[K, V] = new HashMap[K, V] + @SerialVersionUID(3886083557164597477L) class WithDefault[K, V](underlying: Map[K, V], d: K => V) extends scala.collection.Map.WithDefault(underlying, d) with Map[K, V] { override def += (kv: (K, V)) = {underlying += kv; this} def -= (key: K) = {underlying -= key; this} diff --git a/src/library/scala/collection/mutable/PriorityQueue.scala b/src/library/scala/collection/mutable/PriorityQueue.scala index c7b6f244c44a..3c70e0f371ad 100644 --- a/src/library/scala/collection/mutable/PriorityQueue.scala +++ b/src/library/scala/collection/mutable/PriorityQueue.scala @@ -55,6 +55,7 @@ import generic._ * @define mayNotTerminateInf * @define willNotTerminateInf */ +@SerialVersionUID(736425014438295802L) sealed class PriorityQueue[A](implicit val ord: Ordering[A]) extends AbstractIterable[A] with Iterable[A] @@ -67,6 +68,7 @@ sealed class PriorityQueue[A](implicit val ord: Ordering[A]) { import ord._ + @SerialVersionUID(3491656538574147683L) private class ResizableArrayAccess[A] extends AbstractSeq[A] with ResizableArray[A] with Serializable { def p_size0 = size0 def p_size0_=(s: Int) = size0 = s diff --git a/src/library/scala/collection/mutable/Queue.scala b/src/library/scala/collection/mutable/Queue.scala index df72aefc8372..40e742c2e922 100644 --- a/src/library/scala/collection/mutable/Queue.scala +++ b/src/library/scala/collection/mutable/Queue.scala @@ -32,6 +32,7 @@ import generic._ * @define mayNotTerminateInf * @define willNotTerminateInf */ +@SerialVersionUID(-5130636723247980089L) class Queue[A] extends MutableList[A] with LinearSeqOptimized[A, Queue[A]] diff --git a/src/library/scala/collection/mutable/WeakHashMap.scala b/src/library/scala/collection/mutable/WeakHashMap.scala index 2895904d51a8..02ee46d5762d 100644 --- a/src/library/scala/collection/mutable/WeakHashMap.scala +++ b/src/library/scala/collection/mutable/WeakHashMap.scala @@ -42,6 +42,7 @@ import convert.Wrappers._ * @define mayNotTerminateInf * 
@define willNotTerminateInf */ +@SerialVersionUID(-853182442555455877L) class WeakHashMap[A, B] extends JMapWrapper[A, B](new java.util.WeakHashMap) with JMapWrapperLike[A, B, WeakHashMap[A, B]] { override def empty = new WeakHashMap[A, B] diff --git a/src/library/scala/collection/mutable/WrappedArray.scala b/src/library/scala/collection/mutable/WrappedArray.scala index d635443f5127..bab94f8ae323 100644 --- a/src/library/scala/collection/mutable/WrappedArray.scala +++ b/src/library/scala/collection/mutable/WrappedArray.scala @@ -186,6 +186,7 @@ object WrappedArray { def newBuilder[A]: Builder[A, IndexedSeq[A]] = new ArrayBuffer + @SerialVersionUID(3456489343829468865L) final class ofRef[T <: AnyRef](val array: Array[T]) extends WrappedArray[T] with Serializable { def elemTag = ClassTag[T](array.getClass.getComponentType) def length: Int = array.length @@ -194,6 +195,7 @@ object WrappedArray { override def hashCode = MurmurHash3.wrappedArrayHash(array) } + @SerialVersionUID(-4502363748086738L) final class ofByte(val array: Array[Byte]) extends WrappedArray[Byte] with Serializable { def elemTag = ClassTag.Byte def length: Int = array.length @@ -206,6 +208,7 @@ object WrappedArray { } } + @SerialVersionUID(3569089221887297170L) final class ofShort(val array: Array[Short]) extends WrappedArray[Short] with Serializable { def elemTag = ClassTag.Short def length: Int = array.length @@ -218,6 +221,7 @@ object WrappedArray { } } + @SerialVersionUID(4353470320490138993L) final class ofChar(val array: Array[Char]) extends WrappedArray[Char] with Serializable { def elemTag = ClassTag.Char def length: Int = array.length @@ -230,6 +234,7 @@ object WrappedArray { } } + @SerialVersionUID(-3796494337148298008L) final class ofInt(val array: Array[Int]) extends WrappedArray[Int] with Serializable { def elemTag = ClassTag.Int def length: Int = array.length @@ -242,6 +247,7 @@ object WrappedArray { } } + @SerialVersionUID(7604729449860217276L) final class ofLong(val array: Array[Long]) extends WrappedArray[Long] with Serializable { def elemTag = ClassTag.Long def length: Int = array.length @@ -254,6 +260,7 @@ object WrappedArray { } } + @SerialVersionUID(-5070075925231686368L) final class ofFloat(val array: Array[Float]) extends WrappedArray[Float] with Serializable { def elemTag = ClassTag.Float def length: Int = array.length @@ -266,6 +273,7 @@ object WrappedArray { } } + @SerialVersionUID(6556610635003622495L) final class ofDouble(val array: Array[Double]) extends WrappedArray[Double] with Serializable { def elemTag = ClassTag.Double def length: Int = array.length @@ -278,6 +286,7 @@ object WrappedArray { } } + @SerialVersionUID(-4835600351252182105L) final class ofBoolean(val array: Array[Boolean]) extends WrappedArray[Boolean] with Serializable { def elemTag = ClassTag.Boolean def length: Int = array.length @@ -290,6 +299,7 @@ object WrappedArray { } } + @SerialVersionUID(3443664051778905707L) final class ofUnit(val array: Array[Unit]) extends WrappedArray[Unit] with Serializable { def elemTag = ClassTag.Unit def length: Int = array.length From d12ab3800ca940cb042f018dc85c28cb5bf2db03 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Wed, 3 Jun 2020 17:20:03 +0100 Subject: [PATCH 007/769] RIP icode --- project/ScalaOptionParser.scala | 4 +- .../nsc/backend/jvm/BCodeIdiomatic.scala | 2 +- src/manual/scala/man1/scalac.scala | 6 -- .../scala/tools/partest/IcodeComparison.scala | 85 ------------------- test/files/run/icode-reader-dead-code.scala | 2 +- 5 files changed, 4 insertions(+), 95 deletions(-) delete 
mode 100644 src/partest-extras/scala/tools/partest/IcodeComparison.scala diff --git a/project/ScalaOptionParser.scala b/project/ScalaOptionParser.scala index a5cbb35dde40..371a951c13f4 100644 --- a/project/ScalaOptionParser.scala +++ b/project/ScalaOptionParser.scala @@ -96,8 +96,8 @@ object ScalaOptionParser { private def stringSettingNames = List("-Xgenerate-phase-graph", "-Xmain-class", "-Xpluginsdir", "-Xshow-class", "-Xshow-object", "-Xsource-reader", "-Ydump-classes", "-Ygen-asmp", "-Ypresentation-log", "-Ypresentation-replay", "-Yrepl-outdir", "-d", "-dependencyfile", "-encoding", "-Xscript") private def pathSettingNames = List("-bootclasspath", "-classpath", "-extdirs", "-javabootclasspath", "-javaextdirs", "-sourcepath", "-toolcp") - private val phases = List("all", "parser", "namer", "packageobjects", "typer", "patmat", "superaccessors", "extmethods", "pickler", "refchecks", "uncurry", "tailcalls", "specialize", "explicitouter", "erasure", "posterasure", "fields", "lambdalift", "constructors", "flatten", "mixin", "cleanup", "delambdafy", "icode", "jvm", "terminal") - private val phaseSettings = List("-Xprint-icode", "-Ystop-after", "-Yskip", "-Yshow", "-Ystop-before", "-Ybrowse", "-Ylog", "-Ycheck", "-Xprint", "-Yvalidate-pos") + private val phases = List("all", "parser", "namer", "packageobjects", "typer", "patmat", "superaccessors", "extmethods", "pickler", "refchecks", "uncurry", "tailcalls", "specialize", "explicitouter", "erasure", "posterasure", "fields", "lambdalift", "constructors", "flatten", "mixin", "cleanup", "delambdafy", "jvm", "terminal") + private val phaseSettings = List("-Ystop-after", "-Yskip", "-Yshow", "-Ystop-before", "-Ybrowse", "-Ylog", "-Ycheck", "-Xprint", "-Yvalidate-pos") private def multiStringSettingNames = List("-Xmacro-settings", "-Xplugin", "-Xplugin-disable", "-Xplugin-require", "-Ywarn-unused") private def intSettingNames = List("-Xmax-classfile-name", "-Xelide-below", "-Ypatmat-exhaust-depth", "-Ypresentation-delay", "-Yrecursion") private def choiceSettingNames = Map[String, List[String]]( diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala index ff609672cb89..d0addf70dfca 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala @@ -95,7 +95,7 @@ abstract class BCodeIdiomatic { /* Just a namespace for utilities that encapsulate MethodVisitor idioms. * In the ASM world, org.objectweb.asm.commons.InstructionAdapter plays a similar role, - * but the methods here allow choosing when to transition from ICode to ASM types + * but the methods here allow choosing when to transition from BType to ASM types * (including not at all, e.g. for performance). 
*/ abstract class JCodeMethodN { diff --git a/src/manual/scala/man1/scalac.scala b/src/manual/scala/man1/scalac.scala index c5046416abea..1a2f864077c9 100644 --- a/src/manual/scala/man1/scalac.scala +++ b/src/manual/scala/man1/scalac.scala @@ -274,9 +274,6 @@ object scalac extends Command { Definition( CmdOptionBound("Xprint:", Argument("phases")), "Print out program after " & Argument("phases") & " (see below)."), - Definition( - CmdOptionBound("Xprint-icode", "[:" & Argument("phases") & "]"), - "Log internal icode to *.icode files after" & Argument("phases") & " (default: icode)."), Definition( CmdOption("Xprint-pos"), "Print tree positions, as offsets."), @@ -399,9 +396,6 @@ object scalac extends Command { Definition( MItalic("delambdafy"), "remove lambdas"), - Definition( - MItalic("icode"), - "generate portable intermediate code"), Definition( MItalic("inliner"), "optimization: do inlining"), diff --git a/src/partest-extras/scala/tools/partest/IcodeComparison.scala b/src/partest-extras/scala/tools/partest/IcodeComparison.scala deleted file mode 100644 index 7c4c46628800..000000000000 --- a/src/partest-extras/scala/tools/partest/IcodeComparison.scala +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.tools.partest - -import scala.tools.partest.nest.FileManager.compareContents - -/** A class for testing icode. All you need is this in a - * partest source file -- - * {{{ - * object Test extends IcodeComparison - * }}} - * -- and the generated output will be the icode for everything - * in that file. See scaladoc for possible customizations. - * TODO promote me to partest - */ -abstract class IcodeComparison extends DirectTest { - /** The phase after which icode is printed. - * Override to check icode at a different point, - * but you can't print at a phase that is not enabled - * in this compiler run. Defaults to "icode". - */ - def printIcodeAfterPhase = "icode" - - /** When comparing the output of two phases, this is - * the other phase of interest, normally the preceding - * phase. Defaults to "icode" for tests of optimizer phases. - */ - def printSuboptimalIcodeAfterPhase = "icode" - - /** The source code to compile defaults to the test file. - * I.e., the test file compiles itself. For a comparison, - * the test file will be compiled three times. - */ - def code = testPath.slurp() - - /** By default, the test code is compiled with -usejavacp. */ - override def extraSettings: String = "-usejavacp" - - /** Compile the test code and return the contents of all - * (sorted) .icode files, which are immediately deleted. - * @param arg0 at least one arg is required - * @param args must include -Xprint-icode:phase - */ - def collectIcode(arg0: String, args: String*): List[String] = { - compile("-d" :: testOutput.path :: arg0 :: args.toList : _*) - val icodeFiles = testOutput.files.toList filter (_ hasExtension "icode") - - // Some methods in scala.reflect.io.File leak an InputStream, leaving the underlying file open. - // Windows won't delete an open file, but we must ensure the files get deleted, since the logic - // here depends on it (collectIcode will be called multiple times, and we can't allow crosstalk - // between calls). 
So we are careful to use `slurp` which does call `close`, and careful to - // check that `delete` returns true indicating successful deletion. - try icodeFiles sortBy (_.name) flatMap (f => f.slurp().linesIterator.toList) - finally icodeFiles foreach (f => require(f.delete())) - } - - /** Collect icode at the default phase, `printIcodeAfterPhase`. */ - def collectIcode(): List[String] = collectIcode(s"-Xprint-icode:$printIcodeAfterPhase") - - /** Default show is showComparison. May be overridden for showIcode or similar. */ - def show() = showComparison() - - /** Compile the test code with and without optimization, and - * then print the diff of the icode. - */ - def showComparison() = { - val lines1 = collectIcode(s"-Xprint-icode:$printSuboptimalIcodeAfterPhase") - val lines2 = collectIcode("-optimise", s"-Xprint-icode:$printIcodeAfterPhase") - - println(compareContents(lines1, lines2)) - } - - /** Print icode at the default phase, `printIcodeAfterPhase`. */ - def showIcode() = println(collectIcode() mkString EOL) -} diff --git a/test/files/run/icode-reader-dead-code.scala b/test/files/run/icode-reader-dead-code.scala index 9c4f62289ce1..c113a183a679 100644 --- a/test/files/run/icode-reader-dead-code.scala +++ b/test/files/run/icode-reader-dead-code.scala @@ -7,7 +7,7 @@ import scala.tools.partest.DirectTest import scala.collection.JavaConverters._ /** - * Test that the ICodeReader does not crash if the bytecode of a method has unreachable code. + * Test that ClassReader does not crash if the bytecode of a method has unreachable code. */ object Test extends DirectTest { def code: String = ??? From f6c698f382c5dfc345a7755146e52dc42cdb0a63 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Mon, 22 Feb 2021 15:27:54 -0800 Subject: [PATCH 008/769] 2.13.6 is next; re-STARR on 2.13.5 --- build.sbt | 2 +- project/MimaFilters.scala | 14 +------------- versions.properties | 2 +- 3 files changed, 3 insertions(+), 15 deletions(-) diff --git a/build.sbt b/build.sbt index 2a50ba4111d6..a02c8ab62674 100644 --- a/build.sbt +++ b/build.sbt @@ -70,7 +70,7 @@ lazy val publishSettings : Seq[Setting[_]] = Seq( // should not be set directly. It is the same as the Maven version and derived automatically from `baseVersion` and // `baseVersionSuffix`. 
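The comment above is the whole contract here: only `baseVersion` and `baseVersionSuffix` are meant to be edited, and the published Maven version is derived from them by the build's version plumbing. A minimal sketch of that derivation, assuming nothing beyond the two settings named in the comment (the actual build logic is more involved and may fold in extra details such as a timestamp or git SHA):

    // Illustrative only -- not the actual build code.
    def derivedVersion(base: String, suffix: String): String =
      if (suffix.isEmpty) base            // a final release, e.g. "2.13.6"
      else s"$base-$suffix"               // e.g. "2.13.6-SNAPSHOT"

    assert(derivedVersion("2.13.6", "SNAPSHOT") == "2.13.6-SNAPSHOT")
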
globalVersionSettings -Global / baseVersion := "2.13.5" +Global / baseVersion := "2.13.6" Global / baseVersionSuffix := "SNAPSHOT" ThisBuild / organization := "org.scala-lang" ThisBuild / homepage := Some(url("https://www.scala-lang.org")) diff --git a/project/MimaFilters.scala b/project/MimaFilters.scala index b6e61976b735..d0313ad8a3ff 100644 --- a/project/MimaFilters.scala +++ b/project/MimaFilters.scala @@ -13,7 +13,7 @@ object MimaFilters extends AutoPlugin { import autoImport._ override val globalSettings = Seq( - mimaReferenceVersion := Some("2.13.4"), + mimaReferenceVersion := Some("2.13.5"), ) val mimaFilters: Seq[ProblemFilter] = Seq[ProblemFilter]( @@ -25,18 +25,6 @@ object MimaFilters extends AutoPlugin { // don't publish the artifact built with JDK 11 anyways ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.convert.JavaCollectionWrappers#IteratorWrapper.asIterator"), - // #9425 Node is private[collection] - ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.mutable.HashMap#Node.foreachEntry"), - - // #9487 - ProblemFilters.exclude[MissingClassProblem]("scala.reflect.ClassTag$cache$"), - - ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.immutable.RedBlackTree#Tree.redWithRight"), - ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.immutable.RedBlackTree#Tree.redWithLeftRight"), - ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.immutable.RedBlackTree#Tree.blackWithLeftRight"), - ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.immutable.RedBlackTree#Tree.redWithLeft"), - ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.immutable.RedBlackTree.partitionKeys"), - ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.immutable.RedBlackTree.filterKeys"), ) override val buildSettings = Seq( diff --git a/versions.properties b/versions.properties index 7ece1c784286..e9902399194f 100644 --- a/versions.properties +++ b/versions.properties @@ -1,5 +1,5 @@ # Scala version used for bootstrapping (see README.md) -starr.version=2.13.4 +starr.version=2.13.5 # These are the versions of the modules that go with this release. 
# Artifact dependencies: From 842c249b2185bb84e83274c14570e6334bb8c9ba Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Tue, 23 Feb 2021 09:24:09 -0800 Subject: [PATCH 009/769] Use -s option to decode junit test names --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index a02c8ab62674..0be87b0f2c5e 100644 --- a/build.sbt +++ b/build.sbt @@ -710,7 +710,7 @@ lazy val junit = project.in(file("test") / "junit") ), Compile / javacOptions ++= Seq("-Xlint"), libraryDependencies ++= Seq(junitInterfaceDep, jolDep, diffUtilsDep), - testOptions += Tests.Argument(TestFrameworks.JUnit, "-a", "-v"), + testOptions += Tests.Argument(TestFrameworks.JUnit, "-a", "-v", "-s"), Compile / unmanagedSourceDirectories := Nil, Test / unmanagedSourceDirectories := List(baseDirectory.value) ) From a2e187e5b719d613c19ae011137b6b00014fef39 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E6=A2=A6=E5=A2=83=E8=BF=B7=E7=A6=BB?= Date: Thu, 25 Feb 2021 11:43:53 +0800 Subject: [PATCH 010/769] Fixes scala/bug#12264 --- .../tools/nsc/interpreter/jline/Reader.scala | 18 +++++----- .../nsc/interpreter/shell/Completion.scala | 6 ++-- .../tools/nsc/interpreter/shell/ILoop.scala | 12 +++---- .../nsc/interpreter/shell/LoopCommands.scala | 4 +-- .../interpreter/shell/ReplCompletion.scala | 16 ++++----- .../nsc/interpreter/CompletionTest.scala | 36 +++++++++++++++++-- 6 files changed, 61 insertions(+), 31 deletions(-) diff --git a/src/repl-frontend/scala/tools/nsc/interpreter/jline/Reader.scala b/src/repl-frontend/scala/tools/nsc/interpreter/jline/Reader.scala index ce881c849895..d03cb7c83de8 100644 --- a/src/repl-frontend/scala/tools/nsc/interpreter/jline/Reader.scala +++ b/src/repl-frontend/scala/tools/nsc/interpreter/jline/Reader.scala @@ -13,14 +13,13 @@ package scala.tools.nsc.interpreter package jline -import java.util.{List => JList} - -import org.jline.reader.{Candidate, Completer, CompletingParsedLine, EOFError, EndOfFileException, History, LineReader, ParsedLine, Parser, SyntaxError, UserInterruptException} +import org.jline.reader.Parser.ParseContext import org.jline.reader.impl.{DefaultParser, LineReaderImpl} +import org.jline.reader._ import org.jline.terminal.Terminal -import shell.{Accumulator, ShellConfig} -import Parser.ParseContext +import java.util.{List => JList} +import scala.tools.nsc.interpreter.shell.{Accumulator, ShellConfig} /** A Reader that delegates to JLine3. 
*/ @@ -109,7 +108,8 @@ object Reader { } } def backupHistory(): Unit = { - import java.nio.file.{Files, Paths, StandardCopyOption}, StandardCopyOption.REPLACE_EXISTING + import java.nio.file.{Files, Paths, StandardCopyOption} + import StandardCopyOption.REPLACE_EXISTING val hf = Paths.get(config.historyFile) val bk = Paths.get(config.historyFile + ".bk") Files.move(/*source =*/ hf, /*target =*/ bk, REPLACE_EXISTING) @@ -229,8 +229,8 @@ class Completion(delegate: shell.Completion) extends shell.Completion with Compl // JLine Completer def complete(lineReader: LineReader, parsedLine: ParsedLine, newCandidates: JList[Candidate]): Unit = { - def candidateForResult(cc: CompletionCandidate): Candidate = { - val value = cc.defString + def candidateForResult(line: String, cc: CompletionCandidate): Candidate = { + val value = if (line.startsWith(":")) ":" + cc.defString else cc.defString val displayed = cc.defString + (cc.arity match { case CompletionCandidate.Nullary => "" case CompletionCandidate.Nilary => "()" @@ -263,7 +263,7 @@ class Completion(delegate: shell.Completion) extends shell.Completion with Compl // normal completion case _ => for (cc <- result.candidates) - newCandidates.add(candidateForResult(cc)) + newCandidates.add(candidateForResult(result.line, cc)) } } } diff --git a/src/repl-frontend/scala/tools/nsc/interpreter/shell/Completion.scala b/src/repl-frontend/scala/tools/nsc/interpreter/shell/Completion.scala index 6e5585d86596..17f8c72eb57e 100644 --- a/src/repl-frontend/scala/tools/nsc/interpreter/shell/Completion.scala +++ b/src/repl-frontend/scala/tools/nsc/interpreter/shell/Completion.scala @@ -20,16 +20,16 @@ object NoCompletion extends Completion { def complete(buffer: String, cursor: Int) = NoCompletions } -case class CompletionResult(cursor: Int, candidates: List[CompletionCandidate]) { +case class CompletionResult(line: String, cursor: Int, candidates: List[CompletionCandidate]) { final def orElse(other: => CompletionResult): CompletionResult = if (candidates.nonEmpty) this else other } object CompletionResult { val empty: CompletionResult = NoCompletions } -object NoCompletions extends CompletionResult(-1, Nil) +object NoCompletions extends CompletionResult("", -1, Nil) case class MultiCompletion(underlying: Completion*) extends Completion { override def complete(buffer: String, cursor: Int) = - underlying.foldLeft(CompletionResult.empty)((r,c) => r.orElse(c.complete(buffer, cursor))) + underlying.foldLeft(CompletionResult.empty)((r, c) => r.orElse(c.complete(buffer, cursor))) } diff --git a/src/repl-frontend/scala/tools/nsc/interpreter/shell/ILoop.scala b/src/repl-frontend/scala/tools/nsc/interpreter/shell/ILoop.scala index b925c595a819..202e36b25450 100644 --- a/src/repl-frontend/scala/tools/nsc/interpreter/shell/ILoop.scala +++ b/src/repl-frontend/scala/tools/nsc/interpreter/shell/ILoop.scala @@ -223,15 +223,15 @@ class ILoop(config: ShellConfig, inOverride: BufferedReader = null, val emptyWord = """(\s+)$""".r.unanchored val directorily = """(\S*/)$""".r.unanchored val trailingWord = """(\S+)$""".r.unanchored - def listed(i: Int, dir: Option[Path]) = + def listed(buffer: String, i: Int, dir: Option[Path]) = dir.filter(_.isDirectory) - .map(d => CompletionResult(i, d.toDirectory.list.map(x => CompletionCandidate(x.name)).toList)) + .map(d => CompletionResult(buffer, i, d.toDirectory.list.map(x => CompletionCandidate(x.name)).toList)) .getOrElse(NoCompletions) def listedIn(dir: Directory, name: String) = dir.list.filter(_.name.startsWith(name)).map(_.name).toList def 
complete(buffer: String, cursor: Int): CompletionResult = buffer.substring(0, cursor) match { - case emptyWord(s) => listed(cursor, Directory.Current) - case directorily(s) => listed(cursor, Option(Path(s))) + case emptyWord(s) => listed(buffer, cursor, Directory.Current) + case directorily(s) => listed(buffer, cursor, Option(Path(s))) case trailingWord(s) => val f = File(s) val (i, maybes) = @@ -239,7 +239,7 @@ class ILoop(config: ShellConfig, inOverride: BufferedReader = null, else if (f.isDirectory) (cursor - s.length, List(s"${f.toAbsolute.path}/")) else if (f.parent.exists) (cursor - f.name.length, listedIn(f.parent.toDirectory, f.name)) else (-1, Nil) - if (maybes.isEmpty) NoCompletions else CompletionResult(i, maybes.map(CompletionCandidate(_))) + if (maybes.isEmpty) NoCompletions else CompletionResult(buffer, i, maybes.map(CompletionCandidate(_))) case _ => NoCompletions } } @@ -253,7 +253,7 @@ class ILoop(config: ShellConfig, inOverride: BufferedReader = null, val maybes = intp.visibleSettings.filter(_.name.startsWith(s)).map(_.name) .filterNot(cond(_) { case "-"|"-X"|"-Y" => true }).sorted if (maybes.isEmpty) NoCompletions - else CompletionResult(cursor - s.length, maybes.map(CompletionCandidate(_))) + else CompletionResult(buffer, cursor - s.length, maybes.map(CompletionCandidate(_))) case _ => NoCompletions } } diff --git a/src/repl-frontend/scala/tools/nsc/interpreter/shell/LoopCommands.scala b/src/repl-frontend/scala/tools/nsc/interpreter/shell/LoopCommands.scala index b2540e4817bd..1063971b5f2b 100644 --- a/src/repl-frontend/scala/tools/nsc/interpreter/shell/LoopCommands.scala +++ b/src/repl-frontend/scala/tools/nsc/interpreter/shell/LoopCommands.scala @@ -138,12 +138,12 @@ trait LoopCommands { val completion = if (cmd.isInstanceOf[NullaryCmd] || cursor < line.length) cmd.name else cmd.name + " " new Completion { def complete(buffer: String, cursor: Int) = - CompletionResult(cursor = 1, List(CompletionCandidate(completion))) + CompletionResult(buffer, cursor = 1, List(CompletionCandidate(completion))) } case cmd :: rest => new Completion { def complete(buffer: String, cursor: Int) = - CompletionResult(cursor = 1, cmds.map(cmd => CompletionCandidate(cmd.name))) + CompletionResult(buffer, cursor = 1, cmds.map(cmd => CompletionCandidate(cmd.name))) } } case _ => NoCompletion diff --git a/src/repl-frontend/scala/tools/nsc/interpreter/shell/ReplCompletion.scala b/src/repl-frontend/scala/tools/nsc/interpreter/shell/ReplCompletion.scala index 8d0959e833c1..3baa8d1a66e0 100644 --- a/src/repl-frontend/scala/tools/nsc/interpreter/shell/ReplCompletion.scala +++ b/src/repl-frontend/scala/tools/nsc/interpreter/shell/ReplCompletion.scala @@ -14,8 +14,6 @@ package scala.tools.nsc.interpreter package shell import scala.util.control.NonFatal -import scala.tools.nsc.interpreter.Repl -import scala.tools.nsc.interpreter.Naming /** Completion for the REPL. 
*/ @@ -50,17 +48,17 @@ class ReplCompletion(intp: Repl, val accumulator: Accumulator = new Accumulator) case Left(_) => NoCompletions case Right(result) => try { buf match { - case slashPrint() if cursor == buf.length => - CompletionResult(cursor, CompletionCandidate.fromStrings("" :: Naming.unmangle(result.print) :: Nil)) - case slashPrintRaw() if cursor == buf.length => - CompletionResult(cursor, CompletionCandidate.fromStrings("" :: result.print :: Nil)) + case slashPrint() if cursor == buf.length => + CompletionResult(buf, cursor, CompletionCandidate.fromStrings("" :: Naming.unmangle(result.print) :: Nil)) + case slashPrintRaw() if cursor == buf.length => + CompletionResult(buf, cursor, CompletionCandidate.fromStrings("" :: result.print :: Nil)) case slashTypeAt(start, end) if cursor == buf.length => - CompletionResult(cursor, CompletionCandidate.fromStrings("" :: result.typeAt(start.toInt, end.toInt) :: Nil)) - case _ => + CompletionResult(buf, cursor, CompletionCandidate.fromStrings("" :: result.typeAt(start.toInt, end.toInt) :: Nil)) + case _ => // under JLine 3, we no longer use the tabCount concept, so tabCount is always 1 // which always gives us all completions val (c, r) = result.completionCandidates(tabCount = 1) - CompletionResult(c, r) + CompletionResult(buf, c, r) } } finally result.cleanup() } diff --git a/test/junit/scala/tools/nsc/interpreter/CompletionTest.scala b/test/junit/scala/tools/nsc/interpreter/CompletionTest.scala index b1938a55a553..0ce5a40ab4f8 100644 --- a/test/junit/scala/tools/nsc/interpreter/CompletionTest.scala +++ b/test/junit/scala/tools/nsc/interpreter/CompletionTest.scala @@ -1,10 +1,9 @@ package scala.tools.nsc.interpreter -import java.io.{PrintWriter, StringWriter} - import org.junit.Assert.{assertEquals, assertTrue} import org.junit.Test +import java.io.{PrintWriter, StringWriter} import scala.reflect.internal.util.{BatchSourceFile, SourceFile} import scala.tools.nsc.Settings import scala.tools.nsc.interpreter.shell._ @@ -36,6 +35,28 @@ class CompletionTest { (completer, intp, acc) } + private def commandInterpretLines(): (Completion, Repl, Accumulator) = { + val intp = newIMain() + class CommandMock extends LoopCommands { + override protected def echo(msg: String): Unit = ??? + override protected def out: PrintWriter = ??? + override def commands: List[LoopCommand] = { + val default = (string: String) => Result.default + List( + LoopCommand.cmd("paste", "[-raw] [path]", "enter paste mode or paste a file", default), + LoopCommand.cmd("paste", "[-raw] [path]", "enter paste mode or paste a file", default)// Other commands + ) + } + } + val acc = new Accumulator + val shellCompletion = new Completion { + override def complete(buffer: String, cursor: Int) = + if (buffer.startsWith(":")) new CommandMock().colonCompletion(buffer, cursor).complete(buffer, cursor) + else NoCompletions + } + (shellCompletion, intp, acc) + } + implicit class BeforeAfterCompletion(completion: Completion) { def complete(before: String, after: String = ""): CompletionResult = completion.complete(before + after, before.length) @@ -231,6 +252,17 @@ class CompletionTest { assertTrue(candidates2.last.defString.contains("deprecated")) } + @Test + def jline3Matcher(): Unit = { + val (completer, _, _) = commandInterpretLines() + val candidates1 = completer.complete(":p").candidates + assertEquals(2, candidates1.size) + + // Save the line to the CompletionResult of the matcher, and select the command to match successfully. 
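What makes this pass is visible in the Reader change earlier in this patch: a `CompletionResult` now remembers the buffer (`line`) it was computed for, and the JLine adapter re-prefixes candidate values with ":" when that buffer is a colon command, so JLine's line-based matcher still lines the candidate up with the typed `:p`. The assertions that follow in this test check exactly that. A self-contained sketch of the idea, using a hypothetical `SimpleCandidate` stand-in rather than the real `org.jline.reader.Candidate`:

    // Sketch only: mirrors the "re-add the colon" logic, not the actual REPL classes.
    final case class SimpleCandidate(value: String, display: String)

    def candidateFor(line: String, defString: String): SimpleCandidate = {
      // keep the leading ':' on the value when completing a colon command, so the
      // matcher can relate the candidate ("paste") to what was actually typed (":p")
      val value = if (line.startsWith(":")) ":" + defString else defString
      SimpleCandidate(value, defString)
    }

    assert(candidateFor(":p", "paste").value == ":paste")
    assert(candidateFor("List(1).ma", "map").value == "map")
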
+ val completionResult = completer.complete(":p") + assertEquals(completionResult.line, ":p") + } + @Test def isNotDeprecated(): Unit = { val (completer, _, _) = interpretLines( From 7f7bcd58dce5f8001ae814b61b4ef60dc89d21ea Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Wed, 22 Jul 2020 13:36:10 +0100 Subject: [PATCH 011/769] Remove trailing references/infra for virtpartmat --- .../tools/nsc/transform/ExplicitOuter.scala | 2 +- .../tools/nsc/transform/patmat/Logic.scala | 2 +- .../nsc/transform/patmat/MatchCodeGen.scala | 12 ++-- .../transform/patmat/MatchOptimization.scala | 2 +- .../transform/patmat/MatchTranslation.scala | 26 +++------ .../transform/patmat/MatchTreeMaking.scala | 5 +- .../transform/patmat/PatternMatching.scala | 55 ------------------- .../tools/nsc/typechecker/Duplicators.scala | 4 +- .../scala/tools/nsc/typechecker/Typers.scala | 5 +- .../interactive/tests/core/CoreTestDefs.scala | 6 +- .../scala/reflect/internal/StdNames.scala | 2 +- test/files/presentation/t8941/Runner.scala | 10 +--- test/files/run/t3835.scala | 3 - 13 files changed, 25 insertions(+), 109 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala index 8be86de53038..bb2778927539 100644 --- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala +++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala @@ -465,7 +465,7 @@ abstract class ExplicitOuter extends InfoTransform }) super.transform(treeCopy.Apply(tree, sel, outerVal :: args)) - // for the new pattern matcher + // for the pattern matcher // base..eq(o) --> base.$outer().eq(o) if there's an accessor, else the whole tree becomes TRUE // TODO remove the synthetic `` method from outerFor?? case Apply(eqsel@Select(eqapp@Apply(sel@Select(base, nme.OUTER_SYNTH), Nil), eq), args) => diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala index a575a4c933e7..ba149513d267 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala @@ -858,7 +858,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis { else ConstantType(c) case Ident(_) if p.symbol.isStable => // for Idents, can encode uniqueness of symbol as uniqueness of the corresponding singleton type - // for Selects, which are handled by the next case, the prefix of the select varies independently of the symbol (see pos/virtpatmat_unreach_select.scala) + // for Selects, which are handled by the next case, the prefix of the select varies independently of the symbol (see neg/virtpatmat_unreach_select.scala) singleType(tp.prefix, p.symbol) case _ => Const.uniqueTpForTree(p) diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala index 46d4b4784cc8..13351a89b7db 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala @@ -14,11 +14,7 @@ package scala.tools.nsc.transform.patmat import scala.tools.nsc.symtab.Flags.SYNTHETIC -/** Factory methods used by TreeMakers to make the actual trees. - * - * We have two modes in which to emit trees: optimized (the default) - * and pure (aka "virtualized": match is parametric in its monad). - */ +/** Factory methods used by TreeMakers to make the actual trees. 
*/ trait MatchCodeGen extends Interface { import global._ @@ -27,7 +23,7 @@ trait MatchCodeGen extends Interface { /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// trait CodegenCore extends MatchMonadInterface { private var ctr = 0 - def freshName(prefix: String) = {ctr += 1; vpmName.counted(prefix, ctr)} + def freshName(prefix: String) = { ctr += 1; newTermName(s"$prefix$ctr") } // assert(owner ne null); assert(owner ne NoSymbol) def freshSym(pos: Position, tp: Type = NoType, prefix: String = "x") = @@ -164,8 +160,8 @@ trait MatchCodeGen extends Interface { ValDef(prevSym, prev), // must be isEmpty and get as we don't control the target of the call (prev is an extractor call) ifThenElseZero( - NOT(prevSym DOT vpmName.isEmpty), - Substitution(b, prevSym DOT vpmName.get)(next) + NOT(prevSym DOT nme.isEmpty), + Substitution(b, prevSym DOT nme.get)(next) ) ) } diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala index f94b457ce70f..615dbe26cf20 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala @@ -160,7 +160,7 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis { } // TODO: finer-grained duplication - def chainBefore(next: Tree)(casegen: Casegen): Tree = // assert(codegen eq optimizedCodegen) + def chainBefore(next: Tree)(casegen: Casegen): Tree = atPos(pos)(casegen.flatMapCondStored(cond, storedCond, res, nextBinder, substitution(next).duplicate)) override def toString = "Memo"+((nextBinder.name, storedCond.name, cond, res, substitution)) diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala index 64cde9496d45..280ff053e26f 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala @@ -187,15 +187,9 @@ trait MatchTranslation { override def toString = if (subpatterns.isEmpty) "" else subpatterns.mkString("(", ", ", ")") } - /** Implement a pattern match by turning its cases (including the implicit failure case) - * into the corresponding (monadic) extractors, and combining them with the `orElse` combinator. - * - * For `scrutinee match { case1 ... caseN }`, the resulting tree has the shape - * `runOrElse(scrutinee)(x => translateCase1(x).orElse(translateCase2(x)).....orElse(zero))` - * - * NOTE: the resulting tree is not type checked, nor are nested pattern matches transformed + /** NOTE: the resulting tree is not type checked, nor are nested pattern matches transformed * thus, you must typecheck the result (and that will in turn translate nested matches) - * this could probably optimized... (but note that the matchStrategy must be solved for each nested patternmatch) + * this could probably be optimized... 
*/ def translateMatch(match_ : Match): Tree = { val Match(selector, cases) = match_ @@ -226,7 +220,7 @@ trait MatchTranslation { val pt = repeatedToSeq(origPt) // val packedPt = repeatedToSeq(typer.packedType(match_, context.owner)) - val selectorSym = freshSym(selector.pos, pureType(selectorTp)) setFlag treeInfo.SYNTH_CASE_FLAGS + val selectorSym = freshSym(selector.pos, selectorTp) setFlag treeInfo.SYNTH_CASE_FLAGS // pt = Any* occurs when compiling test/files/pos/annotDepMethType.scala val combined = combineCases(selector, selectorSym, nonSyntheticCases map translateCase(selectorSym, pt), pt, selectorPos, matchOwner, defaultOverride) @@ -255,7 +249,7 @@ trait MatchTranslation { val bindersAndCases = caseDefs.map(_.duplicate) map { caseDef => // generate a fresh symbol for each case, hoping we'll end up emitting a type-switch (we don't have a global scrut there) // if we fail to emit a fine-grained switch, have to do translateCase again with a single scrutSym (TODO: uniformize substitution on treemakers so we can avoid this) - val caseScrutSym = freshSym(caseDef.pat.pos, pureType(ThrowableTpe)) + val caseScrutSym = freshSym(caseDef.pat.pos, ThrowableTpe) (caseScrutSym, propagateSubstitution(translateCase(caseScrutSym, pt)(caseDef), EmptySubstitution)) } @@ -265,10 +259,10 @@ trait MatchTranslation { } val catches = if (swatches.nonEmpty) swatches else { - val scrutSym = freshSym(caseDefs.head.pat.pos, pureType(ThrowableTpe)) + val scrutSym = freshSym(caseDefs.head.pat.pos, ThrowableTpe) val casesNoSubstOnly = caseDefs map { caseDef => (propagateSubstitution(translateCase(scrutSym, pt)(caseDef), EmptySubstitution))} - val exSym = freshSym(pos, pureType(ThrowableTpe), "ex") + val exSym = freshSym(pos, ThrowableTpe, "ex") val suppression = if (settings.XnoPatmatAnalysis) Suppression.FullSuppression else Suppression.NoSuppression.copy(suppressExhaustive = true) // try/catches needn't be exhaustive @@ -325,11 +319,7 @@ trait MatchTranslation { if (guard == EmptyTree) Nil else List(GuardTreeMaker(guard)) - // TODO: 1) if we want to support a generalisation of Kotlin's patmat continue, must not hard-wire lifting into the monad (which is now done by codegen.one), - // so that user can generate failure when needed -- use implicit conversion to lift into monad on-demand? 
- // to enable this, probably need to move away from Option to a monad specific to pattern-match, - // so that we can return Option's from a match without ambiguity whether this indicates failure in the monad, or just some result in the monad - // 2) body.tpe is the type of the body after applying the substitution that represents the solution of GADT type inference + // TODO: body.tpe is the type of the body after applying the substitution that represents the solution of GADT type inference // need the explicit cast in case our substitutions in the body change the type to something that doesn't take GADT typing into account def translateBody(body: Tree, matchPt: Type): TreeMaker = BodyTreeMaker(body, matchPt) @@ -554,7 +544,7 @@ trait MatchTranslation { // can't simplify this when subPatBinders.isEmpty, since UnitTpe is definitely // wrong when isSeq, and resultInMonad should always be correct since it comes // directly from the extractor's result type - val binder = freshSym(pos, pureType(resultInMonad(patBinderOrCasted))) + val binder = freshSym(pos, resultInMonad(patBinderOrCasted)) val potentiallyMutableBinders: Set[Symbol] = if (extractorApply.tpe.typeSymbol.isNonBottomSubClass(OptionClass) && !isSeq) Set.empty diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala index aa6412d55883..13816c09fffd 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala @@ -19,8 +19,7 @@ import scala.tools.nsc.Reporting.WarningCategory /** Translate our IR (TreeMakers) into actual Scala Trees using the factory methods in MatchCodeGen. * - * The IR is mostly concerned with sequencing, substitution, and rendering all necessary conditions, - * mostly agnostic to whether we're in optimized/pure (virtualized) mode. + * The IR is mostly concerned with sequencing, substitution, and rendering all necessary conditions. */ trait MatchTreeMaking extends MatchCodeGen with Debugging { import global._ @@ -375,7 +374,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { // by `Select(q, outerAccessor(outerSym.owner)) OBJ_EQ expectedPrefix` // if there's an outer accessor, otherwise the condition becomes `true` // TODO: centralize logic whether there's an outer accessor and use here? 
- val synthOuterGetter = expectedTp.typeSymbol.newMethod(vpmName.outer, newFlags = SYNTHETIC | ARTIFACT) setInfo expectedPrefix + val synthOuterGetter = expectedTp.typeSymbol.newMethod(nme.OUTER_SYNTH, newFlags = SYNTHETIC | ARTIFACT) setInfo expectedPrefix val outerTest = (Select(codegen._asInstanceOf(testedBinder, expectedTp), synthOuterGetter)) OBJ_EQ expectedOuterRef and(orig, outerTest) } diff --git a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala index 8e013493a483..b6a65a2e2aa7 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala @@ -130,64 +130,9 @@ trait Interface extends ast.TreeDSL { protected final def mkFALSE = CODE.FALSE protected final def hasStableSymbol(p: Tree) = p.hasSymbolField && p.symbol.isStable - object vpmName { - val one = newTermName("one") - val flatMap = newTermName("flatMap") - val get = newTermName("get") - val guard = newTermName("guard") - val isEmpty = newTermName("isEmpty") - val orElse = newTermName("orElse") - val outer = newTermName("") - val runOrElse = newTermName("runOrElse") - val zero = newTermName("zero") - val _match = newTermName("__match") // don't call the val __match, since that will trigger virtual pattern matching... - - def counted(str: String, i: Int) = newTermName(str + i) - } - -/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// -// talking to userland -/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// - - /** Interface with user-defined match monad? 
- * if there's a __match in scope, we use this as the match strategy, assuming it conforms to MatchStrategy as defined below: - - {{{ - type Matcher[P[_], M[+_], A] = { - def flatMap[B](f: P[A] => M[B]): M[B] - def orElse[B >: A](alternative: => M[B]): M[B] - } - - abstract class MatchStrategy[P[_], M[+_]] { - // runs the matcher on the given input - def runOrElse[T, U](in: P[T])(matcher: P[T] => M[U]): P[U] - - def zero: M[Nothing] - def one[T](x: P[T]): M[T] - def guard[T](cond: P[Boolean], then: => P[T]): M[T] - } - }}} - - * P and M are derived from one's signature (`def one[T](x: P[T]): M[T]`) - - - * if no __match is found, we assume the following implementation (and generate optimized code accordingly) - - {{{ - object __match extends MatchStrategy[({type Id[x] = x})#Id, Option] { - def zero = None - def one[T](x: T) = Some(x) - // NOTE: guard's return type must be of the shape M[T], where M is the monad in which the pattern match should be interpreted - def guard[T](cond: Boolean, then: => T): Option[T] = if(cond) Some(then) else None - def runOrElse[T, U](x: T)(f: T => Option[U]): U = f(x) getOrElse (throw new MatchError(x)) - } - }}} - - */ trait MatchMonadInterface { val typer: Typer val matchOwner = typer.context.owner - def pureType(tp: Type): Type = tp def reportUnreachable(pos: Position) = typer.context.warning(pos, "unreachable code", WarningCategory.OtherMatchAnalysis) def reportMissingCases(pos: Position, counterExamples: List[String]) = { diff --git a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala index 396cec2e53cc..4d18d7b86953 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala @@ -83,8 +83,8 @@ abstract class Duplicators extends Analyzer { val sym1 = ( context.scope lookup sym.name orElse { // try harder (look in outer scopes) - // with virtpatmat, this can happen when the sym is referenced in the scope of a LabelDef but - // is defined in the scope of an outer DefDef (e.g., in AbstractPartialFunction's andThen) + // with virtpatmat, this could happen when the sym was referenced in the scope of a LabelDef but + // was defined in the scope of an outer DefDef (e.g., in AbstractPartialFunction's andThen) BodyDuplicator.super.silent(_ typedType Ident(sym.name)).fold(NoSymbol: Symbol)(_.symbol) } filter (_ ne sym) ) diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 11854d63a87a..b7bd6589062d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -4718,7 +4718,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // in principle we should pack the types of each branch before lubbing, but lub doesn't really work for existentials anyway // in the special (though common) case where the types are equal, it pays to pack before comparing - // especially virtpatmat needs more aggressive unification of skolemized types + // especially virtpatmat needed more aggressive unification of skolemized types // this breaks src/library/scala/collection/immutable/TrieIterator.scala (which as of 2.13 doesn't actually exist anymore) // annotated types need to be lubbed regardless (at least, continuations break if you bypass them like this) def samePackedTypes = ( @@ -4743,8 +4743,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with 
PatternTyper } } - // When there's a suitable __match in scope, virtualize the pattern match - // otherwise, type the Match and leave it until phase `patmat` (immediately after typer) + // Type the Match and leave it until phase `patmat` // empty-selector matches are transformed into synthetic PartialFunction implementations when the expected type demands it def typedVirtualizedMatch(tree: Match): Tree = { val selector = tree.selector diff --git a/src/interactive/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala index 41e4be51afcb..e1f201533148 100644 --- a/src/interactive/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala +++ b/src/interactive/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala @@ -113,10 +113,8 @@ private[tests] trait CoreTestDefs reporter.println("\naskHyperlinkPos for `" + tree.symbol.name + "` at " + format(pos) + " " + pos.source.file.name) val r = new Response[Position] val sourceFile = tree.symbol.sourceFile - // `tree.symbol.sourceFile` was discovered to be null when testing using virtpatmat on the akka presentation test, where a position had shifted to point to `Int` - // askHyperlinkPos for `Int` at (73,19) pi.scala --> class Int in package scala has null sourceFile! - val treePath = if (sourceFile ne null) sourceFile.path else null - val treeName = if (sourceFile ne null) sourceFile.name else null + val treePath = sourceFile.path + val treeName = sourceFile.name sourceFiles.find(_.path == treePath) match { case Some(source) => diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala index 44b945dbfa2c..0c550505f360 100644 --- a/src/reflect/scala/reflect/internal/StdNames.scala +++ b/src/reflect/scala/reflect/internal/StdNames.scala @@ -391,7 +391,7 @@ trait StdNames { val OUTER: NameType = nameType("$outer") val OUTER_LOCAL: NameType = OUTER.localName val OUTER_ARG: NameType = nameType("arg" + OUTER) - val OUTER_SYNTH: NameType = nameType("") // emitted by virtual pattern matcher, replaced by outer accessor in explicitouter + val OUTER_SYNTH: NameType = nameType("") // emitted by pattern matcher, replaced by outer accessor in explicitouter val ROOTPKG: NameType = nameType("_root_") val SELECTOR_DUMMY: NameType = nameType("") val SELF: NameType = nameType(s"$$this") diff --git a/test/files/presentation/t8941/Runner.scala b/test/files/presentation/t8941/Runner.scala index 6401a830a22a..14a6aa835064 100644 --- a/test/files/presentation/t8941/Runner.scala +++ b/test/files/presentation/t8941/Runner.scala @@ -1,11 +1,3 @@ import scala.tools.nsc.interactive.tests.InteractiveTest -object Test extends InteractiveTest { - override def runDefaultTests(): Unit = { - // make sure typer is done.. the virtual pattern matcher might translate - // some trees and mess up positions. But we'll catch it red handed! 
- // sourceFiles foreach (src => askLoadedTyped(src).get) - super.runDefaultTests() - } - -} +object Test extends InteractiveTest diff --git a/test/files/run/t3835.scala b/test/files/run/t3835.scala index 0ee60da845cf..c84c5bb67140 100644 --- a/test/files/run/t3835.scala +++ b/test/files/run/t3835.scala @@ -1,7 +1,4 @@ object Test extends App { - // work around optimizer bug scala/bug#5672 -- generates wrong bytecode for switches in arguments - // virtpatmat happily emits a switch for a one-case switch - // this is not the focus of this test, hence the temporary workaround def a = (1, 2, 3) match { case (r, θ, φ) => r + θ + φ } println(a) def b = (1 match { case é => é }) From 2218acd884133c9fa65267df5fca3c77cabc06c4 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Wed, 22 Jul 2020 13:36:10 +0100 Subject: [PATCH 012/769] Remove some "2.10/2.11 compatibility" in patmat I think that was to help copy code between the two code branches? --- .../tools/nsc/transform/patmat/Logic.scala | 2 +- .../nsc/transform/patmat/MatchCodeGen.scala | 2 +- .../transform/patmat/MatchOptimization.scala | 4 ++-- .../nsc/transform/patmat/MatchTreeMaking.scala | 18 ++++++++---------- .../nsc/transform/patmat/PatternMatching.scala | 6 ------ 5 files changed, 12 insertions(+), 20 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala index ba149513d267..dc28377af3e0 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala @@ -865,7 +865,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis { } val toString = - if (hasStableSymbol(p)) p.symbol.name.toString // tp.toString + if (p.hasSymbolField && p.symbol.isStable) p.symbol.name.toString // tp.toString else p.toString //+"#"+ id Const.unique(narrowTp, new ValueConst(narrowTp, checkableType(wideTp), toString)) // must make wide type checkable so that it is comparable to types from TypeConst diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala index 13351a89b7db..6d87a6bb1e74 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala @@ -189,7 +189,7 @@ trait MatchCodeGen extends Interface { def flatMapCondStored(cond: Tree, condSym: Symbol, res: Tree, nextBinder: Symbol, next: Tree): Tree = ifThenElseZero(cond, BLOCK( - condSym === mkTRUE, + condSym === TRUE, nextBinder === res, next )) diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala index 615dbe26cf20..5fb7570c1553 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala @@ -527,7 +527,7 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis { case _ => None }} - def scrutRef(scrut: Symbol): Tree = dealiasWiden(scrut.tpe) match { + def scrutRef(scrut: Symbol): Tree = scrut.tpe.dealiasWiden match { case subInt if subInt =:= IntTpe => REF(scrut) case subInt if definitions.isNumericSubClass(subInt.typeSymbol, IntClass) => @@ -557,7 +557,7 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis { override def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type, matchFailGenOverride: Option[Tree 
=> Tree], unchecked: Boolean): Option[Tree] = { import CODE._ val regularSwitchMaker = new RegularSwitchMaker(scrutSym, matchFailGenOverride, unchecked) // TODO: if patterns allow switch but the type of the scrutinee doesn't, cast (type-test) the scrutinee to the corresponding switchable type and switch on the result - if (regularSwitchMaker.switchableTpe(dealiasWiden(scrutSym.tpe))) { + if (regularSwitchMaker.switchableTpe(scrutSym.tpe.dealiasWiden)) { val caseDefsWithDefault = regularSwitchMaker(cases map {c => (scrutSym, c)}, pt) if (caseDefsWithDefault.isEmpty) None // not worth emitting a switch. else { diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala index 13816c09fffd..4c29b0920e0f 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala @@ -22,8 +22,7 @@ import scala.tools.nsc.Reporting.WarningCategory * The IR is mostly concerned with sequencing, substitution, and rendering all necessary conditions. */ trait MatchTreeMaking extends MatchCodeGen with Debugging { - import global._ - import definitions._ + import global._, definitions._, CODE._ final case class Suppression(suppressExhaustive: Boolean, suppressUnreachable: Boolean) object Suppression { @@ -209,7 +208,6 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { prevBinder: Symbol, expectedTp: Type, override val pos: Position) extends FunTreeMaker { - import CODE._ override lazy val nextBinder = prevBinder.asTerm // just passing through val nextBinderTp = nextBinder.info.widen @@ -268,7 +266,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { bindSubPats(substitution(next)) } atPos(extractor.pos)( - if (extractorReturnsBoolean) casegen.flatMapCond(extractor, CODE.UNIT, nextBinder, condAndNext) + if (extractorReturnsBoolean) casegen.flatMapCond(extractor, UNIT, nextBinder, condAndNext) else casegen.flatMap(extractor, nextBinder, condAndNext) ) } @@ -338,11 +336,11 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { def tru: Result } - object treeCondStrategy extends TypeTestCondStrategy { import CODE._ + object treeCondStrategy extends TypeTestCondStrategy { type Result = Tree def and(a: Result, b: Result): Result = a AND b - def tru = mkTRUE + def tru = TRUE def typeTest(testedBinder: Symbol, expectedTp: Type) = codegen._isInstanceOf(testedBinder, expectedTp) def nonNullTest(testedBinder: Symbol) = REF(testedBinder) OBJ_NE NULL def equalsTest(pat: Tree, testedBinder: Symbol) = codegen._equals(pat, testedBinder) @@ -485,8 +483,8 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { else mkEqualsTest(expected) // Should revisit if we end up lifting `eq`'s definition to `Any`, as discussed here: // https://groups.google.com/d/msg/scala-internals/jsVlJI4H5OQ/8emZWRmgzcoJ - case ThisType(sym) if sym.isModule => and(mkEqualsTest(CODE.REF(sym)), mkTypeTest) // must use == to support e.g. List() == Nil - case ConstantType(Constant(null)) if isAnyRef => mkEqTest(expTp(CODE.NULL)) + case ThisType(sym) if sym.isModule => and(mkEqualsTest(REF(sym)), mkTypeTest) // must use == to support e.g. 
List() == Nil + case ConstantType(Constant(null)) if isAnyRef => mkEqTest(expTp(NULL)) case ConstantType(const) => mkEqualsTest(expTp(Literal(const))) case ThisType(sym) => mkEqTest(expTp(This(sym))) case _ => mkDefault @@ -528,10 +526,10 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { // one alternative may still generate multiple trees (e.g., an extractor call + equality test) // (for now,) alternatives may not bind variables (except wildcards), so we don't care about the final substitution built internally by makeTreeMakers val combinedAlts = altss map (altTreeMakers => - ((casegen: Casegen) => combineExtractors(altTreeMakers :+ TrivialTreeMaker(casegen.one(mkTRUE)))(casegen)) + ((casegen: Casegen) => combineExtractors(altTreeMakers :+ TrivialTreeMaker(casegen.one(TRUE)))(casegen)) ) - val findAltMatcher = codegenAlt.matcher(EmptyTree, NoSymbol, BooleanTpe)(combinedAlts, Some(x => mkFALSE)) + val findAltMatcher = codegenAlt.matcher(EmptyTree, NoSymbol, BooleanTpe)(combinedAlts, Some(x => FALSE)) codegenAlt.ifThenElseZero(findAltMatcher, substitution(next)) } } diff --git a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala index b6a65a2e2aa7..37ead94815d3 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala @@ -124,12 +124,6 @@ trait Interface extends ast.TreeDSL { import global._ import analyzer.Typer - // 2.10/2.11 compatibility - protected final def dealiasWiden(tp: Type) = tp.dealiasWiden - protected final def mkTRUE = CODE.TRUE - protected final def mkFALSE = CODE.FALSE - protected final def hasStableSymbol(p: Tree) = p.hasSymbolField && p.symbol.isStable - trait MatchMonadInterface { val typer: Typer val matchOwner = typer.context.owner From 9a5730b79160cd3a60f85fa276ec128a4d025fa8 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Wed, 22 Jul 2020 13:36:10 +0100 Subject: [PATCH 013/769] Resolve or just remove some TODOs + dead code. * Reflect that MatchOptimization only relies on MatchApproximation now, not MatchAnalysis. * Remove commented DeadCodeElimination stub (& "doDCE" reference): it's unlikely we'll implement that in patmat * At this point I don't see the ROI on unifying SwitchEmission's logic with MatchApproximator, so remove the "operate on Tests" TODO * Remove the "only instantiate new match translator" TODO, as (IIRC) that is no longer as simple as it was at the time of the TODO (because of the added position parameter I think?) --- .../transform/patmat/MatchOptimization.scala | 25 +++---------------- .../transform/patmat/PatternMatching.scala | 3 --- 2 files changed, 3 insertions(+), 25 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala index 5fb7570c1553..975b16ceead0 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala @@ -22,8 +22,7 @@ import scala.tools.nsc.Reporting.WarningCategory * The patmat translation doesn't rely on this, so it could be disabled in principle. * - well, not quite: the backend crashes if we emit duplicates in switches (e.g. 
scala/bug#7290) */ -// TODO: split out match analysis -trait MatchOptimization extends MatchTreeMaking with MatchAnalysis { +trait MatchOptimization extends MatchTreeMaking with MatchApproximation { import global._ import global.definitions._ @@ -206,20 +205,7 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis { } } - - //// DCE -// trait DeadCodeElimination extends TreeMakers { -// // TODO: non-trivial dead-code elimination -// // e.g., the following match should compile to a simple instanceof: -// // case class Ident(name: String) -// // for (Ident(name) <- ts) println(name) -// def doDCE(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): List[List[TreeMaker]] = { -// // do minimal DCE -// cases -// } -// } - - //// SWITCHES -- TODO: operate on Tests rather than TreeMakers + //// SWITCHES trait SwitchEmission extends TreeMakers with MatchMonadInterface { import treeInfo.isGuardedCase @@ -615,13 +601,8 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis { with SwitchEmission with CommonSubconditionElimination { override def optimizeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type, selectorPos: Position): (List[List[TreeMaker]], List[Tree]) = { - // TODO: do CSE on result of doDCE(prevBinder, cases, pt) val optCases = doCSE(prevBinder, cases, pt, selectorPos) - val toHoist = ( - for (treeMakers <- optCases) - yield treeMakers.collect{case tm: ReusedCondTreeMaker => tm.treesToHoist} - ).flatten.flatten.toList - (optCases, toHoist) + (optCases, optCases.flatMap(flatCollect(_) { case tm: ReusedCondTreeMaker => tm.treesToHoist })) } } } diff --git a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala index 37ead94815d3..b9d562ff9756 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala @@ -90,9 +90,6 @@ trait PatternMatching extends Transform case _ => super.transform(tree) } - // TODO: only instantiate new match translator when localTyper has changed - // override def atOwner[A](tree: Tree, owner: Symbol)(trans: => A): A - // as this is the only time TypingTransformer changes it def translator(selectorPos: Position): MatchTranslator with CodegenCore = { new OptimizingMatchTranslator(localTyper, selectorPos) } From 4142905b35e5310fb5cb6dd2eccf63dfb899fb2d Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Thu, 25 Feb 2021 11:27:14 +0000 Subject: [PATCH 014/769] Simplify combineCasesNoSubstOnly & shift propagateSubstitution "propagateSubstitution" was always being called after translateCase, so suck that into the end of translateCase. Then merge the two "combineCases" methods by just adding 1 call to getSuppression. I also lifted out "requireSwitch" which was dominating the method. The type annotation for RefTree is to get IntelliJ to chill about overloaded methods. 
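One detail worth calling out in the lifted-out `requireSwitch` logic (now `requiresSwitch` below): it only ever needs to know whether a match has more than two cases, so its helper stops counting at three rather than walking the whole case list (the real helper also descends into pattern alternatives). A simplified, standalone sketch of that bounded-counting idea:

    // Count at most three elements: enough to decide "more than two?" cheaply.
    def lengthMax3[A](xs: List[A]): Int = xs match {
      case _ :: _ :: _ :: _ => 3
      case shorter          => shorter.length // 0, 1 or 2
    }

    def exceedsTwoCases[A](cases: List[A]): Boolean = lengthMax3(cases) > 2

    assert(!exceedsTwoCases(List(1, 2)))
    assert(exceedsTwoCases(List.range(0, 1000000))) // inspects only three cons cells

For ordinary code the standard library's `lengthCompare` gives the same guarantee: `cases.lengthCompare(2) > 0` also looks at no more than three elements of a `List`.
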
--- .../scala/tools/nsc/ast/TreeDSL.scala | 4 +- .../transform/patmat/MatchTranslation.scala | 11 +-- .../transform/patmat/MatchTreeMaking.scala | 84 +++++++++---------- 3 files changed, 46 insertions(+), 53 deletions(-) diff --git a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala index a88de4d9c42d..7281d66aa807 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala @@ -150,8 +150,8 @@ trait TreeDSL { def SOME(xs: Tree*) = Apply(SomeClass.companionSymbol, gen.mkTuple(xs.toList)) /** Typed trees from symbols. */ - def REF(sym: Symbol) = gen.mkAttributedRef(sym) - def REF(pre: Type, sym: Symbol) = gen.mkAttributedRef(pre, sym) + def REF(sym: Symbol): RefTree = gen.mkAttributedRef(sym) + def REF(pre: Type, sym: Symbol): RefTree = gen.mkAttributedRef(pre, sym) /** Implicits - some of these should probably disappear **/ implicit def mkTreeMethods(target: Tree): TreeMethods = new TreeMethods(target) diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala index 280ff053e26f..6d5a8eab3919 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala @@ -223,7 +223,7 @@ trait MatchTranslation { val selectorSym = freshSym(selector.pos, selectorTp) setFlag treeInfo.SYNTH_CASE_FLAGS // pt = Any* occurs when compiling test/files/pos/annotDepMethType.scala - val combined = combineCases(selector, selectorSym, nonSyntheticCases map translateCase(selectorSym, pt), pt, selectorPos, matchOwner, defaultOverride) + val combined = combineCases(selector, selectorSym, nonSyntheticCases map translateCase(selectorSym, pt), pt, selectorPos, matchOwner, defaultOverride, getSuppression(selector)) if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(statistics.patmatNanos, start) combined @@ -250,7 +250,7 @@ trait MatchTranslation { // generate a fresh symbol for each case, hoping we'll end up emitting a type-switch (we don't have a global scrut there) // if we fail to emit a fine-grained switch, have to do translateCase again with a single scrutSym (TODO: uniformize substitution on treemakers so we can avoid this) val caseScrutSym = freshSym(caseDef.pat.pos, ThrowableTpe) - (caseScrutSym, propagateSubstitution(translateCase(caseScrutSym, pt)(caseDef), EmptySubstitution)) + (caseScrutSym, translateCase(caseScrutSym, pt)(caseDef)) } for(cases <- emitTypeSwitch(bindersAndCases, pt).toList @@ -260,7 +260,7 @@ trait MatchTranslation { val catches = if (swatches.nonEmpty) swatches else { val scrutSym = freshSym(caseDefs.head.pat.pos, ThrowableTpe) - val casesNoSubstOnly = caseDefs map { caseDef => (propagateSubstitution(translateCase(scrutSym, pt)(caseDef), EmptySubstitution))} + val cases = caseDefs.map(translateCase(scrutSym, pt)) val exSym = freshSym(pos, ThrowableTpe, "ex") val suppression = @@ -272,7 +272,7 @@ trait MatchTranslation { CaseDef( Bind(exSym, Ident(nme.WILDCARD)), // TODO: does this need fixing upping? 
EmptyTree, - combineCasesNoSubstOnly(REF(exSym), scrutSym, casesNoSubstOnly, pt, selectorPos, matchOwner, Some(scrut => Throw(REF(exSym))), suppression) + combineCases(REF(exSym), scrutSym, cases, pt, selectorPos, matchOwner, Some(scrut => Throw(REF(exSym))), suppression) ) }) } @@ -310,7 +310,8 @@ trait MatchTranslation { */ def translateCase(scrutSym: Symbol, pt: Type)(caseDef: CaseDef): List[TreeMaker] = { val CaseDef(pattern, guard, body) = caseDef - translatePattern(BoundTree(scrutSym, pattern)) ++ translateGuard(guard) :+ translateBody(body, pt) + val treeMakers = translatePattern(BoundTree(scrutSym, pattern)) ++ translateGuard(guard) :+ translateBody(body, pt) + propagateSubstitution(treeMakers, EmptySubstitution) } def translatePattern(bound: BoundTree): List[TreeMaker] = bound.translate() diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala index 4c29b0920e0f..67f8b2bae3ea 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala @@ -573,69 +573,61 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { case _ => Suppression.NoSuppression } - // calls propagateSubstitution on the treemakers - def combineCases(scrut: Tree, scrutSym: Symbol, casesRaw: List[List[TreeMaker]], pt: Type, selectorPos: Position, owner: Symbol, matchFailGenOverride: Option[Tree => Tree]): Tree = { - // drops SubstOnlyTreeMakers, since their effect is now contained in the TreeMakers that follow them - val casesNoSubstOnly = casesRaw map (propagateSubstitution(_, EmptySubstitution)) - combineCasesNoSubstOnly(scrut, scrutSym, casesNoSubstOnly, pt, selectorPos, owner, matchFailGenOverride, getSuppression(scrut)) + def requiresSwitch(scrut: Tree, cases: List[List[TreeMaker]]): Boolean = { + if (settings.XnoPatmatAnalysis) false + else scrut match { + case Typed(tree, tpt) => + val hasSwitchAnnotation = treeInfo.isSwitchAnnotation(tpt.tpe) + // matches with two or fewer cases need not apply for switchiness (if-then-else will do) + // `case 1 | 2` is considered as two cases. 
+ def exceedsTwoCasesOrAlts = { + // avoids traversing the entire list if there are more than 3 elements + def lengthMax3(l: List[List[TreeMaker]]): Int = l match { + case a :: b :: c :: _ => 3 + case cases => cases.map { + case AlternativesTreeMaker(_, alts, _) :: _ => lengthMax3(alts) + case c => 1 + }.sum + } + lengthMax3(cases) > 2 + } + hasSwitchAnnotation && exceedsTwoCasesOrAlts + case _ => false + } } // pt is the fully defined type of the cases (either pt or the lub of the types of the cases) - def combineCasesNoSubstOnly(scrut: Tree, scrutSym: Symbol, casesNoSubstOnly: List[List[TreeMaker]], pt: Type, - selectorPos: Position, owner: Symbol, matchFailGenOverride: Option[Tree => Tree], - suppression: Suppression, + def combineCases( + scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type, + selectorPos: Position, owner: Symbol, matchFailGenOverride: Option[Tree => Tree], + suppression: Suppression, ): Tree = fixerUpper(owner, scrut.pos) { def matchFailGen = matchFailGenOverride orElse Some(Throw(MatchErrorClass.tpe, _: Tree)) - debug.patmat("combining cases: "+ (casesNoSubstOnly.map(_.mkString(" >> ")).mkString("{", "\n", "}"))) - - val requireSwitch: Boolean = - if (settings.XnoPatmatAnalysis) false - else scrut match { - case Typed(tree, tpt) => - val hasSwitchAnnotation = treeInfo.isSwitchAnnotation(tpt.tpe) - // matches with two or fewer cases need not apply for switchiness (if-then-else will do) - // `case 1 | 2` is considered as two cases. - def exceedsTwoCasesOrAlts = { - // avoids traversing the entire list if there are more than 3 elements - def lengthMax3(l: List[List[TreeMaker]]): Int = l match { - case a :: b :: c :: _ => 3 - case cases => - cases.map { - case AlternativesTreeMaker(_, alts, _) :: _ => lengthMax3(alts) - case c => 1 - }.sum - } - lengthMax3(casesNoSubstOnly) > 2 - } - hasSwitchAnnotation && exceedsTwoCasesOrAlts - case _ => - false - } + debug.patmat("combining cases: "+ (cases.map(_.mkString(" >> ")).mkString("{", "\n", "}"))) - emitSwitch(scrut, scrutSym, casesNoSubstOnly, pt, matchFailGenOverride, unchecked = suppression.suppressExhaustive).getOrElse { - if (requireSwitch) typer.context.warning(scrut.pos, "could not emit switch for @switch annotated match", WarningCategory.OtherMatchAnalysis) + emitSwitch(scrut, scrutSym, cases, pt, matchFailGenOverride, unchecked = suppression.suppressExhaustive).getOrElse { + if (requiresSwitch(scrut, cases)) + typer.context.warning(scrut.pos, "could not emit switch for @switch annotated match", WarningCategory.OtherMatchAnalysis) - if (!casesNoSubstOnly.isEmpty) { - // before optimizing, check casesNoSubstOnly for presence of a default case, + if (!cases.isEmpty) { + // before optimizing, check cases for presence of a default case, // since DCE will eliminate trivial cases like `case _ =>`, even if they're the last one // exhaustivity and reachability must be checked before optimization as well // TODO: improve notion of trivial/irrefutable -- a trivial type test before the body still makes for a default case // ("trivial" depends on whether we're emitting a straight match or an exception, or more generally, any supertype of scrutSym.tpe is a no-op) // irrefutability checking should use the approximation framework also used for CSE, unreachability and exhaustivity checking - val synthCatchAll = - if (casesNoSubstOnly.nonEmpty && { - val nonTrivLast = casesNoSubstOnly.last - nonTrivLast.nonEmpty && nonTrivLast.head.isInstanceOf[BodyTreeMaker] - }) None - else matchFailGen + val synthCatchAll = cases 
match { + case _ :+ Seq(_: BodyTreeMaker, _*) => None + case _ => matchFailGen + } - analyzeCases(scrutSym, casesNoSubstOnly, pt, suppression) + analyzeCases(scrutSym, cases, pt, suppression) - val (cases, toHoist) = optimizeCases(scrutSym, casesNoSubstOnly, pt, selectorPos) + val (optimizedCases, toHoist) = optimizeCases(scrutSym, cases, pt, selectorPos) - val matchRes = codegen.matcher(scrut, scrutSym, pt)(cases map combineExtractors, synthCatchAll) + val matchRes = codegen.matcher(scrut, scrutSym, pt)(optimizedCases map combineExtractors, synthCatchAll) if (toHoist.isEmpty) matchRes else Block(toHoist, matchRes) } else { From 0a760729c7bb604dae19f10d7040185127a8be9e Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Thu, 25 Feb 2021 14:14:01 +0000 Subject: [PATCH 015/769] Unfilter some exhaustivity tests --- test/files/run/virtpatmat_alts.check | 6 ++++-- test/files/run/virtpatmat_alts.scala | 3 --- test/files/run/virtpatmat_nested_lists.check | 3 ++- test/files/run/virtpatmat_nested_lists.scala | 4 ---- test/files/run/virtpatmat_opt_sharing.check | 3 ++- test/files/run/virtpatmat_opt_sharing.scala | 4 ---- 6 files changed, 8 insertions(+), 15 deletions(-) diff --git a/test/files/run/virtpatmat_alts.check b/test/files/run/virtpatmat_alts.check index f39e292fef10..91b7c068c307 100644 --- a/test/files/run/virtpatmat_alts.check +++ b/test/files/run/virtpatmat_alts.check @@ -1,7 +1,9 @@ -virtpatmat_alts.scala:5: warning: match may not be exhaustive. +virtpatmat_alts.scala:2: warning: match may not be exhaustive. +It would fail on the following inputs: (false, true), (true, false) (true, true) match { ^ -virtpatmat_alts.scala:9: warning: match may not be exhaustive. +virtpatmat_alts.scala:6: warning: match may not be exhaustive. +It would fail on the following inputs: List((x: Int forSome x not in (1, 2, 4, 5, 6, 7))), List((x: Int forSome x not in (1, 2, 4, 5, 6, 7)), _), List(1, _), List(2, _), List(4, _), List(5, _), List(6, _), List(7, _), List(_, _) List(5) match { ^ OK 5 diff --git a/test/files/run/virtpatmat_alts.scala b/test/files/run/virtpatmat_alts.scala index d1dfa8a4a1ab..0ae6f36241d5 100644 --- a/test/files/run/virtpatmat_alts.scala +++ b/test/files/run/virtpatmat_alts.scala @@ -1,6 +1,3 @@ -/* - * filter: It would fail on the following input - */ object Test extends App { (true, true) match { case (true, true) | (false, false) => 1 diff --git a/test/files/run/virtpatmat_nested_lists.check b/test/files/run/virtpatmat_nested_lists.check index 9d1d5a90a8f8..0ae86d1bf5f7 100644 --- a/test/files/run/virtpatmat_nested_lists.check +++ b/test/files/run/virtpatmat_nested_lists.check @@ -1,4 +1,5 @@ -virtpatmat_nested_lists.scala:7: warning: match may not be exhaustive. +virtpatmat_nested_lists.scala:3: warning: match may not be exhaustive. 
+It would fail on the following inputs: List(_), List(_, List(_), _), List(_, List(_, _)), List(_, List(_, _), _), List(_, Nil), List(_, Nil, _), Nil List(List(1), List(2)) match { case x :: (y :: Nil) :: Nil => println(y) } ^ 2 diff --git a/test/files/run/virtpatmat_nested_lists.scala b/test/files/run/virtpatmat_nested_lists.scala index 6b7233c0454e..58f36b642303 100644 --- a/test/files/run/virtpatmat_nested_lists.scala +++ b/test/files/run/virtpatmat_nested_lists.scala @@ -1,8 +1,4 @@ // scalac: -Ypatmat-exhaust-depth off -// -/* - * filter: It would fail on the following input - */ object Test extends App { List(List(1), List(2)) match { case x :: (y :: Nil) :: Nil => println(y) } } diff --git a/test/files/run/virtpatmat_opt_sharing.check b/test/files/run/virtpatmat_opt_sharing.check index 95e962134f98..a2189687a521 100644 --- a/test/files/run/virtpatmat_opt_sharing.check +++ b/test/files/run/virtpatmat_opt_sharing.check @@ -1,4 +1,5 @@ -virtpatmat_opt_sharing.scala:9: warning: match may not be exhaustive. +virtpatmat_opt_sharing.scala:5: warning: match may not be exhaustive. +It would fail on the following inputs: List((x: Int forSome x not in 1)), List((x: Int forSome x not in 1), (x: Int forSome x not in 3)), List((x: Int forSome x not in 1), (x: Int forSome x not in 3), (x: Int forSome x not in 4)), List((x: Int forSome x not in 1), (x: Int forSome x not in 3), (x: Int forSome x not in 4), (x: Int forSome x not in (5, 6, 7))), List((x: Int forSome x not in 1), (x: Int forSome x not in 3), (x: Int forSome x not in 4), 5), List((x: Int forSome x not in 1), (x: Int forSome x not in 3), (x: Int forSome x not in 4), 6), List((x: Int forSome x not in 1), (x: Int forSome x not in 3), (x: Int forSome x not in 4), 7), List((x: Int forSome x not in 1), (x: Int forSome x not in 3), (x: Int forSome x not in 4), ??), List((x: Int forSome x not in 1), (x: Int forSome x not in 3), 4), List((x: Int forSome x not in 1), (x: Int forSome x not in 3), 4, (x: Int forSome x not in (5, 6, 7))), List((x: Int forSome x not in 1), (x: Int forSome x not in 3), 4, 5), List((x: Int forSome x not in 1), (x: Int forSome x not in 3), 4, 6), List((x: Int forSome x not in 1), (x: Int forSome x not in 3), 4, 7), List((x: Int forSome x not in 1), (x: Int forSome x not in 3), 4, ??), List((x: Int forSome x not in 1), 3), List((x: Int forSome x not in 1), 3, (x: Int forSome x not in 4)), List((x: Int forSome x not in 1), 3, (x: Int forSome x not in 4), (x: Int forSome x not in (5, 6, 7))), List((x: Int forSome x not in 1), 3, (x: Int forSome x not in 4), 5), List((x: Int forSome x not in 1), 3, (x: Int forSome x not in 4), 6), List((x: Int forSome x not in 1), 3, (x: Int forSome x not in 4), 7), List((x: Int forSome x not in 1), 3, (x: Int forSome x not in 4), ??), List((x: Int forSome x not in 1), 3, 4), List((x: Int forSome x not in 1), 3, 4, (x: Int forSome x not in (5, 6, 7))), List((x: Int forSome x not in 1), 3, 4, 5), List((x: Int forSome x not in 1), 3, 4, 6), List((x: Int forSome x not in 1), 3, 4, 7), List((x: Int forSome x not in 1), 3, 4, ??), List(1), List(1, (x: Int forSome x not in 3)), List(1, (x: Int forSome x not in 3), (x: Int forSome x not in 4)), List(1, (x: Int forSome x not in 3), (x: Int forSome x not in 4), (x: Int forSome x not in (5, 6, 7))), List(1, (x: Int forSome x not in 3), (x: Int forSome x not in 4), 5), List(1, (x: Int forSome x not in 3), (x: Int forSome x not in 4), 6), List(1, (x: Int forSome x not in 3), (x: Int forSome x not in 4), 7), List(1, (x: Int forSome x not in 3), (x: Int 
forSome x not in 4), ??), List(1, (x: Int forSome x not in 3), 4), List(1, (x: Int forSome x not in 3), 4, (x: Int forSome x not in (5, 6, 7))), List(1, (x: Int forSome x not in 3), 4, 5), List(1, (x: Int forSome x not in 3), 4, 6), List(1, (x: Int forSome x not in 3), 4, 7), List(1, (x: Int forSome x not in 3), 4, ??), List(1, 3), List(1, 3, (x: Int forSome x not in 4)), List(1, 3, (x: Int forSome x not in 4), (x: Int forSome x not in (5, 6, 7))), List(1, 3, (x: Int forSome x not in 4), 5), List(1, 3, (x: Int forSome x not in 4), 6), List(1, 3, (x: Int forSome x not in 4), 7), List(1, 3, (x: Int forSome x not in 4), ??), List(1, 3, 4), List(1, 3, 4, (x: Int forSome x not in (5, 6, 7))), Nil List(1, 3, 4, 7) match { ^ 1 diff --git a/test/files/run/virtpatmat_opt_sharing.scala b/test/files/run/virtpatmat_opt_sharing.scala index 2f1b68d05780..988f963c8ec7 100644 --- a/test/files/run/virtpatmat_opt_sharing.scala +++ b/test/files/run/virtpatmat_opt_sharing.scala @@ -1,8 +1,4 @@ // scalac: -Ypatmat-exhaust-depth off -// -/* - * filter: It would fail on the following input - */ object Test extends App { virtMatch() def virtMatch() = { From ac99c8258c9c961f8a8526ea26c7d6384eb89d86 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Fri, 26 Feb 2021 15:31:50 -0800 Subject: [PATCH 016/769] Enable parser warnings in REPL Silent parsing, then normal reporting when compiling template. For silence, use method that restores settings when done. --- src/repl/scala/tools/nsc/interpreter/IMain.scala | 12 ++---------- test/files/run/t12354.check | 14 ++++++++++++++ test/files/run/t12354.scala | 10 ++++++++++ 3 files changed, 26 insertions(+), 10 deletions(-) create mode 100644 test/files/run/t12354.check create mode 100644 test/files/run/t12354.scala diff --git a/src/repl/scala/tools/nsc/interpreter/IMain.scala b/src/repl/scala/tools/nsc/interpreter/IMain.scala index 7ece83af86ba..c00fc0d903cd 100644 --- a/src/repl/scala/tools/nsc/interpreter/IMain.scala +++ b/src/repl/scala/tools/nsc/interpreter/IMain.scala @@ -103,15 +103,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends else new PathResolver(settings, global.closeableRegistry).resultAsURLs // the compiler's classpath ) def settings = initialSettings - // Run the code body with the given boolean settings flipped to true. 
- def withoutWarnings[T](body: => T): T = beQuietDuring { - val saved = settings.nowarn.value - if (!saved) - settings.nowarn.value = true - - try body - finally if (!saved) settings.nowarn.value = false - } + def withoutWarnings[T](body: => T): T = beQuietDuring(IMain.withSuppressedSettings(settings, global)(body)) // Apply a temporary label for compilation (for example, script name) def withLabel[A](temp: String)(body: => A): A = { val saved = label @@ -1166,7 +1158,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends def apply(line: String): Result = debugging(s"""parse("$line")""") { var isIncomplete = false - def parse = { + def parse = withoutWarnings { val trees = newUnitParser(line, label).parseStats() if (!isIncomplete) runReporting.summarizeErrors() diff --git a/test/files/run/t12354.check b/test/files/run/t12354.check new file mode 100644 index 000000000000..3109db8938cb --- /dev/null +++ b/test/files/run/t12354.check @@ -0,0 +1,14 @@ + +scala> case class C(implicit x: Int) +:11: warning: case classes should have a non-implicit parameter list; adapting to 'case class C()(...)' +case class C(implicit x: Int) + ^ +defined class C + +scala> for {x <- Nil; val y = 1} yield y +:12: warning: val keyword in for comprehension is deprecated +for {x <- Nil; val y = 1} yield y + ^ +res0: List[Int] = List() + +scala> :quit diff --git a/test/files/run/t12354.scala b/test/files/run/t12354.scala new file mode 100644 index 000000000000..9d0754884dab --- /dev/null +++ b/test/files/run/t12354.scala @@ -0,0 +1,10 @@ + +import scala.tools.nsc.Settings +import scala.tools.partest.SessionTest + +object Test extends SessionTest { + override def transformSettings(ss: Settings) = { + ss.deprecation.value = true + ss + } +} From c3e25a6366e27723cba203ee00965c8513a4fcbb Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Fri, 26 Feb 2021 22:52:43 -0800 Subject: [PATCH 017/769] Repl indents for alignment after regression --- .../tools/nsc/interpreter/ReplReporter.scala | 4 +- test/files/jvm/interpreter.check | 20 ++-- test/files/run/constrained-types.check | 16 ++-- .../run/reify-repl-fail-gracefully.check | 4 +- test/files/run/reify_newimpl_22.check | 4 +- test/files/run/reify_newimpl_23.check | 4 +- test/files/run/reify_newimpl_25.check | 4 +- test/files/run/reify_newimpl_26.check | 4 +- test/files/run/repl-bare-expr.check | 24 ++--- .../run/repl-class-based-outer-pointers.check | 4 +- .../run/repl-class-based-term-macros.check | 96 +++++++++---------- test/files/run/repl-colon-type.check | 8 +- .../files/run/repl-no-imports-no-predef.check | 80 ++++++++-------- test/files/run/repl-parens.check | 24 ++--- test/files/run/repl-paste-2.check | 4 +- test/files/run/repl-reset.check | 16 ++-- test/files/run/t11402.check | 4 +- test/files/run/t12354.check | 8 +- test/files/run/t1931.check | 8 +- test/files/run/t4542.check | 4 +- test/files/run/t4594-repl-settings.check | 4 +- test/files/run/t5655.check | 8 +- test/files/run/t7319.check | 12 +-- test/files/run/t7747-repl.check | 64 ++++++------- test/files/run/t8918-unary-ids.check | 8 +- test/files/run/t9170.check | 12 +-- test/files/run/t9206.check | 8 +- test/files/run/xMigration.check | 12 +-- 28 files changed, 234 insertions(+), 234 deletions(-) diff --git a/src/repl/scala/tools/nsc/interpreter/ReplReporter.scala b/src/repl/scala/tools/nsc/interpreter/ReplReporter.scala index 677fafda4827..4949fb22d0ef 100644 --- a/src/repl/scala/tools/nsc/interpreter/ReplReporter.scala +++ b/src/repl/scala/tools/nsc/interpreter/ReplReporter.scala 
@@ -72,7 +72,7 @@ class ReplReporter(intp: IMain) extends ConsoleReporter(intp.settings, Console.i } // shift indentation for source text entered at prompt - def print(pos: Position, msg: String, severity: Severity): Unit = { + override protected def display(pos: Position, msg: String, severity: Severity): Unit = { val adjusted = if (pos.source.file.name == "") new OffsetPosition(pos.source, pos.offset.getOrElse(0)) { @@ -80,7 +80,7 @@ class ReplReporter(intp: IMain) extends ConsoleReporter(intp.settings, Console.i override def lineCaret = s"${indentation}${super.lineCaret}" } else pos - super.info0(adjusted, msg, severity, force = false) + super.display(adjusted, msg, severity) } override def printMessage(msg: String): Unit = { diff --git a/test/files/jvm/interpreter.check b/test/files/jvm/interpreter.check index b0e666bda3bc..178fa24e5723 100644 --- a/test/files/jvm/interpreter.check +++ b/test/files/jvm/interpreter.check @@ -34,8 +34,8 @@ scala> val bogus: anotherint = "hello" found : String("hello") required: anotherint (which expands to) Int -val bogus: anotherint = "hello" - ^ + val bogus: anotherint = "hello" + ^ scala> trait PointlessTrait defined trait PointlessTrait @@ -278,13 +278,13 @@ scala> // both of the following should abort immediately: scala> def x => y => z :1: error: '=' expected but '=>' found. -def x => y => z - ^ + def x => y => z + ^ scala> [1,2,3] :1: error: illegal start of definition -[1,2,3] -^ + [1,2,3] + ^ scala> @@ -355,8 +355,8 @@ scala> def f(e: Exp) = e match { // non-exhaustive warning here } :18: warning: match may not be exhaustive. It would fail on the following inputs: Exp(), Term() -def f(e: Exp) = e match { // non-exhaustive warning here - ^ + def f(e: Exp) = e match { // non-exhaustive warning here + ^ f: (e: Exp)Int scala> :quit @@ -364,5 +364,5 @@ plusOne: (x: Int)Int res0: Int = 6 res0: String = after reset :12: error: not found: value plusOne -plusOne(5) // should be undefined now -^ + plusOne(5) // should be undefined now + ^ diff --git a/test/files/run/constrained-types.check b/test/files/run/constrained-types.check index 4f2c7015ec05..58cdeb7df0a0 100644 --- a/test/files/run/constrained-types.check +++ b/test/files/run/constrained-types.check @@ -134,16 +134,16 @@ scala> scala> val x = 3 : Int @Annot(e+f+g+h) // should have a graceful error message :12: error: not found: value e -val x = 3 : Int @Annot(e+f+g+h) // should have a graceful error message - ^ + val x = 3 : Int @Annot(e+f+g+h) // should have a graceful error message + ^ :12: error: not found: value f -val x = 3 : Int @Annot(e+f+g+h) // should have a graceful error message - ^ + val x = 3 : Int @Annot(e+f+g+h) // should have a graceful error message + ^ :12: error: not found: value g -val x = 3 : Int @Annot(e+f+g+h) // should have a graceful error message - ^ + val x = 3 : Int @Annot(e+f+g+h) // should have a graceful error message + ^ :12: error: not found: value h -val x = 3 : Int @Annot(e+f+g+h) // should have a graceful error message - ^ + val x = 3 : Int @Annot(e+f+g+h) // should have a graceful error message + ^ scala> :quit diff --git a/test/files/run/reify-repl-fail-gracefully.check b/test/files/run/reify-repl-fail-gracefully.check index b37953ff5f48..32ed87635688 100644 --- a/test/files/run/reify-repl-fail-gracefully.check +++ b/test/files/run/reify-repl-fail-gracefully.check @@ -9,7 +9,7 @@ scala> scala> reify :16: error: too few argument lists for macro invocation -reify -^ + reify + ^ scala> :quit diff --git a/test/files/run/reify_newimpl_22.check 
b/test/files/run/reify_newimpl_22.check index 34184831da0a..b2f4d5624e1b 100644 --- a/test/files/run/reify_newimpl_22.check +++ b/test/files/run/reify_newimpl_22.check @@ -16,8 +16,8 @@ scala> { println(code.eval) } :19: free term: Ident(TermName("x")) defined by res0 in :18:7 - val code = reify { - ^ + val code = reify { + ^ 2 scala> :quit diff --git a/test/files/run/reify_newimpl_23.check b/test/files/run/reify_newimpl_23.check index 4f5462cb1307..abf314b26a3b 100644 --- a/test/files/run/reify_newimpl_23.check +++ b/test/files/run/reify_newimpl_23.check @@ -15,8 +15,8 @@ scala> def foo[T]{ println(code.eval) } :17: free type: Ident(TypeName("T")) defined by foo in :16:9 - val code = reify { - ^ + val code = reify { + ^ foo: [T]=> Unit scala> :quit diff --git a/test/files/run/reify_newimpl_25.check b/test/files/run/reify_newimpl_25.check index 8b942bd67750..d446caa91a03 100644 --- a/test/files/run/reify_newimpl_25.check +++ b/test/files/run/reify_newimpl_25.check @@ -6,8 +6,8 @@ scala> { println(tt) } :15: free term: Ident(TermName("x")) defined by res0 in :14:7 - val tt = implicitly[TypeTag[x.type]] - ^ + val tt = implicitly[TypeTag[x.type]] + ^ TypeTag[x.type] scala> :quit diff --git a/test/files/run/reify_newimpl_26.check b/test/files/run/reify_newimpl_26.check index 776ef2206527..eb2b8309a085 100644 --- a/test/files/run/reify_newimpl_26.check +++ b/test/files/run/reify_newimpl_26.check @@ -5,8 +5,8 @@ scala> def foo[T]{ println(tt) } :13: free type: Ident(TypeName("T")) defined by foo in :11:9 - val tt = implicitly[WeakTypeTag[List[T]]] - ^ + val tt = implicitly[WeakTypeTag[List[T]]] + ^ foo: [T]=> Unit scala> foo[Int] diff --git a/test/files/run/repl-bare-expr.check b/test/files/run/repl-bare-expr.check index f437e2fe4dcf..bdf8842bb0b1 100644 --- a/test/files/run/repl-bare-expr.check +++ b/test/files/run/repl-bare-expr.check @@ -1,14 +1,14 @@ scala> 2 ; 3 :11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses -2 ;; -^ + 2 ;; + ^ res0: Int = 3 scala> { 2 ; 3 } :12: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses -{ 2 ; 3 } - ^ + { 2 ; 3 } + ^ res1: Int = 3 scala> 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { @@ -16,17 +16,17 @@ scala> 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Mooo 2 + 3 } ; bippy+88+11 :11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses -5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { -^ + 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { + ^ :11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses -5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { - ^ + 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { + ^ :11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses -5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { - ^ + 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { + ^ :11: warning: a pure expression does nothing in statement position; 
multiline expressions may require enclosing parentheses -5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { - ^ + 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { + ^ defined object Cow defined class Moo bippy: Int diff --git a/test/files/run/repl-class-based-outer-pointers.check b/test/files/run/repl-class-based-outer-pointers.check index 4a123aec9275..54bc714ab079 100644 --- a/test/files/run/repl-class-based-outer-pointers.check +++ b/test/files/run/repl-class-based-outer-pointers.check @@ -9,8 +9,8 @@ defined object Value scala> class C { final case class Num(value: Double) } // here it should still warn :11: warning: The outer reference in this type test cannot be checked at run time. -class C { final case class Num(value: Double) } // here it should still warn - ^ + class C { final case class Num(value: Double) } // here it should still warn + ^ defined class C scala> :quit diff --git a/test/files/run/repl-class-based-term-macros.check b/test/files/run/repl-class-based-term-macros.check index edec300bcc63..b7b9c94b3399 100644 --- a/test/files/run/repl-class-based-term-macros.check +++ b/test/files/run/repl-class-based-term-macros.check @@ -44,64 +44,64 @@ scala> def fooBBC: Unit = macro implBBC macro [].[[]] or macro [].[[]] note: macro definition is not supported in the REPL when using -Yrepl-classbased. -def fooBBC: Unit = macro implBBC - ^ + def fooBBC: Unit = macro implBBC + ^ scala> def fooWBC: Unit = macro implWBC :16: error: macro implementation reference has wrong shape. required: macro [].[[]] or macro [].[[]] note: macro definition is not supported in the REPL when using -Yrepl-classbased. -def fooWBC: Unit = macro implWBC - ^ + def fooWBC: Unit = macro implWBC + ^ scala> def fooRBBC: Unit = macro implRBBC :16: error: macro implementation reference has wrong shape. required: macro [].[[]] or macro [].[[]] note: macro definition is not supported in the REPL when using -Yrepl-classbased. -def fooRBBC: Unit = macro implRBBC - ^ + def fooRBBC: Unit = macro implRBBC + ^ scala> def fooRWBC: Unit = macro implRWBC :16: error: macro implementation reference has wrong shape. required: macro [].[[]] or macro [].[[]] note: macro definition is not supported in the REPL when using -Yrepl-classbased. -def fooRWBC: Unit = macro implRWBC - ^ + def fooRWBC: Unit = macro implRWBC + ^ scala> def fooSRBBC: Unit = macro implSRBBC :16: error: macro implementation reference has wrong shape. required: macro [].[[]] or macro [].[[]] note: macro definition is not supported in the REPL when using -Yrepl-classbased. -def fooSRBBC: Unit = macro implSRBBC - ^ + def fooSRBBC: Unit = macro implSRBBC + ^ scala> def fooSRWBC: Unit = macro implSRWBC :16: error: macro implementation reference has wrong shape. required: macro [].[[]] or macro [].[[]] note: macro definition is not supported in the REPL when using -Yrepl-classbased. -def fooSRWBC: Unit = macro implSRWBC - ^ + def fooSRWBC: Unit = macro implSRWBC + ^ scala> def fooRSRBBC: Unit = macro implRSRBBC :16: error: macro implementation reference has wrong shape. required: macro [].[[]] or macro [].[[]] note: macro definition is not supported in the REPL when using -Yrepl-classbased. -def fooRSRBBC: Unit = macro implRSRBBC - ^ + def fooRSRBBC: Unit = macro implRSRBBC + ^ scala> def fooRSRWBC: Unit = macro implRSRWBC :16: error: macro implementation reference has wrong shape. 
required: macro [].[[]] or macro [].[[]] note: macro definition is not supported in the REPL when using -Yrepl-classbased. -def fooRSRWBC: Unit = macro implRSRWBC - ^ + def fooRSRWBC: Unit = macro implRSRWBC + ^ scala> @@ -144,64 +144,64 @@ scala> def barBBC: Unit = macro MacrosModule.implBBC macro [].[[]] or macro [].[[]] note: macro definition is not supported in the REPL when using -Yrepl-classbased. -def barBBC: Unit = macro MacrosModule.implBBC - ^ + def barBBC: Unit = macro MacrosModule.implBBC + ^ scala> def barWBC: Unit = macro MacrosModule.implWBC :16: error: macro implementation reference has wrong shape. required: macro [].[[]] or macro [].[[]] note: macro definition is not supported in the REPL when using -Yrepl-classbased. -def barWBC: Unit = macro MacrosModule.implWBC - ^ + def barWBC: Unit = macro MacrosModule.implWBC + ^ scala> def barRBBC: Unit = macro MacrosModule.implRBBC :16: error: macro implementation reference has wrong shape. required: macro [].[[]] or macro [].[[]] note: macro definition is not supported in the REPL when using -Yrepl-classbased. -def barRBBC: Unit = macro MacrosModule.implRBBC - ^ + def barRBBC: Unit = macro MacrosModule.implRBBC + ^ scala> def barRWBC: Unit = macro MacrosModule.implRWBC :16: error: macro implementation reference has wrong shape. required: macro [].[[]] or macro [].[[]] note: macro definition is not supported in the REPL when using -Yrepl-classbased. -def barRWBC: Unit = macro MacrosModule.implRWBC - ^ + def barRWBC: Unit = macro MacrosModule.implRWBC + ^ scala> def barSRBBC: Unit = macro MacrosModule.implSRBBC :16: error: macro implementation reference has wrong shape. required: macro [].[[]] or macro [].[[]] note: macro definition is not supported in the REPL when using -Yrepl-classbased. -def barSRBBC: Unit = macro MacrosModule.implSRBBC - ^ + def barSRBBC: Unit = macro MacrosModule.implSRBBC + ^ scala> def barSRWBC: Unit = macro MacrosModule.implSRWBC :16: error: macro implementation reference has wrong shape. required: macro [].[[]] or macro [].[[]] note: macro definition is not supported in the REPL when using -Yrepl-classbased. -def barSRWBC: Unit = macro MacrosModule.implSRWBC - ^ + def barSRWBC: Unit = macro MacrosModule.implSRWBC + ^ scala> def barRSRBBC: Unit = macro MacrosModule.implRSRBBC :16: error: macro implementation reference has wrong shape. required: macro [].[[]] or macro [].[[]] note: macro definition is not supported in the REPL when using -Yrepl-classbased. -def barRSRBBC: Unit = macro MacrosModule.implRSRBBC - ^ + def barRSRBBC: Unit = macro MacrosModule.implRSRBBC + ^ scala> def barRSRWBC: Unit = macro MacrosModule.implRSRWBC :16: error: macro implementation reference has wrong shape. required: macro [].[[]] or macro [].[[]] note: macro definition is not supported in the REPL when using -Yrepl-classbased. -def barRSRWBC: Unit = macro MacrosModule.implRSRWBC - ^ + def barRSRWBC: Unit = macro MacrosModule.implRSRWBC + ^ scala> @@ -254,50 +254,50 @@ scala> scala> def bazBBC: Unit = macro MacroBundleBBC.impl :16: error: macro bundles must be static note: macro definition is not supported in the REPL when using -Yrepl-classbased. -def bazBBC: Unit = macro MacroBundleBBC.impl - ^ + def bazBBC: Unit = macro MacroBundleBBC.impl + ^ scala> def bazWBC: Unit = macro MacroBundleWBC.impl :16: error: macro bundles must be static note: macro definition is not supported in the REPL when using -Yrepl-classbased. 
-def bazWBC: Unit = macro MacroBundleWBC.impl - ^ + def bazWBC: Unit = macro MacroBundleWBC.impl + ^ scala> def bazRBBC: Unit = macro MacroBundleRBBC.impl :16: error: macro bundles must be static note: macro definition is not supported in the REPL when using -Yrepl-classbased. -def bazRBBC: Unit = macro MacroBundleRBBC.impl - ^ + def bazRBBC: Unit = macro MacroBundleRBBC.impl + ^ scala> def bazRWBC: Unit = macro MacroBundleRWBC.impl :16: error: macro bundles must be static note: macro definition is not supported in the REPL when using -Yrepl-classbased. -def bazRWBC: Unit = macro MacroBundleRWBC.impl - ^ + def bazRWBC: Unit = macro MacroBundleRWBC.impl + ^ scala> def bazSRBBC: Unit = macro MacroBundleSRBBC.impl :16: error: macro bundles must be static note: macro definition is not supported in the REPL when using -Yrepl-classbased. -def bazSRBBC: Unit = macro MacroBundleSRBBC.impl - ^ + def bazSRBBC: Unit = macro MacroBundleSRBBC.impl + ^ scala> def bazSRWBC: Unit = macro MacroBundleSRWBC.impl :16: error: macro bundles must be static note: macro definition is not supported in the REPL when using -Yrepl-classbased. -def bazSRWBC: Unit = macro MacroBundleSRWBC.impl - ^ + def bazSRWBC: Unit = macro MacroBundleSRWBC.impl + ^ scala> def bazRSRBBC: Unit = macro MacroBundleRSRBBC.impl :16: error: macro bundles must be static note: macro definition is not supported in the REPL when using -Yrepl-classbased. -def bazRSRBBC: Unit = macro MacroBundleRSRBBC.impl - ^ + def bazRSRBBC: Unit = macro MacroBundleRSRBBC.impl + ^ scala> def bazRSRWBC: Unit = macro MacroBundleRSRWBC.impl :16: error: macro bundles must be static note: macro definition is not supported in the REPL when using -Yrepl-classbased. -def bazRSRWBC: Unit = macro MacroBundleRSRWBC.impl - ^ + def bazRSRWBC: Unit = macro MacroBundleRSRWBC.impl + ^ scala> // diff --git a/test/files/run/repl-colon-type.check b/test/files/run/repl-colon-type.check index 507f64def4ee..55bfec241250 100644 --- a/test/files/run/repl-colon-type.check +++ b/test/files/run/repl-colon-type.check @@ -1,8 +1,8 @@ scala> :type List[1, 2, 3] :1: error: identifier expected but integer literal found. 
-List[1, 2, 3] - ^ + List[1, 2, 3] + ^ scala> :type List(1, 2, 3) List[Int] @@ -38,8 +38,8 @@ scala> :type protected lazy val f = 5 Access to protected lazy value f not permitted because enclosing object $eval in package $line13 is not a subclass of object $iw where target is defined - lazy val $result = f - ^ + lazy val $result = f + ^ scala> :type def f = 5 => Int diff --git a/test/files/run/repl-no-imports-no-predef.check b/test/files/run/repl-no-imports-no-predef.check index 77655db173ba..c3dc93541b8d 100644 --- a/test/files/run/repl-no-imports-no-predef.check +++ b/test/files/run/repl-no-imports-no-predef.check @@ -23,13 +23,13 @@ res6: (Int, Int) = (1,2) scala> 1 -> 2 :12: error: value -> is not a member of Int -1 -> 2 - ^ + 1 -> 2 + ^ scala> 1 → 2 :12: error: value → is not a member of Int -1 → 2 - ^ + 1 → 2 + ^ scala> @@ -41,8 +41,8 @@ res9: String = answer: 42 scala> s"answer: $answer" :13: error: not found: value StringContext -s"answer: $answer" -^ + s"answer: $answer" + ^ scala> @@ -56,8 +56,8 @@ res12: String = trueabc scala> true + "abc" :12: error: value + is not a member of Boolean -true + "abc" - ^ + true + "abc" + ^ scala> @@ -77,14 +77,14 @@ scala> scala> 2 ; 3 :11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses -2 ;; -^ + 2 ;; + ^ res14: Int = 3 scala> { 2 ; 3 } :12: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses -{ 2 ; 3 } - ^ + { 2 ; 3 } + ^ res15: Int = 3 scala> 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def @@ -93,17 +93,17 @@ bippy = { 2 + 3 } ; bippy+88+11 :11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses -5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def -^ + 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def + ^ :11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses -5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def - ^ + 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def + ^ :11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses -5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def - ^ + 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def + ^ :11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses -5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def - ^ + 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def + ^ defined object Cow defined class Moo bippy: Int @@ -144,11 +144,11 @@ res24: Int = 4 scala> 5 ; ( (2 + 2 ) ) ; ((5)) :11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses -5 ; ( (2 + 2 ) ) ;; -^ + 5 ; ( (2 + 2 ) ) ;; + ^ :11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses -5 ; ( (2 + 2 ) ) ;; - ^ + 5 ; ( (2 + 2 ) ) ;; + ^ res25: Int = 5 scala> (((2 + 2)), ((2 + 2))) @@ -164,17 +164,17 @@ scala> scala> 55 ; ((2 + 2)) ; (1, 2, 3) :11: warning: a 
pure expression does nothing in statement position; multiline expressions may require enclosing parentheses -55 ; ((2 + 2)) ;; -^ + 55 ; ((2 + 2)) ;; + ^ :11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses -55 ; ((2 + 2)) ;; - ^ + 55 ; ((2 + 2)) ;; + ^ res29: (Int, Int, Int) = (1,2,3) scala> 55 ; (x: scala.Int) => x + 1 ; () => ((5)) :12: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses -55 ; (x: scala.Int) => x + 1 ;; -^ + 55 ; (x: scala.Int) => x + 1 ;; + ^ res30: () => Int = scala> @@ -184,8 +184,8 @@ res31: () => Int = scala> 55 ; () => 5 :11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses -55 ;; -^ + 55 ;; + ^ res32: () => Int = scala> () => { class X ; new X } @@ -334,22 +334,22 @@ Forgetting defined types: BippyBungus, C, D, Dingus, Moo, Ruminant scala> x1 + x2 + x3 :12: error: not found: value x1 -x1 + x2 + x3 -^ + x1 + x2 + x3 + ^ :12: error: not found: value x2 -x1 + x2 + x3 - ^ + x1 + x2 + x3 + ^ :12: error: not found: value x3 -x1 + x2 + x3 - ^ + x1 + x2 + x3 + ^ scala> val x1 = 4 x1: Int = 4 scala> new BippyBungus :12: error: not found: type BippyBungus -new BippyBungus - ^ + new BippyBungus + ^ scala> class BippyBungus() { def f = 5 } defined class BippyBungus diff --git a/test/files/run/repl-parens.check b/test/files/run/repl-parens.check index d6065bb1faff..b9871cfba17f 100644 --- a/test/files/run/repl-parens.check +++ b/test/files/run/repl-parens.check @@ -19,11 +19,11 @@ res5: Int = 4 scala> 5 ; ( (2 + 2 ) ) ; ((5)) :11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses -5 ; ( (2 + 2 ) ) ;; -^ + 5 ; ( (2 + 2 ) ) ;; + ^ :11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses -5 ; ( (2 + 2 ) ) ;; - ^ + 5 ; ( (2 + 2 ) ) ;; + ^ res6: Int = 5 scala> (((2 + 2)), ((2 + 2))) @@ -39,17 +39,17 @@ scala> scala> 55 ; ((2 + 2)) ; (1, 2, 3) :11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses -55 ; ((2 + 2)) ;; -^ + 55 ; ((2 + 2)) ;; + ^ :11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses -55 ; ((2 + 2)) ;; - ^ + 55 ; ((2 + 2)) ;; + ^ res10: (Int, Int, Int) = (1,2,3) scala> 55 ; (x: Int) => x + 1 ; () => ((5)) :11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses -55 ; (x: Int) => x + 1 ;; -^ + 55 ; (x: Int) => x + 1 ;; + ^ res11: () => Int = scala> @@ -59,8 +59,8 @@ res12: () => Int = scala> 55 ; () => 5 :11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses -55 ;; -^ + 55 ;; + ^ res13: () => Int = scala> () => { class X ; new X } diff --git a/test/files/run/repl-paste-2.check b/test/files/run/repl-paste-2.check index 21bb2c5140f5..4c589df41a58 100644 --- a/test/files/run/repl-paste-2.check +++ b/test/files/run/repl-paste-2.check @@ -43,8 +43,8 @@ res1: Int = 690 scala> val x = dingus :11: error: not found: value dingus -val x = dingus - ^ + val x = dingus + ^ scala> val x = "dingus" x: String = dingus diff --git a/test/files/run/repl-reset.check b/test/files/run/repl-reset.check index ac9d70d3e85f..cf4d9a149e29 100644 --- a/test/files/run/repl-reset.check +++ 
b/test/files/run/repl-reset.check @@ -29,22 +29,22 @@ Forgetting defined types: BippyBungus scala> x1 + x2 + x3 :12: error: not found: value x1 -x1 + x2 + x3 -^ + x1 + x2 + x3 + ^ :12: error: not found: value x2 -x1 + x2 + x3 - ^ + x1 + x2 + x3 + ^ :12: error: not found: value x3 -x1 + x2 + x3 - ^ + x1 + x2 + x3 + ^ scala> val x1 = 4 x1: Int = 4 scala> new BippyBungus :12: error: not found: type BippyBungus -new BippyBungus - ^ + new BippyBungus + ^ scala> class BippyBungus() { def f = 5 } defined class BippyBungus diff --git a/test/files/run/t11402.check b/test/files/run/t11402.check index 9deee35f9c29..ba381609869f 100644 --- a/test/files/run/t11402.check +++ b/test/files/run/t11402.check @@ -6,8 +6,8 @@ This can be achieved by adding the import clause 'import scala.language.postfixO or by setting the compiler option -language:postfixOps. See the Scaladoc for value scala.language.postfixOps for a discussion why the feature should be explicitly enabled. -import scala.concurrent.duration._; val t = 1 second - ^ + import scala.concurrent.duration._; val t = 1 second + ^ import scala.concurrent.duration._ t: scala.concurrent.duration.FiniteDuration = 1 second diff --git a/test/files/run/t12354.check b/test/files/run/t12354.check index 3109db8938cb..4d558713c0c1 100644 --- a/test/files/run/t12354.check +++ b/test/files/run/t12354.check @@ -1,14 +1,14 @@ scala> case class C(implicit x: Int) :11: warning: case classes should have a non-implicit parameter list; adapting to 'case class C()(...)' -case class C(implicit x: Int) - ^ + case class C(implicit x: Int) + ^ defined class C scala> for {x <- Nil; val y = 1} yield y :12: warning: val keyword in for comprehension is deprecated -for {x <- Nil; val y = 1} yield y - ^ + for {x <- Nil; val y = 1} yield y + ^ res0: List[Int] = List() scala> :quit diff --git a/test/files/run/t1931.check b/test/files/run/t1931.check index 441bdfaedfd5..c8cbbe969f1a 100644 --- a/test/files/run/t1931.check +++ b/test/files/run/t1931.check @@ -10,8 +10,8 @@ import Predef.{any2stringadd=>_, _} scala> x + " works" :14: error: value + is not a member of Any -x + " works" - ^ + x + " works" + ^ scala> import Predef._ import Predef._ @@ -27,8 +27,8 @@ import Predef._ scala> f :14: error: not found: value f -f -^ + f + ^ scala> Predef.f res4: Int = 42 diff --git a/test/files/run/t4542.check b/test/files/run/t4542.check index cfc551de9fd2..942de545b517 100644 --- a/test/files/run/t4542.check +++ b/test/files/run/t4542.check @@ -6,8 +6,8 @@ defined class Foo scala> val f = new Foo :12: warning: class Foo is deprecated (since ReplTest version 1.0-FINAL): foooo -val f = new Foo - ^ + val f = new Foo + ^ f: Foo = Bippy scala> :quit diff --git a/test/files/run/t4594-repl-settings.check b/test/files/run/t4594-repl-settings.check index 2dc499252806..e1bbff94f6db 100644 --- a/test/files/run/t4594-repl-settings.check +++ b/test/files/run/t4594-repl-settings.check @@ -10,8 +10,8 @@ scala> :settings -deprecation scala> def b = depp :12: warning: method depp is deprecated (since Time began.): Please don't do that. 
-def b = depp - ^ + def b = depp + ^ b: String scala> :quit diff --git a/test/files/run/t5655.check b/test/files/run/t5655.check index 2aa8000e73ce..eee95004e3a3 100644 --- a/test/files/run/t5655.check +++ b/test/files/run/t5655.check @@ -10,15 +10,15 @@ scala> x it is imported twice in the same scope by import x._ and import x -x -^ + x + ^ scala> x :16: error: reference to x is ambiguous; it is imported twice in the same scope by import x._ and import x -x -^ + x + ^ scala> :quit diff --git a/test/files/run/t7319.check b/test/files/run/t7319.check index d4546e2fc425..e9cf56613784 100644 --- a/test/files/run/t7319.check +++ b/test/files/run/t7319.check @@ -20,20 +20,20 @@ scala> convert(Some[Int](0)) argument expression's type is not compatible with formal parameter type; found : Some[Int] required: ?F[_$1] forSome { type _$1 <: ?F[_$2] forSome { type _$2 } } -convert(Some[Int](0)) -^ + convert(Some[Int](0)) + ^ :15: error: type mismatch; found : Some[Int] required: F[_ <: F[_]] -convert(Some[Int](0)) - ^ + convert(Some[Int](0)) + ^ scala> Range(1,2).toArray: Seq[_] :14: error: polymorphic expression cannot be instantiated to expected type; found : [B >: Int]Array[B] required: Seq[_] -Range(1,2).toArray: Seq[_] - ^ + Range(1,2).toArray: Seq[_] + ^ scala> 0 res2: Int = 0 diff --git a/test/files/run/t7747-repl.check b/test/files/run/t7747-repl.check index 8e2f0e23c11f..7969a45d746e 100644 --- a/test/files/run/t7747-repl.check +++ b/test/files/run/t7747-repl.check @@ -16,14 +16,14 @@ z: Int = 156 scala> 2 ; 3 :11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses -2 ;; -^ + 2 ;; + ^ res0: Int = 3 scala> { 2 ; 3 } :12: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses -{ 2 ; 3 } - ^ + { 2 ; 3 } + ^ res1: Int = 3 scala> 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { @@ -31,17 +31,17 @@ scala> 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Mooo 2 + 3 } ; bippy+88+11 :11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses -5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { -^ + 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { + ^ :11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses -5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { - ^ + 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { + ^ :11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses -5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { - ^ + 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { + ^ :11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses -5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { - ^ + 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { + ^ defined object Cow defined class Moo bippy: Int @@ -82,11 +82,11 @@ res10: Int = 4 scala> 5 ; ( 
(2 + 2 ) ) ; ((5)) :11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses -5 ; ( (2 + 2 ) ) ;; -^ + 5 ; ( (2 + 2 ) ) ;; + ^ :11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses -5 ; ( (2 + 2 ) ) ;; - ^ + 5 ; ( (2 + 2 ) ) ;; + ^ res11: Int = 5 scala> (((2 + 2)), ((2 + 2))) @@ -102,17 +102,17 @@ scala> scala> 55 ; ((2 + 2)) ; (1, 2, 3) :11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses -55 ; ((2 + 2)) ;; -^ + 55 ; ((2 + 2)) ;; + ^ :11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses -55 ; ((2 + 2)) ;; - ^ + 55 ; ((2 + 2)) ;; + ^ res15: (Int, Int, Int) = (1,2,3) scala> 55 ; (x: Int) => x + 1 ; () => ((5)) :12: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses -55 ; (x: Int) => x + 1 ;; -^ + 55 ; (x: Int) => x + 1 ;; + ^ res16: () => Int = scala> @@ -122,8 +122,8 @@ res17: () => Int = scala> 55 ; () => 5 :11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses -55 ;; -^ + 55 ;; + ^ res18: () => Int = scala> () => { class X ; new X } @@ -210,22 +210,22 @@ Forgetting defined types: BippyBungus, Moo, Ruminant scala> x1 + x2 + x3 :12: error: not found: value x1 -x1 + x2 + x3 -^ + x1 + x2 + x3 + ^ :12: error: not found: value x2 -x1 + x2 + x3 - ^ + x1 + x2 + x3 + ^ :12: error: not found: value x3 -x1 + x2 + x3 - ^ + x1 + x2 + x3 + ^ scala> val x1 = 4 x1: Int = 4 scala> new BippyBungus :12: error: not found: type BippyBungus -new BippyBungus - ^ + new BippyBungus + ^ scala> class BippyBungus() { def f = 5 } defined class BippyBungus diff --git a/test/files/run/t8918-unary-ids.check b/test/files/run/t8918-unary-ids.check index f3540be9d14a..e85ebb5fdf43 100644 --- a/test/files/run/t8918-unary-ids.check +++ b/test/files/run/t8918-unary-ids.check @@ -10,13 +10,13 @@ res0: Int = -42 scala> - if (true) 1 else 2 :1: error: illegal start of simple expression -- if (true) 1 else 2 - ^ + - if (true) 1 else 2 + ^ scala> - - 1 :1: error: ';' expected but integer literal found. 
-- - 1 - ^ + - - 1 + ^ scala> -.-(1) res1: Int = 41 diff --git a/test/files/run/t9170.check b/test/files/run/t9170.check index 22b29d4657a8..7b3c6203e0d2 100644 --- a/test/files/run/t9170.check +++ b/test/files/run/t9170.check @@ -4,16 +4,16 @@ scala> object Y { def f[A](a: => A) = 1 ; def f[A](a: => Either[Exception, A]) = def f[A](a: => A): Int at line 11 and def f[A](a: => Either[Exception,A]): Int at line 11 have same type after erasure: (a: Function0)Int -object Y { def f[A](a: => A) = 1 ; def f[A](a: => Either[Exception, A]) = 2 } - ^ + object Y { def f[A](a: => A) = 1 ; def f[A](a: => Either[Exception, A]) = 2 } + ^ scala> object Y { def f[A](a: => A) = 1 ; def f[A](a: => Either[Exception, A]) = 2 } :11: error: double definition: def f[A](a: => A): Int at line 11 and def f[A](a: => Either[Exception,A]): Int at line 11 have same type after erasure: (a: Function0)Int -object Y { def f[A](a: => A) = 1 ; def f[A](a: => Either[Exception, A]) = 2 } - ^ + object Y { def f[A](a: => A) = 1 ; def f[A](a: => Either[Exception, A]) = 2 } + ^ scala> object Y { | def f[A](a: => A) = 1 @@ -23,8 +23,8 @@ scala> object Y { def f[A](a: => A): Int at line 12 and def f[A](a: => Either[Exception,A]): Int at line 13 have same type after erasure: (a: Function0)Int - def f[A](a: => Either[Exception, A]) = 2 - ^ + def f[A](a: => Either[Exception, A]) = 2 + ^ scala> :pa // Entering paste mode (ctrl-D to finish) diff --git a/test/files/run/t9206.check b/test/files/run/t9206.check index cf3488c73526..269f968ca9cc 100644 --- a/test/files/run/t9206.check +++ b/test/files/run/t9206.check @@ -3,14 +3,14 @@ scala> val i: Int = "foo" :11: error: type mismatch; found : String("foo") required: Int -val i: Int = "foo" - ^ + val i: Int = "foo" + ^ scala> { val j = 42 ; val i: Int = "foo" + j } :12: error: type mismatch; found : String required: Int -{ val j = 42 ; val i: Int = "foo" + j } - ^ + { val j = 42 ; val i: Int = "foo" + j } + ^ scala> :quit diff --git a/test/files/run/xMigration.check b/test/files/run/xMigration.check index 0ddc8996cf2b..b812d6a282f5 100644 --- a/test/files/run/xMigration.check +++ b/test/files/run/xMigration.check @@ -12,8 +12,8 @@ scala> :setting -Xmigration:any scala> Map(1 -> "eis").values // warn :12: warning: method values in trait MapLike has changed semantics in version 2.8.0: `values` returns `Iterable[V]` rather than `Iterator[V]`. -Map(1 -> "eis").values // warn - ^ + Map(1 -> "eis").values // warn + ^ res2: Iterable[String] = MapLike.DefaultValuesIterable(eis) scala> :setting -Xmigration:2.8 @@ -26,8 +26,8 @@ scala> :setting -Xmigration:2.7 scala> Map(1 -> "eis").values // warn :12: warning: method values in trait MapLike has changed semantics in version 2.8.0: `values` returns `Iterable[V]` rather than `Iterator[V]`. -Map(1 -> "eis").values // warn - ^ + Map(1 -> "eis").values // warn + ^ res4: Iterable[String] = MapLike.DefaultValuesIterable(eis) scala> :setting -Xmigration:2.11 @@ -40,8 +40,8 @@ scala> :setting -Xmigration // same as :any scala> Map(1 -> "eis").values // warn :12: warning: method values in trait MapLike has changed semantics in version 2.8.0: `values` returns `Iterable[V]` rather than `Iterator[V]`. 
-Map(1 -> "eis").values // warn - ^ + Map(1 -> "eis").values // warn + ^ res6: Iterable[String] = MapLike.DefaultValuesIterable(eis) scala> :quit From 81d0b88b8d42b1e88cd6bdcf040fa1f86cb8e08d Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Fri, 22 Jan 2021 16:55:32 +0100 Subject: [PATCH 018/769] Store suspended warnings per compilation unit Store suspended warnings per compilation unit and report them in `typerPhase.apply(unit)` instead of `typerPhase.run` - the latter is not invoked when using the presentation compiler. --- .../scala/tools/nsc/CompilationUnits.scala | 12 +++- src/compiler/scala/tools/nsc/Global.scala | 4 +- src/compiler/scala/tools/nsc/Reporting.scala | 67 ++++++++++--------- .../tools/nsc/typechecker/Analyzer.scala | 6 +- .../tools/nsc/typechecker/Contexts.scala | 38 ++++++++--- .../scala/tools/nsc/typechecker/Typers.scala | 2 +- test/files/neg/annot-nonconst.check | 12 ++-- test/files/neg/badtok-1-212.check | 6 +- test/files/neg/for-comprehension-old.check | 24 +++---- test/files/neg/nested-annotation.check | 6 +- test/files/neg/t10678.check | 6 +- test/files/neg/t6082.check | 10 +-- test/files/neg/t6083.check | 6 +- test/files/neg/t6675b.check | 28 ++++---- test/files/neg/t8704.check | 6 +- test/scaladoc/run/t5527.check | 21 +++--- 16 files changed, 145 insertions(+), 109 deletions(-) diff --git a/src/compiler/scala/tools/nsc/CompilationUnits.scala b/src/compiler/scala/tools/nsc/CompilationUnits.scala index 35a10b3feafd..f05cc719ec5b 100644 --- a/src/compiler/scala/tools/nsc/CompilationUnits.scala +++ b/src/compiler/scala/tools/nsc/CompilationUnits.scala @@ -12,9 +12,10 @@ package scala.tools.nsc -import scala.reflect.internal.util.{ SourceFile, NoSourceFile, FreshNameCreator } import scala.collection.mutable -import scala.collection.mutable.{ LinkedHashSet, ListBuffer } +import scala.collection.mutable.{LinkedHashSet, ListBuffer} +import scala.reflect.internal.util.{FreshNameCreator, NoSourceFile, SourceFile} +import scala.tools.nsc.Reporting.Message trait CompilationUnits { global: Global => @@ -127,6 +128,13 @@ trait CompilationUnits { global: Global => /** things to check at end of compilation unit */ val toCheck = new ListBuffer[() => Unit] + var suspendMessages = true + private[this] var _suspendedMessages: mutable.LinkedHashSet[Message] = null + def suspendedMessages: mutable.LinkedHashSet[Message] = { + if (_suspendedMessages == null) _suspendedMessages = mutable.LinkedHashSet.empty + _suspendedMessages + } + /** The features that were already checked for this unit */ var checkedFeatures = Set[Symbol]() diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 74a9454a80b5..6497beae19a0 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -1549,8 +1549,10 @@ class Global(var currentSettings: Settings, reporter0: Reporter) if (settings.YstatisticsEnabled && settings.Ystatistics.contains(phase.name)) printStatisticsFor(phase) - if (!globalPhase.hasNext || reporter.hasErrors) + if (!globalPhase.hasNext || reporter.hasErrors) { + units.foreach(unit => unit.suspendedMessages.foreach(runReporting.issueIfNotSuppressed)) runReporting.warnUnusedSuppressions() + } advancePhase() } diff --git a/src/compiler/scala/tools/nsc/Reporting.scala b/src/compiler/scala/tools/nsc/Reporting.scala index 7922ab33f531..c86a2d46b380 100644 --- a/src/compiler/scala/tools/nsc/Reporting.scala +++ b/src/compiler/scala/tools/nsc/Reporting.scala @@ -55,9 +55,7 @@ trait Reporting extends 
scala.reflect.internal.Reporting { self: ast.Positions w private val summarizedWarnings: mutable.Map[WarningCategory, mutable.LinkedHashMap[Position, Message]] = mutable.HashMap.empty private val summarizedInfos: mutable.Map[WarningCategory, mutable.LinkedHashMap[Position, Message]] = mutable.HashMap.empty - private var suppressionsComplete = false private val suppressions: mutable.LinkedHashMap[SourceFile, mutable.ListBuffer[Suppression]] = mutable.LinkedHashMap.empty - private val suspendedMessages: mutable.LinkedHashSet[Message] = mutable.LinkedHashSet.empty private def isSuppressed(warning: Message): Boolean = suppressions.getOrElse(warning.pos.source, Nil).find(_.matches(warning)) match { @@ -74,8 +72,6 @@ trait Reporting extends scala.reflect.internal.Reporting { self: ast.Positions w suppressions.getOrElse(pos.source, Nil).exists(_.annotPos.point == pos.point) def warnUnusedSuppressions(): Unit = { - // if we stop before typer completes (errors in parser, Ystop), report all suspended messages - suspendedMessages.foreach(issueWarning) if (settings.warnUnusedNowarn && !settings.isScaladoc) { // scaladoc doesn't run all phases, so not all warnings are emitted val sources = suppressions.keysIterator.toList for (source <- sources; sups <- suppressions.remove(source); sup <- sups.reverse) { @@ -85,15 +81,11 @@ trait Reporting extends scala.reflect.internal.Reporting { self: ast.Positions w } } - def reportSuspendedMessages(): Unit = { - suppressionsComplete = true + def reportSuspendedMessages(unit: CompilationUnit): Unit = { // sort suppressions. they are not added in any particular order because of lazy type completion - suppressions.transform((_, sups) => sups.sortBy(sup => 0 - sup.start)) - suspendedMessages.foreach { m => - if (!isSuppressed(m)) - issueWarning(m) - } - suspendedMessages.clear() + for (sups <- suppressions.get(unit.source)) + suppressions(unit.source) = sups.sortBy(sup => 0 - sup.start) + unit.suspendedMessages.foreach(issueIfNotSuppressed) } private def summaryMap(action: Action, category: WarningCategory) = { @@ -122,13 +114,9 @@ trait Reporting extends scala.reflect.internal.Reporting { self: ast.Positions w } } - private def checkSuppressedAndIssue(warning: Message): Unit = { - if (suppressionsComplete) { - if (!isSuppressed(warning)) - issueWarning(warning) - } else - suspendedMessages += warning - } + def issueIfNotSuppressed(warning: Message): Unit = + if (!isSuppressed(warning)) + issueWarning(warning) private def summarize(action: Action, category: WarningCategory): Unit = { def rerunMsg: String = { @@ -188,19 +176,28 @@ trait Reporting extends scala.reflect.internal.Reporting { self: ast.Positions w impl(sym) } else "" - def deprecationWarning(pos: Position, msg: String, since: String, site: String, origin: String): Unit = - checkSuppressedAndIssue(Message.Deprecation(pos, msg, site, origin, Version.fromString(since))) + def deprecationWarningMessage(pos: Position, msg: String, since: String, site: String, origin: String): Message = + Message.Deprecation(pos, msg, site, origin, Version.fromString(since)) - def deprecationWarning(pos: Position, origin: Symbol, site: Symbol, msg: String, since: String): Unit = - deprecationWarning(pos, msg, since, siteName(site), siteName(origin)) + def deprecationWarningMessage(pos: Position, origin: Symbol, site: Symbol, msg: String, since: String): Message = + deprecationWarningMessage(pos, msg, since, siteName(site), siteName(origin)) - def deprecationWarning(pos: Position, origin: Symbol, site: Symbol): Unit = { + def 
deprecationWarningMessage(pos: Position, origin: Symbol, site: Symbol): Message = { val version = origin.deprecationVersion.getOrElse("") val since = if (version.isEmpty) version else s" (since $version)" val message = origin.deprecationMessage.map(": " + _).getOrElse("") - deprecationWarning(pos, origin, site, s"$origin${origin.locationString} is deprecated$since$message", version) + deprecationWarningMessage(pos, origin, site, s"$origin${origin.locationString} is deprecated$since$message", version) } + def deprecationWarning(pos: Position, msg: String, since: String, site: String, origin: String): Unit = + issueIfNotSuppressed(deprecationWarningMessage(pos, msg, since, site, origin)) + + def deprecationWarning(pos: Position, origin: Symbol, site: Symbol, msg: String, since: String): Unit = + issueIfNotSuppressed(deprecationWarningMessage(pos, origin, site, msg, since)) + + def deprecationWarning(pos: Position, origin: Symbol, site: Symbol): Unit = + issueIfNotSuppressed(deprecationWarningMessage(pos, origin, site)) + private[this] var reportedFeature = Set[Symbol]() // we don't have access to runDefinitions here, so mapping from strings instead of feature symbols private val featureCategory: Map[String, WarningCategory.Feature] = { @@ -215,7 +212,8 @@ trait Reporting extends scala.reflect.internal.Reporting { self: ast.Positions w ("macros", FeatureMacros) ).withDefaultValue(Feature) } - def featureWarning(pos: Position, featureName: String, featureDesc: String, featureTrait: Symbol, construct: => String = "", required: Boolean, site: Symbol): Unit = { + + def featureWarningMessage(pos: Position, featureName: String, featureDesc: String, featureTrait: Symbol, construct: => String = "", required: Boolean, site: Symbol): Option[Message] = { val req = if (required) "needs to" else "should" val fqname = "scala.language." + featureName val explain = ( @@ -239,17 +237,26 @@ trait Reporting extends scala.reflect.internal.Reporting { self: ast.Positions w && parentFileName(pos.source).getOrElse("") == "xsbt" && Thread.currentThread.getStackTrace.exists(_.getClassName.startsWith("sbt.")) ) - if (required && !isSbtCompat) reporter.error(pos, msg) - else warning(pos, msg, featureCategory(featureTrait.nameString), site) + if (required && !isSbtCompat) { reporter.error(pos, msg); None } + else Some(warningMessage(pos, msg, featureCategory(featureTrait.nameString), site)) } + def featureWarning(pos: Position, featureName: String, featureDesc: String, featureTrait: Symbol, construct: => String = "", required: Boolean, site: Symbol): Unit = + featureWarningMessage(pos, featureName, featureDesc, featureTrait, construct, required, site).foreach(issueIfNotSuppressed) + + def warningMessage(pos: Position, msg: String, category: WarningCategory, site: String): Message = + Message.Plain(pos, msg, category, site) + + def warningMessage(pos: Position, msg: String, category: WarningCategory, site: Symbol): Message = + warningMessage(pos, msg, category, siteName(site)) + // Used in the optimizer where we don't have no symbols, the site string is created from the class internal name and method name. 
def warning(pos: Position, msg: String, category: WarningCategory, site: String): Unit = - checkSuppressedAndIssue(Message.Plain(pos, msg, category, site)) + issueIfNotSuppressed(warningMessage(pos, msg, category, site)) // Preferred over the overload above whenever a site symbol is available def warning(pos: Position, msg: String, category: WarningCategory, site: Symbol): Unit = - warning(pos, msg, category, siteName(site)) + issueIfNotSuppressed(warningMessage(pos, msg, category, site)) // used by Global.deprecationWarnings, which is used by sbt def deprecationWarnings: List[(Position, String)] = summaryMap(Action.WarningSummary, WarningCategory.Deprecation).toList.map(p => (p._1, p._2.msg)) diff --git a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala index 36c1b3ed3e4d..f9fdd7a08b87 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala @@ -108,8 +108,8 @@ trait Analyzer extends AnyRef // defensive measure in case the bookkeeping in deferred macro expansion is buggy clearDelayed() if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(statistics.typerNanos, start) - runReporting.reportSuspendedMessages() } + def apply(unit: CompilationUnit) { try { val typer = newTyper(rootContext(unit)) @@ -121,9 +121,13 @@ trait Analyzer extends AnyRef if (settings.warnUnused.isSetByUser) new checkUnused(typer).apply(unit) } + if (unit.suspendMessages) + runReporting.reportSuspendedMessages(unit) } finally { unit.toCheck.clear() + unit.suspendMessages = false + unit.suspendedMessages.clear() } } } diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index df04cf16ceec..8cc3d41942aa 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -25,6 +25,7 @@ import scala.tools.nsc.Reporting.WarningCategory trait Contexts { self: Analyzer => import global._ import definitions.{ JavaLangPackage, ScalaPackage, PredefModule, ScalaXmlTopScope, ScalaXmlPackage } + import ContextMode._ protected def onTreeCheckerError(pos: Position, msg: String): Unit = () @@ -605,8 +606,8 @@ trait Contexts { self: Analyzer => /** Issue/throw the given error message according to the current mode for error reporting. */ def error(pos: Position, msg: String) = reporter.error(fixPosition(pos), msg) /** Issue/throw the given error message according to the current mode for error reporting. 
*/ - def warning(pos: Position, msg: String, category: WarningCategory) = reporter.warning(fixPosition(pos), msg, category, owner) - def warning(pos: Position, msg: String, category: WarningCategory, site: Symbol) = reporter.warning(fixPosition(pos), msg, category, site) + def warning(pos: Position, msg: String, category: WarningCategory) = reporter.warning(fixPosition(pos), msg, category, owner, this) + def warning(pos: Position, msg: String, category: WarningCategory, site: Symbol) = reporter.warning(fixPosition(pos), msg, category, site, this) def echo(pos: Position, msg: String) = reporter.echo(fixPosition(pos), msg) def fixPosition(pos: Position): Position = pos match { case NoPosition => nextEnclosing(_.tree.pos != NoPosition).tree.pos @@ -616,12 +617,22 @@ trait Contexts { self: Analyzer => // TODO: buffer deprecations under silent (route through ContextReporter, store in BufferingReporter) def deprecationWarning(pos: Position, sym: Symbol, msg: String, since: String): Unit = - runReporting.deprecationWarning(fixPosition(pos), sym, owner, msg, since) + if (unit.suspendMessages) + unit.suspendedMessages += runReporting.deprecationWarningMessage(fixPosition(pos), sym, owner, msg, since) + else + runReporting.deprecationWarning(fixPosition(pos), sym, owner, msg, since) + def deprecationWarning(pos: Position, sym: Symbol): Unit = - runReporting.deprecationWarning(fixPosition(pos), sym, owner) // TODO: allow this to escalate to an error, and implicit search will ignore deprecated implicits + if (unit.suspendMessages) + unit.suspendedMessages += runReporting.deprecationWarningMessage(fixPosition(pos), sym, owner) + else + runReporting.deprecationWarning(fixPosition(pos), sym, owner) def featureWarning(pos: Position, featureName: String, featureDesc: String, featureTrait: Symbol, construct: => String = "", required: Boolean): Unit = - runReporting.featureWarning(fixPosition(pos), featureName, featureDesc, featureTrait, construct, required, owner) + if (unit.suspendMessages) + unit.suspendedMessages ++= runReporting.featureWarningMessage(fixPosition(pos), featureName, featureDesc, featureTrait, construct, required, owner) + else + runReporting.featureWarning(fixPosition(pos), featureName, featureDesc, featureTrait, construct, required, owner) // nextOuter determines which context is searched next for implicits @@ -1358,8 +1369,11 @@ trait Contexts { self: Analyzer => def echo(msg: String): Unit = echo(NoPosition, msg) def echo(pos: Position, msg: String): Unit = reporter.echo(pos, msg) - def warning(pos: Position, msg: String, category: WarningCategory, site: Symbol): Unit = - runReporting.warning(pos, msg, category, site) + def warning(pos: Position, msg: String, category: WarningCategory, site: Symbol, context: Context): Unit = + if (context.unit.suspendMessages) + context.unit.suspendedMessages += runReporting.warningMessage(pos, msg, category, site) + else + runReporting.warning(pos, msg, category, site) def error(pos: Position, msg: String): Unit @@ -1452,9 +1466,13 @@ trait Contexts { self: Analyzer => else msg } - final def emitWarnings() = if (_warningBuffer != null) { + final def emitWarnings(context: Context) = if (_warningBuffer != null) { _warningBuffer foreach { - case (pos, msg, category, site) => runReporting.warning(pos, msg, category, site) + case (pos, msg, category, site) => + if (context.unit.suspendMessages) + context.unit.suspendedMessages += runReporting.warningMessage(pos, msg, category, site) + else + runReporting.warning(pos, msg, category, site) } _warningBuffer = 
null } @@ -1492,7 +1510,7 @@ trait Contexts { self: Analyzer => // the old throwing behavior was relied on by diagnostics in manifestOfType def error(pos: Position, msg: String): Unit = errorBuffer += TypeErrorWrapper(new TypeError(pos, msg)) - override def warning(pos: Position, msg: String, category: WarningCategory, site: Symbol): Unit = + override def warning(pos: Position, msg: String, category: WarningCategory, site: Symbol, context: Context): Unit = warningBuffer += ((pos, msg, category, site)) override protected def handleSuppressedAmbiguous(err: AbsAmbiguousTypeError): Unit = errorBuffer += err diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 9843d03d12ae..b115242aaef7 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -716,7 +716,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // If we have a successful result, emit any warnings it created. if (!context1.reporter.hasErrors) - context1.reporter.emitWarnings() + context1.reporter.emitWarnings(context1) wrapResult(context1.reporter, result) } else { diff --git a/test/files/neg/annot-nonconst.check b/test/files/neg/annot-nonconst.check index 58a13b10e9c3..a96eb08df5f3 100644 --- a/test/files/neg/annot-nonconst.check +++ b/test/files/neg/annot-nonconst.check @@ -1,9 +1,3 @@ -annot-nonconst.scala:6: error: annotation argument needs to be a constant; found: Test.this.n - @Length(n) def foo = "foo" - ^ -annot-nonconst.scala:7: error: annotation argument cannot be null - @Ann2(null) def bar = "bar" - ^ annot-nonconst.scala:1: warning: Implementation restriction: subclassing ClassfileAnnotation does not make your annotation visible at runtime. If that is what you want, you must write the annotation class in Java. @@ -14,5 +8,11 @@ make your annotation visible at runtime. If that is what you want, you must write the annotation class in Java. 
class Ann2(value: String) extends annotation.ClassfileAnnotation ^ +annot-nonconst.scala:6: error: annotation argument needs to be a constant; found: Test.this.n + @Length(n) def foo = "foo" + ^ +annot-nonconst.scala:7: error: annotation argument cannot be null + @Ann2(null) def bar = "bar" + ^ two warnings found two errors found diff --git a/test/files/neg/badtok-1-212.check b/test/files/neg/badtok-1-212.check index 754652dd2db5..7e3d7cbdfdf1 100644 --- a/test/files/neg/badtok-1-212.check +++ b/test/files/neg/badtok-1-212.check @@ -4,14 +4,14 @@ badtok-1-212.scala:3: error: unclosed character literal (or use " not ' for stri badtok-1-212.scala:3: error: unclosed character literal (or use " not ' for string literal) '42' ^ +badtok-1-212.scala:7: warning: deprecated syntax for character literal (use '\'' for single quote) +''' +^ badtok-1-212.scala:9: error: empty character literal ''; ^ badtok-1-212.scala:11: error: unclosed character literal ' ^ -badtok-1-212.scala:7: warning: deprecated syntax for character literal (use '\'' for single quote) -''' -^ one warning found four errors found diff --git a/test/files/neg/for-comprehension-old.check b/test/files/neg/for-comprehension-old.check index 47cca09953bc..b863c59538f1 100644 --- a/test/files/neg/for-comprehension-old.check +++ b/test/files/neg/for-comprehension-old.check @@ -1,15 +1,3 @@ -for-comprehension-old.scala:5: error: val in for comprehension must be followed by assignment - for (val x <- 1 to 5 ; y = x) yield x+y // fail - ^ -for-comprehension-old.scala:6: error: val in for comprehension must be followed by assignment - for (val x <- 1 to 5 ; val y = x) yield x+y // fail - ^ -for-comprehension-old.scala:10: error: val in for comprehension must be followed by assignment - for (z <- 1 to 2 ; val x <- 1 to 5 ; y = x) yield x+y // fail - ^ -for-comprehension-old.scala:11: error: val in for comprehension must be followed by assignment - for (z <- 1 to 2 ; val x <- 1 to 5 ; val y = x) yield x+y // fail - ^ for-comprehension-old.scala:4: warning: val keyword in for comprehension is deprecated for (x <- 1 to 5 ; val y = x) yield x+y // fail ^ @@ -22,5 +10,17 @@ for-comprehension-old.scala:9: warning: val keyword in for comprehension is depr for-comprehension-old.scala:11: warning: val keyword in for comprehension is deprecated for (z <- 1 to 2 ; val x <- 1 to 5 ; val y = x) yield x+y // fail ^ +for-comprehension-old.scala:5: error: val in for comprehension must be followed by assignment + for (val x <- 1 to 5 ; y = x) yield x+y // fail + ^ +for-comprehension-old.scala:6: error: val in for comprehension must be followed by assignment + for (val x <- 1 to 5 ; val y = x) yield x+y // fail + ^ +for-comprehension-old.scala:10: error: val in for comprehension must be followed by assignment + for (z <- 1 to 2 ; val x <- 1 to 5 ; y = x) yield x+y // fail + ^ +for-comprehension-old.scala:11: error: val in for comprehension must be followed by assignment + for (z <- 1 to 2 ; val x <- 1 to 5 ; val y = x) yield x+y // fail + ^ four warnings found four errors found diff --git a/test/files/neg/nested-annotation.check b/test/files/neg/nested-annotation.check index 1cd3df5bb054..a3e159ab3dac 100644 --- a/test/files/neg/nested-annotation.check +++ b/test/files/neg/nested-annotation.check @@ -1,10 +1,10 @@ -nested-annotation.scala:8: error: nested classfile annotations must be defined in java; found: inline - @ComplexAnnotation(new inline) def bippy(): Int = 1 - ^ nested-annotation.scala:3: warning: Implementation restriction: subclassing 
ClassfileAnnotation does not make your annotation visible at runtime. If that is what you want, you must write the annotation class in Java. class ComplexAnnotation(val value: Annotation) extends ClassfileAnnotation ^ +nested-annotation.scala:8: error: nested classfile annotations must be defined in java; found: inline + @ComplexAnnotation(new inline) def bippy(): Int = 1 + ^ one warning found one error found diff --git a/test/files/neg/t10678.check b/test/files/neg/t10678.check index d73e3ca30649..09c1bafeac55 100644 --- a/test/files/neg/t10678.check +++ b/test/files/neg/t10678.check @@ -1,11 +1,11 @@ +t10678.scala:5: warning: Using `<:` for `extends` is deprecated +trait U <: T + ^ t10678.scala:7: error: ';' expected but '<:' found. class C <: T { ^ t10678.scala:10: error: ';' expected but '<:' found. object O <: T { ^ -t10678.scala:5: warning: Using `<:` for `extends` is deprecated -trait U <: T - ^ one warning found two errors found diff --git a/test/files/neg/t6082.check b/test/files/neg/t6082.check index 9f757d2db82a..9bd5cb1866b7 100644 --- a/test/files/neg/t6082.check +++ b/test/files/neg/t6082.check @@ -1,13 +1,13 @@ +t6082.scala:1: warning: Implementation restriction: subclassing ClassfileAnnotation does not +make your annotation visible at runtime. If that is what +you want, you must write the annotation class in Java. +class annot(notValue: String) extends annotation.ClassfileAnnotation + ^ t6082.scala:2: error: classfile annotation arguments have to be supplied as named arguments @annot("") class C ^ t6082.scala:2: error: annotation annot is missing argument notValue @annot("") class C ^ -t6082.scala:1: warning: Implementation restriction: subclassing ClassfileAnnotation does not -make your annotation visible at runtime. If that is what -you want, you must write the annotation class in Java. -class annot(notValue: String) extends annotation.ClassfileAnnotation - ^ one warning found two errors found diff --git a/test/files/neg/t6083.check b/test/files/neg/t6083.check index 7116bda41d9a..b9869cd092e1 100644 --- a/test/files/neg/t6083.check +++ b/test/files/neg/t6083.check @@ -1,10 +1,10 @@ -t6083.scala:7: error: annotation argument needs to be a constant; found: conv.i2s(101) -@annot(101) class C - ^ t6083.scala:6: warning: Implementation restriction: subclassing ClassfileAnnotation does not make your annotation visible at runtime. If that is what you want, you must write the annotation class in Java. 
class annot(value: String) extends annotation.ClassfileAnnotation ^ +t6083.scala:7: error: annotation argument needs to be a constant; found: conv.i2s(101) +@annot(101) class C + ^ one warning found one error found diff --git a/test/files/neg/t6675b.check b/test/files/neg/t6675b.check index c78d8edb1b14..284046a70c87 100644 --- a/test/files/neg/t6675b.check +++ b/test/files/neg/t6675b.check @@ -1,37 +1,37 @@ +t6675b.scala:18: warning: deprecated adaptation: object LeftOrRight expects 2 patterns to hold (Int, Int) but crushing into 2-tuple to fit single pattern (scala/bug#6675) + def f1 = (Left((0, 0)): Either[(Int, Int), (Int, Int)]) match { case LeftOrRight(a) => a } // warn + ^ t6675b.scala:20: error: constructor cannot be instantiated to expected type; found : (T1, T2, T3) required: (Int, Int) def f3 = (Left((0, 0)): Either[(Int, Int), (Int, Int)]) match { case LeftOrRight((a, b, c)) => a } // fail ^ +t6675b.scala:25: warning: deprecated adaptation: object LeftOrRight expects 2 patterns to hold (A, A) but crushing into 2-tuple to fit single pattern (scala/bug#6675) + def f2[A](x: A) = (Left(x -> x): Either[(A, A), (A, A)]) match { case LeftOrRight(a) => a } // warn + ^ t6675b.scala:27: error: constructor cannot be instantiated to expected type; found : (T1, T2, T3) required: (?A1, ?A2) where type ?A2 <: A (this is a GADT skolem), type ?A1 <: A (this is a GADT skolem) def f4[A](x: A) = (Left(x -> x): Either[(A, A), (A, A)]) match { case LeftOrRight((a, b, c)) => a } // fail ^ +t6675b.scala:31: warning: deprecated adaptation: object NativelyTwo expects 2 patterns to hold ((Int, Int), (Int, Int)) but crushing into 2-tuple to fit single pattern (scala/bug#6675) + def f1 = (Left((0, 0)): Either[(Int, Int), (Int, Int)]) match { case NativelyTwo(a) => a } // warn + ^ t6675b.scala:33: error: constructor cannot be instantiated to expected type; found : (T1, T2, T3) required: ((Int, Int), (Int, Int)) def f3 = (Left((0, 0)): Either[(Int, Int), (Int, Int)]) match { case NativelyTwo((a, b, c)) => a } // fail ^ -t6675b.scala:40: error: constructor cannot be instantiated to expected type; - found : (T1, T2, T3) - required: ((?A1, ?A2), (?A3, ?A4)) where type ?A4 <: A (this is a GADT skolem), type ?A3 <: A (this is a GADT skolem), type ?A2 <: A (this is a GADT skolem), type ?A1 <: A (this is a GADT skolem) - def f4[A](x: A) = (Left(x -> x): Either[(A, A), (A, A)]) match { case NativelyTwo((a, b, c)) => a } // fail - ^ -t6675b.scala:18: warning: deprecated adaptation: object LeftOrRight expects 2 patterns to hold (Int, Int) but crushing into 2-tuple to fit single pattern (scala/bug#6675) - def f1 = (Left((0, 0)): Either[(Int, Int), (Int, Int)]) match { case LeftOrRight(a) => a } // warn - ^ -t6675b.scala:25: warning: deprecated adaptation: object LeftOrRight expects 2 patterns to hold (A, A) but crushing into 2-tuple to fit single pattern (scala/bug#6675) - def f2[A](x: A) = (Left(x -> x): Either[(A, A), (A, A)]) match { case LeftOrRight(a) => a } // warn - ^ -t6675b.scala:31: warning: deprecated adaptation: object NativelyTwo expects 2 patterns to hold ((Int, Int), (Int, Int)) but crushing into 2-tuple to fit single pattern (scala/bug#6675) - def f1 = (Left((0, 0)): Either[(Int, Int), (Int, Int)]) match { case NativelyTwo(a) => a } // warn - ^ t6675b.scala:37: warning: deprecated adaptation: object NativelyTwo expects 2 patterns to hold (A, A) but crushing into 2-tuple to fit single pattern (scala/bug#6675) def f1[A](x: A) = (Left(x): Either[A, A]) match { case NativelyTwo(a) => a } // warn ^ 
t6675b.scala:38: warning: deprecated adaptation: object NativelyTwo expects 2 patterns to hold ((A, A), (A, A)) but crushing into 2-tuple to fit single pattern (scala/bug#6675) def f2[A](x: A) = (Left(x -> x): Either[(A, A), (A, A)]) match { case NativelyTwo(a) => a } // warn ^ +t6675b.scala:40: error: constructor cannot be instantiated to expected type; + found : (T1, T2, T3) + required: ((?A1, ?A2), (?A3, ?A4)) where type ?A4 <: A (this is a GADT skolem), type ?A3 <: A (this is a GADT skolem), type ?A2 <: A (this is a GADT skolem), type ?A1 <: A (this is a GADT skolem) + def f4[A](x: A) = (Left(x -> x): Either[(A, A), (A, A)]) match { case NativelyTwo((a, b, c)) => a } // fail + ^ 5 warnings found four errors found diff --git a/test/files/neg/t8704.check b/test/files/neg/t8704.check index 1083bdba3fb1..eff35e61d653 100644 --- a/test/files/neg/t8704.check +++ b/test/files/neg/t8704.check @@ -1,11 +1,11 @@ +t8704.scala:8: warning: 2 parameter sections are effectively implicit +class D(private implicit val i: Int)(implicit s: String) + ^ t8704.scala:4: error: an implicit parameter section must be last class C(i: Int)(implicit j: Int)(implicit k: Int)(n: Int) { ^ t8704.scala:4: error: multiple implicit parameter sections are not allowed class C(i: Int)(implicit j: Int)(implicit k: Int)(n: Int) { ^ -t8704.scala:8: warning: 2 parameter sections are effectively implicit -class D(private implicit val i: Int)(implicit s: String) - ^ one warning found two errors found diff --git a/test/scaladoc/run/t5527.check b/test/scaladoc/run/t5527.check index 84392fe76ffa..bfaa4ad0ed07 100644 --- a/test/scaladoc/run/t5527.check +++ b/test/scaladoc/run/t5527.check @@ -1,3 +1,12 @@ +newSource1.scala:47: warning: discarding unmoored doc comment + /** Document this crucial constant for posterity. + ^ +newSource1.scala:64: warning: discarding unmoored doc comment + /*************************\ + ^ +newSource1.scala:73: warning: discarding unmoored doc comment + val i = 10 */** Important! + ^ [[syntax trees at end of parser]] // newSource1.scala package { object UselessComments extends scala.AnyRef { @@ -121,15 +130,3 @@ package { } } -newSource1.scala:42: warning: Tag '@martin' is not recognised - /** @martin is this right? It shouldn't flag me as scaladoc. */ - ^ -newSource1.scala:47: warning: discarding unmoored doc comment - /** Document this crucial constant for posterity. - ^ -newSource1.scala:64: warning: discarding unmoored doc comment - /*************************\ - ^ -newSource1.scala:73: warning: discarding unmoored doc comment - val i = 10 */** Important! 
- ^ From 5e46039e999b730f2b0a6f1878b52c377d75d017 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Mon, 1 Mar 2021 13:53:47 -0800 Subject: [PATCH 019/769] Backport fix for detecting jar --- .../scala/tools/nsc/classpath/FileUtils.scala | 4 +- src/compiler/scala/tools/nsc/io/Jar.scala | 7 ++- .../tools/nsc/settings/MutableSettings.scala | 32 ++++++----- test/files/run/t12019/J_1.java | 7 +++ test/files/run/t12019/Test.scala | 53 +++++++++++++++++++ 5 files changed, 84 insertions(+), 19 deletions(-) create mode 100644 test/files/run/t12019/J_1.java create mode 100644 test/files/run/t12019/Test.scala diff --git a/src/compiler/scala/tools/nsc/classpath/FileUtils.scala b/src/compiler/scala/tools/nsc/classpath/FileUtils.scala index aa4d81736195..da6505613706 100644 --- a/src/compiler/scala/tools/nsc/classpath/FileUtils.scala +++ b/src/compiler/scala/tools/nsc/classpath/FileUtils.scala @@ -15,7 +15,7 @@ package scala.tools.nsc.classpath import java.io.{File => JFile, FileFilter} import java.net.URL import scala.reflect.internal.FatalError -import scala.reflect.io.AbstractFile +import scala.reflect.io.{AbstractFile, ZipArchive} /** * Common methods related to Java files and abstract files used in the context of classpath @@ -29,7 +29,7 @@ object FileUtils { def isScalaOrJavaSource: Boolean = !file.isDirectory && (file.hasExtension("scala") || file.hasExtension("java")) // TODO do we need to check also other files using ZipMagicNumber like in scala.tools.nsc.io.Jar.isJarOrZip? - def isJarOrZip: Boolean = file.hasExtension("jar") || file.hasExtension("zip") + def isJarOrZip: Boolean = file.isInstanceOf[ZipArchive] || !file.isDirectory && (file.hasExtension("jar") || file.hasExtension("zip")) /** * Safe method returning a sequence containing one URL representing this file, when underlying file exists, diff --git a/src/compiler/scala/tools/nsc/io/Jar.scala b/src/compiler/scala/tools/nsc/io/Jar.scala index e95d48b5e8c0..66deaed0ee9c 100644 --- a/src/compiler/scala/tools/nsc/io/Jar.scala +++ b/src/compiler/scala/tools/nsc/io/Jar.scala @@ -165,11 +165,10 @@ object Jar { // See http://docs.oracle.com/javase/7/docs/api/java/nio/file/Path.html // for some ideas. 
private val ZipMagicNumber = List[Byte](80, 75, 3, 4) - private def magicNumberIsZip(f: Path) = f.isFile && (f.toFile.bytes().take(4).toList == ZipMagicNumber) + private def magicNumberIsZip(f: Path) = f.toFile.bytes().take(4).toList == ZipMagicNumber - def isJarOrZip(f: Path): Boolean = isJarOrZip(f, examineFile = true) - def isJarOrZip(f: Path, examineFile: Boolean): Boolean = - f.hasExtension("zip", "jar") || (examineFile && magicNumberIsZip(f)) + // file exists and either has name.jar or magic number + def isJarOrZip(f: Path): Boolean = f.isFile && (Path.isExtensionJarOrZip(f.name) || magicNumberIsZip(f)) def create(file: File, sourceDir: Directory, mainClass: String) { val writer = new Jar(file).jarWriter(Name.MAIN_CLASS -> mainClass) diff --git a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala index dbdd8026cbbe..5c6544c7dba4 100644 --- a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala @@ -16,7 +16,7 @@ package scala.tools package nsc package settings -import io.{ AbstractFile, Jar, Path, PlainFile, VirtualDirectory } +import io.{ AbstractFile, Path, PlainFile, VirtualDirectory } import scala.collection.generic.Clearable import scala.io.Source import scala.reflect.internal.util.{ SomeOfNil, StringOps } @@ -274,25 +274,31 @@ class MutableSettings(val errorFn: String => Unit, val pathFactory: PathFactory) /** Add a destination directory for sources found under `srcDir`. * Both directories should exist. */ - def add(srcDir: String, outDir: String): Unit = // used in ide? - add(checkDir(pathFactory.getDirectory(srcDir), srcDir), - checkDir(pathFactory.getDirectory(outDir), outDir)) + // used in ide? + def add(srcDir: String, outDir: String): Unit = { + // Check that dir exists and is a directory. + def checkDir(name: String): AbstractFile = { + val dir = pathFactory.getDirectory(name) + if (dir != null && dir.isDirectory) dir + else throw new FatalError(s"$name does not exist or is not a directory") + } + add(checkDir(srcDir), checkDir(outDir)) + } - /** Check that dir is exists and is a directory. */ - private def checkDir(dir: AbstractFile, name: String, allowJar: Boolean = false): AbstractFile = + /** Check either existing dir, or if not dir in path, a jar/zip which may not yet exist. */ + private def checkDirOrJar(name: String): AbstractFile = { + val dir = pathFactory.getDirectory(name) if (dir != null && dir.isDirectory) dir - else if (allowJar && dir == null && Jar.isJarOrZip(name, examineFile = false)) new PlainFile(Path(name)) + else if (dir == null && Path.isExtensionJarOrZip(name)) new PlainFile(Path(name)) else throw new FatalError(s"$name does not exist or is not a directory") + } + + def getSingleOutput: Option[AbstractFile] = singleOutDir /** Set the single output directory. From now on, all files will * be dumped in there, regardless of previous calls to 'add'. */ - def setSingleOutput(outDir: String) { - val dst = pathFactory.getDirectory(outDir) - setSingleOutput(checkDir(dst, outDir, allowJar = true)) - } - - def getSingleOutput: Option[AbstractFile] = singleOutDir + def setSingleOutput(outDir: String): Unit = setSingleOutput(checkDirOrJar(outDir)) /** Set the single output directory. From now on, all files will * be dumped in there, regardless of previous calls to 'add'. 
diff --git a/test/files/run/t12019/J_1.java b/test/files/run/t12019/J_1.java new file mode 100644 index 000000000000..dfa9b96e58f0 --- /dev/null +++ b/test/files/run/t12019/J_1.java @@ -0,0 +1,7 @@ +package p; + +public class J_1 { + public int f() { + return 42; + } +} diff --git a/test/files/run/t12019/Test.scala b/test/files/run/t12019/Test.scala new file mode 100644 index 000000000000..0108763c7eae --- /dev/null +++ b/test/files/run/t12019/Test.scala @@ -0,0 +1,53 @@ + +import scala.tools.partest.DirectTest +import scala.util.Properties.isWin + +object Test extends DirectTest { + import java.nio.file.Files._ + + override def code: String = "class C { val c = new p.J_1().f() }" + + override def show(): Unit = { + val dir = createTempDirectory("t12019") + val out = createTempDirectory("t12019out") + try { + val target = createDirectory(dir.resolve("java.zip")) + val outdir = testOutput.jfile.toPath + val pkgdir = outdir.resolve("p") + val tocopy = walk(pkgdir) + try { + tocopy.forEach { p => + val partial = outdir.relativize(p) + val q = target.resolve(partial) + copy(p, q) + } + } finally { + tocopy.close() + } + val compiler = newCompiler(newSettings("-usejavacp" :: "-classpath" :: target.toString :: "-d" :: out.toString :: Nil)) + compileString(compiler)(code) + } finally { + if (!isWin) { + Zapper.remove(dir) + Zapper.remove(out) + } + } + } +} + +object Zapper { + import java.io.IOException + import java.nio.file._, Files._, FileVisitResult.{CONTINUE => Continue} + import java.nio.file.attribute._ + + def remove(path: Path): Unit = if (isDirectory(path)) removeRecursively(path) else delete(path) + + private def removeRecursively(path: Path): Unit = walkFileTree(path, new ZappingFileVisitor) + + private class ZappingFileVisitor extends SimpleFileVisitor[Path] { + private def zap(path: Path) = { delete(path) ; Continue } + override def postVisitDirectory(path: Path, e: IOException): FileVisitResult = if (e != null) throw e else zap(path) + override def visitFile(path: Path, attrs: BasicFileAttributes): FileVisitResult = zap(path) + } +} +// was: Error accessing /tmp/t120198214162953467729048/java.zip From 7e1d8e6a29d062cd39a78f3b0a1796bed40df1ce Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 2 Mar 2021 09:59:31 +1000 Subject: [PATCH 020/769] Improve support for macros targeting invokedynamic - Support macro varargs bootstrap methods - Support method handle bootstrap arguments Demonstrated with a test case that passes a Constant(symbol) through to a static bootstrap MethodHandle argument. This bootstrap also accepts a trailing varargs array of parameter names. 
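For illustration only (a simplified sketch with made-up names, not compiler
API and not part of this change): the backend now splits the ApplyDynamic
argument list by the call site's own parameter count instead of by the
bootstrap method's arity, which is what lets a varargs bootstrap receive any
number of extra static arguments. The MethodHandle static arguments
themselves come from Constant(symbol) trees, handled separately when the
bootstrap arguments are written to the constant pool.

    // Everything before the call site's own parameters is a static
    // bootstrap argument; the trailing entries are the dynamic arguments.
    def splitIndyArgs[A](allArgs: List[A], callSiteParamCount: Int): (List[A], List[A]) =
      allArgs.splitAt(allArgs.length - callSiteParamCount)

    // A call site with one dynamic parameter and four list entries yields
    // three static bootstrap arguments:
    // splitIndyArgs(List("s1", "s2", "s3", "d1"), 1) == (List("s1", "s2", "s3"), List("d1"))
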
--- .../nsc/backend/jvm/BCodeBodyBuilder.scala | 4 +- .../nsc/backend/jvm/BTypesFromSymbols.scala | 17 +++++- test/files/run/indy-via-macro-reflector.check | 3 ++ .../indy-via-macro-reflector/Bootstrap.java | 44 +++++++++++++++ .../run/indy-via-macro-reflector/Test_2.scala | 18 +++++++ .../indy-via-macro-reflector/macro_1.scala | 53 +++++++++++++++++++ 6 files changed, 136 insertions(+), 3 deletions(-) create mode 100644 test/files/run/indy-via-macro-reflector.check create mode 100644 test/files/run/indy-via-macro-reflector/Bootstrap.java create mode 100644 test/files/run/indy-via-macro-reflector/Test_2.scala create mode 100644 test/files/run/indy-via-macro-reflector/macro_1.scala diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala index 1f74fa888b68..2c215f23a2d5 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala @@ -303,8 +303,8 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { generatedType = genApply(app, expectedType) case app @ ApplyDynamic(qual, Literal(Constant(bootstrapMethodRef: Symbol)) :: staticAndDynamicArgs) => - val numStaticArgs = bootstrapMethodRef.paramss.head.size - 3 /*JVM provided args*/ - val (staticArgs, dynamicArgs) = staticAndDynamicArgs.splitAt(numStaticArgs) + val numDynamicArgs = qual.symbol.info.params.length + val (staticArgs, dynamicArgs) = staticAndDynamicArgs.splitAt(staticAndDynamicArgs.length - numDynamicArgs) val bootstrapDescriptor = staticHandleFromSymbol(bootstrapMethodRef) val bootstrapArgs = staticArgs.map({case t @ Literal(c: Constant) => bootstrapMethodArg(c, t.pos)}) val descriptor = methodBTypeFromMethodType(qual.symbol.info, false) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala index 6587262a1fa5..172708ef24d3 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala @@ -131,7 +131,8 @@ abstract class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { def bootstrapMethodArg(t: Constant, pos: Position): AnyRef = t match { case Constant(mt: Type) => methodBTypeFromMethodType(transformedType(mt), isConstructor = false).toASMType - case c @ Constant(sym: Symbol) => staticHandleFromSymbol(sym) + case c @ Constant(sym: Symbol) if sym.owner.isJavaDefined && sym.isStaticMember => staticHandleFromSymbol(sym) + case c @ Constant(sym: Symbol) => handleFromMethodSymbol(sym) case c @ Constant(value: String) => value case c @ Constant(value) if c.isNonUnitAnyVal => c.value.asInstanceOf[AnyRef] case _ => reporter.error(pos, "Unable to convert static argument of ApplyDynamic into a classfile constant: " + t); null @@ -149,6 +150,20 @@ abstract class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { new asm.Handle(asm.Opcodes.H_INVOKESTATIC, ownerInternalName, sym.name.encoded, descriptor, isInterface) } + def handleFromMethodSymbol(sym: Symbol): asm.Handle = { + val isConstructor = (sym.isClassConstructor) + val descriptor = methodBTypeFromMethodType(sym.info, isConstructor).descriptor + val ownerBType = classBTypeFromSymbol(sym.owner) + val rawInternalName = ownerBType.internalName + val ownerInternalName = rawInternalName + val isInterface = sym.owner.isTraitOrInterface + val tag = if (sym.owner.isJavaDefined && sym.isStaticMember) throw new 
UnsupportedOperationException() + else if (isConstructor) asm.Opcodes.H_NEWINVOKESPECIAL + else if (isInterface) asm.Opcodes.H_INVOKEINTERFACE + else asm.Opcodes.H_INVOKEVIRTUAL + new asm.Handle(tag, ownerInternalName, if (isConstructor) sym.name.toString else sym.name.encoded, descriptor, isInterface) + } + /** * This method returns the BType for a type reference, for example a parameter type. */ diff --git a/test/files/run/indy-via-macro-reflector.check b/test/files/run/indy-via-macro-reflector.check new file mode 100644 index 000000000000..e14bfd6b53ae --- /dev/null +++ b/test/files/run/indy-via-macro-reflector.check @@ -0,0 +1,3 @@ +HandleAndStrings{handle=MethodHandle(C,Object,int)String, scalaParamNames=[p1, p2]}, dynamic +HandleAndStrings{handle=MethodHandle(int)C1, scalaParamNames=[a]}, dynamic +HandleAndStrings{handle=MethodHandle(T)int, scalaParamNames=[]}, dynamic diff --git a/test/files/run/indy-via-macro-reflector/Bootstrap.java b/test/files/run/indy-via-macro-reflector/Bootstrap.java new file mode 100644 index 000000000000..468b8e43acc6 --- /dev/null +++ b/test/files/run/indy-via-macro-reflector/Bootstrap.java @@ -0,0 +1,44 @@ +package test; + +import java.lang.invoke.*; + +public final class Bootstrap { + private Bootstrap() { + } + + public static CallSite bootstrap(MethodHandles.Lookup lookup, String invokedName, + MethodType invokedType, + Object... args) throws Throwable { + int arity = (int) args[0]; + MethodHandle MH = (MethodHandle) args[1]; + String[] strings = new String[arity]; + for (int i = 0; i < arity; i++) { + strings[i] = (String) args[2 + i]; + } + + Reflection handleAndStrings = new Reflection(MH, strings); + MethodHandle foo = MethodHandles.lookup().findVirtual(Reflection.class, "foo", MethodType.methodType(String.class, String.class)); + return new java.lang.invoke.ConstantCallSite(foo.bindTo(handleAndStrings)); + } + static class Reflection { + private final MethodHandle handle; + private final String[] scalaParamNames; + + public Reflection(MethodHandle handle, String[] scalaParamNames) { + this.handle = handle; + this.scalaParamNames = scalaParamNames; + } + + public String foo(String f) { + return toString() + ", " + f; + } + + @java.lang.Override + public java.lang.String toString() { + return "HandleAndStrings{" + + "handle=" + handle + + ", scalaParamNames=" + java.util.Arrays.toString(scalaParamNames) + + '}'; + } + } +} diff --git a/test/files/run/indy-via-macro-reflector/Test_2.scala b/test/files/run/indy-via-macro-reflector/Test_2.scala new file mode 100644 index 000000000000..6e51340afa43 --- /dev/null +++ b/test/files/run/indy-via-macro-reflector/Test_2.scala @@ -0,0 +1,18 @@ +object Test { + def main(args: Array[String]) { + println(new C().foo(null, 0)) + println(Macro.reflectorConstructor("dynamic")) + println(Macro.reflectorTrait("dynamic")) + } +} + +class C { + def foo(p1: Object, p2: Int): String = { + Macro.reflector("dynamic") + privateFoo(p1, p2) + } + + private def privateFoo(p1: Object, p2: Int): String = { + Macro.reflector("dynamic") + } +} diff --git a/test/files/run/indy-via-macro-reflector/macro_1.scala b/test/files/run/indy-via-macro-reflector/macro_1.scala new file mode 100644 index 000000000000..46783d8cecaa --- /dev/null +++ b/test/files/run/indy-via-macro-reflector/macro_1.scala @@ -0,0 +1,53 @@ +import java.util.regex._ +import scala.reflect.internal.SymbolTable +import scala.reflect.macros.blackbox._ +import language.experimental.macros +import java.lang.invoke.{MethodHandle, MethodHandles} + +object Macro { + def 
reflector(dynamic: String): String = macro Impl.reflector + def reflectorConstructor(dynamic: String): String = macro Impl.reflectorConstructor + def reflectorTrait(dynamic: String): String = macro Impl.reflectorTrait +} + +class C1(a: Int) { +} + +trait T { + def foo = 42 +} + +class Impl(val c: Context) { + def reflectorConstructor(dynamic: c.Tree): c.Tree = { + import c.universe._ + impl(dynamic, symbolOf[C1].info.decl(nme.CONSTRUCTOR)) + } + def reflectorTrait(dynamic: c.Tree): c.Tree = { + import c.universe._ + impl(dynamic, symbolOf[T].info.decl(TermName("foo"))) + } + + def reflector(dynamic: c.Tree): c.Tree = { + impl(dynamic, c.internal.enclosingOwner) + } + + private def impl(dynamic: c.Tree, reflectionSubject0: c.Symbol): c.Tree = { + { + val symtab = c.universe.asInstanceOf[SymbolTable] + import symtab._ + val reflectionSubject = reflectionSubject0.asInstanceOf[Symbol] + val bootstrapMethod = typeOf[test.Bootstrap].companion.member(TermName("bootstrap")) + val paramSym = NoSymbol.newTermSymbol(TermName("x")).setInfo(typeOf[String]) + val dummySymbol = NoSymbol.newTermSymbol(TermName("reflector")).setInfo(internal.methodType(paramSym :: Nil, typeOf[String])) + val reflectionSubjectParams = reflectionSubject.info.paramss.flatten + val bootstrapArgTrees: List[Tree] = List( + Literal(Constant(bootstrapMethod)).setType(NoType), + Literal(Constant(reflectionSubjectParams.length)).setType(typeOf[Int]), + Literal(Constant(reflectionSubject)).setType(typeOf[MethodHandle]) + ) ::: reflectionSubjectParams.map(s => Literal(Constant(s.name.decoded)).setType(typeOf[String])) + val result = ApplyDynamic(Ident(dummySymbol).setType(dummySymbol.info), bootstrapArgTrees ::: List(dynamic.asInstanceOf[symtab.Tree])) + result.setType(dummySymbol.info.resultType) + result.asInstanceOf[c.Tree] + } + } +} From b8772bbfddac2945cb22df6565160cb4e057140d Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 2 Mar 2021 14:11:42 +1000 Subject: [PATCH 021/769] Test case showing macro to summon an j.l.i.MethodType from a DefDef --- .../run/indy-via-macro-method-type-bsa.check | 2 ++ .../Bootstrap.java | 14 ++++++++ .../Test_2.scala | 7 ++++ .../macro_1.scala | 35 +++++++++++++++++++ 4 files changed, 58 insertions(+) create mode 100644 test/files/run/indy-via-macro-method-type-bsa.check create mode 100644 test/files/run/indy-via-macro-method-type-bsa/Bootstrap.java create mode 100644 test/files/run/indy-via-macro-method-type-bsa/Test_2.scala create mode 100644 test/files/run/indy-via-macro-method-type-bsa/macro_1.scala diff --git a/test/files/run/indy-via-macro-method-type-bsa.check b/test/files/run/indy-via-macro-method-type-bsa.check new file mode 100644 index 000000000000..c0297137ee5e --- /dev/null +++ b/test/files/run/indy-via-macro-method-type-bsa.check @@ -0,0 +1,2 @@ +(int)String +()int diff --git a/test/files/run/indy-via-macro-method-type-bsa/Bootstrap.java b/test/files/run/indy-via-macro-method-type-bsa/Bootstrap.java new file mode 100644 index 000000000000..2a788a758dd5 --- /dev/null +++ b/test/files/run/indy-via-macro-method-type-bsa/Bootstrap.java @@ -0,0 +1,14 @@ +package test; + +import java.lang.invoke.*; + +public final class Bootstrap { + private Bootstrap() { + } + + public static CallSite bootstrap(MethodHandles.Lookup lookup, String invokedName, + MethodType invokedType, + MethodType mt) throws Throwable { + return new java.lang.invoke.ConstantCallSite(java.lang.invoke.MethodHandles.constant(MethodType.class, mt)); + } +} diff --git 
a/test/files/run/indy-via-macro-method-type-bsa/Test_2.scala b/test/files/run/indy-via-macro-method-type-bsa/Test_2.scala new file mode 100644 index 000000000000..a284e28725f8 --- /dev/null +++ b/test/files/run/indy-via-macro-method-type-bsa/Test_2.scala @@ -0,0 +1,7 @@ +object Test { + def main(args: Array[String]) { + println(Macro.methodTypeOf({def x(a: Int): String = ???})) + println(Macro.methodTypeOf({def x(): C = ???})) + } + class C(val x: Int) extends AnyVal +} diff --git a/test/files/run/indy-via-macro-method-type-bsa/macro_1.scala b/test/files/run/indy-via-macro-method-type-bsa/macro_1.scala new file mode 100644 index 000000000000..f058584587e2 --- /dev/null +++ b/test/files/run/indy-via-macro-method-type-bsa/macro_1.scala @@ -0,0 +1,35 @@ +import java.util.regex._ +import scala.reflect.internal.SymbolTable +import scala.reflect.macros.blackbox._ +import language.experimental.macros +import java.lang.invoke._ + +object Macro { + def methodTypeOf(expr: Any): MethodType = macro Impl.methodTypeOf +} + + +class Impl(val c: Context) { + def methodTypeOf(expr: c.Tree): c.Tree = { + { + val symtab = c.universe.asInstanceOf[SymbolTable] + import symtab._ + val tp = transformedType(expr.asInstanceOf[Tree] match { + case Block((dd: DefDef) :: Nil, Literal(Constant(()))) => + dd.symbol.info + case expr => + expr.tpe + }) + val bootstrapMethod = typeOf[test.Bootstrap].companion.member(TermName("bootstrap")) + val paramSym = NoSymbol.newTermSymbol(TermName("x")).setInfo(typeOf[String]) + val dummySymbol = NoSymbol.newTermSymbol(TermName("methodTypeOf")).setInfo(internal.nullaryMethodType(typeOf[java.lang.invoke.MethodType])) + val bootstrapArgTrees: List[Tree] = List( + Literal(Constant(bootstrapMethod)).setType(NoType), + Literal(Constant(tp)).setType(typeOf[java.lang.invoke.MethodType]), + ) + val result = ApplyDynamic(Ident(dummySymbol).setType(dummySymbol.info), bootstrapArgTrees) + result.setType(dummySymbol.info.resultType) + result.asInstanceOf[c.Tree] + } + } +} From 25e6bc4f275d6dbcec5b13aa7bd6d971be1f3aa1 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Mon, 1 Mar 2021 11:32:32 +0100 Subject: [PATCH 022/769] Move suspended warnings back from CompilationUnit to RunReporting Index suspended warnings by SourceFile. Test case for issue 12308. 
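As an illustrative sketch only (the type aliases below stand in for the real
SourceFile and Message types; this is not the patch itself): warnings are now
buffered per source file and released once the suppression information for
that source is complete, so the presentation compiler can flush exactly the
warnings of the unit it just re-typechecked while other sources stay
suspended.

    import scala.collection.mutable

    object SuspensionSketch {
      type SourceFile = String // placeholder for scala.reflect.internal.util.SourceFile
      type Message    = String // placeholder for Reporting.Message

      private val suspended =
        mutable.LinkedHashMap.empty[SourceFile, mutable.LinkedHashSet[Message]]

      // Called while suppressions for `source` may still be incomplete.
      def suspend(source: SourceFile, msg: Message): Unit =
        suspended.getOrElseUpdate(source, mutable.LinkedHashSet.empty) += msg

      // Called once typer has finished a unit: drop and issue only that source's warnings.
      def release(source: SourceFile)(issue: Message => Unit): Unit =
        suspended.remove(source).foreach(_.foreach(issue))
    }

In the real code the release step also filters each message through
isSuppressed, and the interactive compiler clears the "complete" flag for a
source before re-typechecking it.
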
--- .../scala/tools/nsc/CompilationUnits.scala | 8 --- src/compiler/scala/tools/nsc/Global.scala | 4 +- src/compiler/scala/tools/nsc/Reporting.scala | 59 ++++++++----------- .../tools/nsc/typechecker/Analyzer.scala | 6 +- .../tools/nsc/typechecker/Contexts.scala | 36 ++++------- .../scala/tools/nsc/typechecker/Typers.scala | 2 +- .../scala/tools/nsc/interactive/Global.scala | 1 + test/files/neg/annot-nonconst.check | 12 ++-- test/files/neg/badtok-1-212.check | 6 +- test/files/neg/for-comprehension-old.check | 24 ++++---- test/files/neg/nested-annotation.check | 6 +- test/files/neg/t10678.check | 6 +- test/files/neg/t6082.check | 10 ++-- test/files/neg/t6083.check | 6 +- test/files/neg/t6675b.check | 28 ++++----- test/files/neg/t8704.check | 6 +- test/files/presentation/t12308.check | 50 ++++++++++++++++ test/files/presentation/t12308/Test.scala | 50 ++++++++++++++++ test/files/presentation/t12308/src/Foo.scala | 5 ++ test/scaladoc/run/t5527.check | 21 ++++--- 20 files changed, 210 insertions(+), 136 deletions(-) create mode 100644 test/files/presentation/t12308.check create mode 100644 test/files/presentation/t12308/Test.scala create mode 100644 test/files/presentation/t12308/src/Foo.scala diff --git a/src/compiler/scala/tools/nsc/CompilationUnits.scala b/src/compiler/scala/tools/nsc/CompilationUnits.scala index f05cc719ec5b..b1fcd1b558d5 100644 --- a/src/compiler/scala/tools/nsc/CompilationUnits.scala +++ b/src/compiler/scala/tools/nsc/CompilationUnits.scala @@ -15,7 +15,6 @@ package scala.tools.nsc import scala.collection.mutable import scala.collection.mutable.{LinkedHashSet, ListBuffer} import scala.reflect.internal.util.{FreshNameCreator, NoSourceFile, SourceFile} -import scala.tools.nsc.Reporting.Message trait CompilationUnits { global: Global => @@ -128,13 +127,6 @@ trait CompilationUnits { global: Global => /** things to check at end of compilation unit */ val toCheck = new ListBuffer[() => Unit] - var suspendMessages = true - private[this] var _suspendedMessages: mutable.LinkedHashSet[Message] = null - def suspendedMessages: mutable.LinkedHashSet[Message] = { - if (_suspendedMessages == null) _suspendedMessages = mutable.LinkedHashSet.empty - _suspendedMessages - } - /** The features that were already checked for this unit */ var checkedFeatures = Set[Symbol]() diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 6497beae19a0..74a9454a80b5 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -1549,10 +1549,8 @@ class Global(var currentSettings: Settings, reporter0: Reporter) if (settings.YstatisticsEnabled && settings.Ystatistics.contains(phase.name)) printStatisticsFor(phase) - if (!globalPhase.hasNext || reporter.hasErrors) { - units.foreach(unit => unit.suspendedMessages.foreach(runReporting.issueIfNotSuppressed)) + if (!globalPhase.hasNext || reporter.hasErrors) runReporting.warnUnusedSuppressions() - } advancePhase() } diff --git a/src/compiler/scala/tools/nsc/Reporting.scala b/src/compiler/scala/tools/nsc/Reporting.scala index c86a2d46b380..0272376761b0 100644 --- a/src/compiler/scala/tools/nsc/Reporting.scala +++ b/src/compiler/scala/tools/nsc/Reporting.scala @@ -56,6 +56,8 @@ trait Reporting extends scala.reflect.internal.Reporting { self: ast.Positions w private val summarizedInfos: mutable.Map[WarningCategory, mutable.LinkedHashMap[Position, Message]] = mutable.HashMap.empty private val suppressions: mutable.LinkedHashMap[SourceFile, mutable.ListBuffer[Suppression]] = 
mutable.LinkedHashMap.empty + private val suppressionsComplete: mutable.Set[SourceFile] = mutable.Set.empty + private val suspendedMessages: mutable.LinkedHashMap[SourceFile, mutable.LinkedHashSet[Message]] = mutable.LinkedHashMap.empty private def isSuppressed(warning: Message): Boolean = suppressions.getOrElse(warning.pos.source, Nil).find(_.matches(warning)) match { @@ -63,6 +65,8 @@ trait Reporting extends scala.reflect.internal.Reporting { self: ast.Positions w case _ => false } + def clearSuppressionsComplete(sourceFile: SourceFile): Unit = suppressionsComplete -= sourceFile + def addSuppression(sup: Suppression): Unit = { val source = sup.annotPos.source suppressions.getOrElseUpdate(source, mutable.ListBuffer.empty) += sup @@ -72,6 +76,8 @@ trait Reporting extends scala.reflect.internal.Reporting { self: ast.Positions w suppressions.getOrElse(pos.source, Nil).exists(_.annotPos.point == pos.point) def warnUnusedSuppressions(): Unit = { + // if we stop before typer completes (errors in parser, Ystop), report all suspended messages + suspendedMessages.valuesIterator.foreach(_.foreach(issueWarning)) if (settings.warnUnusedNowarn && !settings.isScaladoc) { // scaladoc doesn't run all phases, so not all warnings are emitted val sources = suppressions.keysIterator.toList for (source <- sources; sups <- suppressions.remove(source); sup <- sups.reverse) { @@ -85,7 +91,8 @@ trait Reporting extends scala.reflect.internal.Reporting { self: ast.Positions w // sort suppressions. they are not added in any particular order because of lazy type completion for (sups <- suppressions.get(unit.source)) suppressions(unit.source) = sups.sortBy(sup => 0 - sup.start) - unit.suspendedMessages.foreach(issueIfNotSuppressed) + suppressionsComplete += unit.source + suspendedMessages.remove(unit.source).foreach(_.foreach(issueIfNotSuppressed)) } private def summaryMap(action: Action, category: WarningCategory) = { @@ -114,9 +121,13 @@ trait Reporting extends scala.reflect.internal.Reporting { self: ast.Positions w } } - def issueIfNotSuppressed(warning: Message): Unit = - if (!isSuppressed(warning)) - issueWarning(warning) + def issueIfNotSuppressed(warning: Message): Unit = { + if (suppressionsComplete(warning.pos.source)) { + if (!isSuppressed(warning)) + issueWarning(warning) + } else + suspendedMessages.getOrElseUpdate(warning.pos.source, mutable.LinkedHashSet.empty) += warning + } private def summarize(action: Action, category: WarningCategory): Unit = { def rerunMsg: String = { @@ -176,28 +187,19 @@ trait Reporting extends scala.reflect.internal.Reporting { self: ast.Positions w impl(sym) } else "" - def deprecationWarningMessage(pos: Position, msg: String, since: String, site: String, origin: String): Message = - Message.Deprecation(pos, msg, site, origin, Version.fromString(since)) + def deprecationWarning(pos: Position, msg: String, since: String, site: String, origin: String): Unit = + issueIfNotSuppressed(Message.Deprecation(pos, msg, site, origin, Version.fromString(since))) - def deprecationWarningMessage(pos: Position, origin: Symbol, site: Symbol, msg: String, since: String): Message = - deprecationWarningMessage(pos, msg, since, siteName(site), siteName(origin)) + def deprecationWarning(pos: Position, origin: Symbol, site: Symbol, msg: String, since: String): Unit = + deprecationWarning(pos, msg, since, siteName(site), siteName(origin)) - def deprecationWarningMessage(pos: Position, origin: Symbol, site: Symbol): Message = { + def deprecationWarning(pos: Position, origin: Symbol, site: Symbol): 
Unit = { val version = origin.deprecationVersion.getOrElse("") val since = if (version.isEmpty) version else s" (since $version)" val message = origin.deprecationMessage.map(": " + _).getOrElse("") - deprecationWarningMessage(pos, origin, site, s"$origin${origin.locationString} is deprecated$since$message", version) + deprecationWarning(pos, origin, site, s"$origin${origin.locationString} is deprecated$since$message", version) } - def deprecationWarning(pos: Position, msg: String, since: String, site: String, origin: String): Unit = - issueIfNotSuppressed(deprecationWarningMessage(pos, msg, since, site, origin)) - - def deprecationWarning(pos: Position, origin: Symbol, site: Symbol, msg: String, since: String): Unit = - issueIfNotSuppressed(deprecationWarningMessage(pos, origin, site, msg, since)) - - def deprecationWarning(pos: Position, origin: Symbol, site: Symbol): Unit = - issueIfNotSuppressed(deprecationWarningMessage(pos, origin, site)) - private[this] var reportedFeature = Set[Symbol]() // we don't have access to runDefinitions here, so mapping from strings instead of feature symbols private val featureCategory: Map[String, WarningCategory.Feature] = { @@ -213,7 +215,7 @@ trait Reporting extends scala.reflect.internal.Reporting { self: ast.Positions w ).withDefaultValue(Feature) } - def featureWarningMessage(pos: Position, featureName: String, featureDesc: String, featureTrait: Symbol, construct: => String = "", required: Boolean, site: Symbol): Option[Message] = { + def featureWarning(pos: Position, featureName: String, featureDesc: String, featureTrait: Symbol, construct: => String = "", required: Boolean, site: Symbol): Unit = { val req = if (required) "needs to" else "should" val fqname = "scala.language." + featureName val explain = ( @@ -237,26 +239,17 @@ trait Reporting extends scala.reflect.internal.Reporting { self: ast.Positions w && parentFileName(pos.source).getOrElse("") == "xsbt" && Thread.currentThread.getStackTrace.exists(_.getClassName.startsWith("sbt.")) ) - if (required && !isSbtCompat) { reporter.error(pos, msg); None } - else Some(warningMessage(pos, msg, featureCategory(featureTrait.nameString), site)) + if (required && !isSbtCompat) reporter.error(pos, msg) + else warning(pos, msg, featureCategory(featureTrait.nameString), site) } - def featureWarning(pos: Position, featureName: String, featureDesc: String, featureTrait: Symbol, construct: => String = "", required: Boolean, site: Symbol): Unit = - featureWarningMessage(pos, featureName, featureDesc, featureTrait, construct, required, site).foreach(issueIfNotSuppressed) - - def warningMessage(pos: Position, msg: String, category: WarningCategory, site: String): Message = - Message.Plain(pos, msg, category, site) - - def warningMessage(pos: Position, msg: String, category: WarningCategory, site: Symbol): Message = - warningMessage(pos, msg, category, siteName(site)) - // Used in the optimizer where we don't have no symbols, the site string is created from the class internal name and method name. 
def warning(pos: Position, msg: String, category: WarningCategory, site: String): Unit = - issueIfNotSuppressed(warningMessage(pos, msg, category, site)) + issueIfNotSuppressed(Message.Plain(pos, msg, category, site)) // Preferred over the overload above whenever a site symbol is available def warning(pos: Position, msg: String, category: WarningCategory, site: Symbol): Unit = - issueIfNotSuppressed(warningMessage(pos, msg, category, site)) + warning(pos, msg, category, siteName(site)) // used by Global.deprecationWarnings, which is used by sbt def deprecationWarnings: List[(Position, String)] = summaryMap(Action.WarningSummary, WarningCategory.Deprecation).toList.map(p => (p._1, p._2.msg)) diff --git a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala index f9fdd7a08b87..11c2f28703f7 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala @@ -114,6 +114,7 @@ trait Analyzer extends AnyRef try { val typer = newTyper(rootContext(unit)) unit.body = typer.typed(unit.body) + // interactive typed may finish by throwing a `TyperResult` if (!settings.Youtline.value) { for (workItem <- unit.toCheck) workItem() if (settings.warnUnusedImport) @@ -121,13 +122,10 @@ trait Analyzer extends AnyRef if (settings.warnUnused.isSetByUser) new checkUnused(typer).apply(unit) } - if (unit.suspendMessages) - runReporting.reportSuspendedMessages(unit) } finally { + runReporting.reportSuspendedMessages(unit) unit.toCheck.clear() - unit.suspendMessages = false - unit.suspendedMessages.clear() } } } diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index 8cc3d41942aa..bb6f19138449 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -606,8 +606,8 @@ trait Contexts { self: Analyzer => /** Issue/throw the given error message according to the current mode for error reporting. */ def error(pos: Position, msg: String) = reporter.error(fixPosition(pos), msg) /** Issue/throw the given error message according to the current mode for error reporting. 
*/ - def warning(pos: Position, msg: String, category: WarningCategory) = reporter.warning(fixPosition(pos), msg, category, owner, this) - def warning(pos: Position, msg: String, category: WarningCategory, site: Symbol) = reporter.warning(fixPosition(pos), msg, category, site, this) + def warning(pos: Position, msg: String, category: WarningCategory) = reporter.warning(fixPosition(pos), msg, category, owner) + def warning(pos: Position, msg: String, category: WarningCategory, site: Symbol) = reporter.warning(fixPosition(pos), msg, category, site) def echo(pos: Position, msg: String) = reporter.echo(fixPosition(pos), msg) def fixPosition(pos: Position): Position = pos match { case NoPosition => nextEnclosing(_.tree.pos != NoPosition).tree.pos @@ -617,22 +617,13 @@ trait Contexts { self: Analyzer => // TODO: buffer deprecations under silent (route through ContextReporter, store in BufferingReporter) def deprecationWarning(pos: Position, sym: Symbol, msg: String, since: String): Unit = - if (unit.suspendMessages) - unit.suspendedMessages += runReporting.deprecationWarningMessage(fixPosition(pos), sym, owner, msg, since) - else - runReporting.deprecationWarning(fixPosition(pos), sym, owner, msg, since) + runReporting.deprecationWarning(fixPosition(pos), sym, owner, msg, since) def deprecationWarning(pos: Position, sym: Symbol): Unit = - if (unit.suspendMessages) - unit.suspendedMessages += runReporting.deprecationWarningMessage(fixPosition(pos), sym, owner) - else - runReporting.deprecationWarning(fixPosition(pos), sym, owner) + runReporting.deprecationWarning(fixPosition(pos), sym, owner) def featureWarning(pos: Position, featureName: String, featureDesc: String, featureTrait: Symbol, construct: => String = "", required: Boolean): Unit = - if (unit.suspendMessages) - unit.suspendedMessages ++= runReporting.featureWarningMessage(fixPosition(pos), featureName, featureDesc, featureTrait, construct, required, owner) - else - runReporting.featureWarning(fixPosition(pos), featureName, featureDesc, featureTrait, construct, required, owner) + runReporting.featureWarning(fixPosition(pos), featureName, featureDesc, featureTrait, construct, required, owner) // nextOuter determines which context is searched next for implicits @@ -1369,11 +1360,8 @@ trait Contexts { self: Analyzer => def echo(msg: String): Unit = echo(NoPosition, msg) def echo(pos: Position, msg: String): Unit = reporter.echo(pos, msg) - def warning(pos: Position, msg: String, category: WarningCategory, site: Symbol, context: Context): Unit = - if (context.unit.suspendMessages) - context.unit.suspendedMessages += runReporting.warningMessage(pos, msg, category, site) - else - runReporting.warning(pos, msg, category, site) + def warning(pos: Position, msg: String, category: WarningCategory, site: Symbol): Unit = + runReporting.warning(pos, msg, category, site) def error(pos: Position, msg: String): Unit @@ -1466,13 +1454,9 @@ trait Contexts { self: Analyzer => else msg } - final def emitWarnings(context: Context) = if (_warningBuffer != null) { + final def emitWarnings() = if (_warningBuffer != null) { _warningBuffer foreach { - case (pos, msg, category, site) => - if (context.unit.suspendMessages) - context.unit.suspendedMessages += runReporting.warningMessage(pos, msg, category, site) - else - runReporting.warning(pos, msg, category, site) + case (pos, msg, category, site) => runReporting.warning(pos, msg, category, site) } _warningBuffer = null } @@ -1510,7 +1494,7 @@ trait Contexts { self: Analyzer => // the old throwing behavior was 
relied on by diagnostics in manifestOfType def error(pos: Position, msg: String): Unit = errorBuffer += TypeErrorWrapper(new TypeError(pos, msg)) - override def warning(pos: Position, msg: String, category: WarningCategory, site: Symbol, context: Context): Unit = + override def warning(pos: Position, msg: String, category: WarningCategory, site: Symbol): Unit = warningBuffer += ((pos, msg, category, site)) override protected def handleSuppressedAmbiguous(err: AbsAmbiguousTypeError): Unit = errorBuffer += err diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index b115242aaef7..9843d03d12ae 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -716,7 +716,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // If we have a successful result, emit any warnings it created. if (!context1.reporter.hasErrors) - context1.reporter.emitWarnings(context1) + context1.reporter.emitWarnings() wrapResult(context1.reporter, result) } else { diff --git a/src/interactive/scala/tools/nsc/interactive/Global.scala b/src/interactive/scala/tools/nsc/interactive/Global.scala index b30c880a7bcc..9a89589f890b 100644 --- a/src/interactive/scala/tools/nsc/interactive/Global.scala +++ b/src/interactive/scala/tools/nsc/interactive/Global.scala @@ -650,6 +650,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") private def parseAndEnter(unit: RichCompilationUnit): Unit = if (unit.status == NotLoaded) { debugLog("parsing: "+unit) + runReporting.clearSuppressionsComplete(unit.source) currentTyperRun.compileLate(unit) if (debugIDE && !reporter.hasErrors) validatePositions(unit.body) if (!unit.isJava) syncTopLevelSyms(unit) diff --git a/test/files/neg/annot-nonconst.check b/test/files/neg/annot-nonconst.check index a96eb08df5f3..58a13b10e9c3 100644 --- a/test/files/neg/annot-nonconst.check +++ b/test/files/neg/annot-nonconst.check @@ -1,3 +1,9 @@ +annot-nonconst.scala:6: error: annotation argument needs to be a constant; found: Test.this.n + @Length(n) def foo = "foo" + ^ +annot-nonconst.scala:7: error: annotation argument cannot be null + @Ann2(null) def bar = "bar" + ^ annot-nonconst.scala:1: warning: Implementation restriction: subclassing ClassfileAnnotation does not make your annotation visible at runtime. If that is what you want, you must write the annotation class in Java. @@ -8,11 +14,5 @@ make your annotation visible at runtime. If that is what you want, you must write the annotation class in Java. 
class Ann2(value: String) extends annotation.ClassfileAnnotation ^ -annot-nonconst.scala:6: error: annotation argument needs to be a constant; found: Test.this.n - @Length(n) def foo = "foo" - ^ -annot-nonconst.scala:7: error: annotation argument cannot be null - @Ann2(null) def bar = "bar" - ^ two warnings found two errors found diff --git a/test/files/neg/badtok-1-212.check b/test/files/neg/badtok-1-212.check index 7e3d7cbdfdf1..754652dd2db5 100644 --- a/test/files/neg/badtok-1-212.check +++ b/test/files/neg/badtok-1-212.check @@ -4,14 +4,14 @@ badtok-1-212.scala:3: error: unclosed character literal (or use " not ' for stri badtok-1-212.scala:3: error: unclosed character literal (or use " not ' for string literal) '42' ^ -badtok-1-212.scala:7: warning: deprecated syntax for character literal (use '\'' for single quote) -''' -^ badtok-1-212.scala:9: error: empty character literal ''; ^ badtok-1-212.scala:11: error: unclosed character literal ' ^ +badtok-1-212.scala:7: warning: deprecated syntax for character literal (use '\'' for single quote) +''' +^ one warning found four errors found diff --git a/test/files/neg/for-comprehension-old.check b/test/files/neg/for-comprehension-old.check index b863c59538f1..47cca09953bc 100644 --- a/test/files/neg/for-comprehension-old.check +++ b/test/files/neg/for-comprehension-old.check @@ -1,15 +1,3 @@ -for-comprehension-old.scala:4: warning: val keyword in for comprehension is deprecated - for (x <- 1 to 5 ; val y = x) yield x+y // fail - ^ -for-comprehension-old.scala:6: warning: val keyword in for comprehension is deprecated - for (val x <- 1 to 5 ; val y = x) yield x+y // fail - ^ -for-comprehension-old.scala:9: warning: val keyword in for comprehension is deprecated - for (z <- 1 to 2 ; x <- 1 to 5 ; val y = x) yield x+y // fail - ^ -for-comprehension-old.scala:11: warning: val keyword in for comprehension is deprecated - for (z <- 1 to 2 ; val x <- 1 to 5 ; val y = x) yield x+y // fail - ^ for-comprehension-old.scala:5: error: val in for comprehension must be followed by assignment for (val x <- 1 to 5 ; y = x) yield x+y // fail ^ @@ -22,5 +10,17 @@ for-comprehension-old.scala:10: error: val in for comprehension must be followed for-comprehension-old.scala:11: error: val in for comprehension must be followed by assignment for (z <- 1 to 2 ; val x <- 1 to 5 ; val y = x) yield x+y // fail ^ +for-comprehension-old.scala:4: warning: val keyword in for comprehension is deprecated + for (x <- 1 to 5 ; val y = x) yield x+y // fail + ^ +for-comprehension-old.scala:6: warning: val keyword in for comprehension is deprecated + for (val x <- 1 to 5 ; val y = x) yield x+y // fail + ^ +for-comprehension-old.scala:9: warning: val keyword in for comprehension is deprecated + for (z <- 1 to 2 ; x <- 1 to 5 ; val y = x) yield x+y // fail + ^ +for-comprehension-old.scala:11: warning: val keyword in for comprehension is deprecated + for (z <- 1 to 2 ; val x <- 1 to 5 ; val y = x) yield x+y // fail + ^ four warnings found four errors found diff --git a/test/files/neg/nested-annotation.check b/test/files/neg/nested-annotation.check index a3e159ab3dac..1cd3df5bb054 100644 --- a/test/files/neg/nested-annotation.check +++ b/test/files/neg/nested-annotation.check @@ -1,10 +1,10 @@ +nested-annotation.scala:8: error: nested classfile annotations must be defined in java; found: inline + @ComplexAnnotation(new inline) def bippy(): Int = 1 + ^ nested-annotation.scala:3: warning: Implementation restriction: subclassing ClassfileAnnotation does not make your annotation visible at 
runtime. If that is what you want, you must write the annotation class in Java. class ComplexAnnotation(val value: Annotation) extends ClassfileAnnotation ^ -nested-annotation.scala:8: error: nested classfile annotations must be defined in java; found: inline - @ComplexAnnotation(new inline) def bippy(): Int = 1 - ^ one warning found one error found diff --git a/test/files/neg/t10678.check b/test/files/neg/t10678.check index 09c1bafeac55..d73e3ca30649 100644 --- a/test/files/neg/t10678.check +++ b/test/files/neg/t10678.check @@ -1,11 +1,11 @@ -t10678.scala:5: warning: Using `<:` for `extends` is deprecated -trait U <: T - ^ t10678.scala:7: error: ';' expected but '<:' found. class C <: T { ^ t10678.scala:10: error: ';' expected but '<:' found. object O <: T { ^ +t10678.scala:5: warning: Using `<:` for `extends` is deprecated +trait U <: T + ^ one warning found two errors found diff --git a/test/files/neg/t6082.check b/test/files/neg/t6082.check index 9bd5cb1866b7..9f757d2db82a 100644 --- a/test/files/neg/t6082.check +++ b/test/files/neg/t6082.check @@ -1,13 +1,13 @@ -t6082.scala:1: warning: Implementation restriction: subclassing ClassfileAnnotation does not -make your annotation visible at runtime. If that is what -you want, you must write the annotation class in Java. -class annot(notValue: String) extends annotation.ClassfileAnnotation - ^ t6082.scala:2: error: classfile annotation arguments have to be supplied as named arguments @annot("") class C ^ t6082.scala:2: error: annotation annot is missing argument notValue @annot("") class C ^ +t6082.scala:1: warning: Implementation restriction: subclassing ClassfileAnnotation does not +make your annotation visible at runtime. If that is what +you want, you must write the annotation class in Java. +class annot(notValue: String) extends annotation.ClassfileAnnotation + ^ one warning found two errors found diff --git a/test/files/neg/t6083.check b/test/files/neg/t6083.check index b9869cd092e1..7116bda41d9a 100644 --- a/test/files/neg/t6083.check +++ b/test/files/neg/t6083.check @@ -1,10 +1,10 @@ +t6083.scala:7: error: annotation argument needs to be a constant; found: conv.i2s(101) +@annot(101) class C + ^ t6083.scala:6: warning: Implementation restriction: subclassing ClassfileAnnotation does not make your annotation visible at runtime. If that is what you want, you must write the annotation class in Java. 
class annot(value: String) extends annotation.ClassfileAnnotation ^ -t6083.scala:7: error: annotation argument needs to be a constant; found: conv.i2s(101) -@annot(101) class C - ^ one warning found one error found diff --git a/test/files/neg/t6675b.check b/test/files/neg/t6675b.check index 284046a70c87..c78d8edb1b14 100644 --- a/test/files/neg/t6675b.check +++ b/test/files/neg/t6675b.check @@ -1,37 +1,37 @@ -t6675b.scala:18: warning: deprecated adaptation: object LeftOrRight expects 2 patterns to hold (Int, Int) but crushing into 2-tuple to fit single pattern (scala/bug#6675) - def f1 = (Left((0, 0)): Either[(Int, Int), (Int, Int)]) match { case LeftOrRight(a) => a } // warn - ^ t6675b.scala:20: error: constructor cannot be instantiated to expected type; found : (T1, T2, T3) required: (Int, Int) def f3 = (Left((0, 0)): Either[(Int, Int), (Int, Int)]) match { case LeftOrRight((a, b, c)) => a } // fail ^ -t6675b.scala:25: warning: deprecated adaptation: object LeftOrRight expects 2 patterns to hold (A, A) but crushing into 2-tuple to fit single pattern (scala/bug#6675) - def f2[A](x: A) = (Left(x -> x): Either[(A, A), (A, A)]) match { case LeftOrRight(a) => a } // warn - ^ t6675b.scala:27: error: constructor cannot be instantiated to expected type; found : (T1, T2, T3) required: (?A1, ?A2) where type ?A2 <: A (this is a GADT skolem), type ?A1 <: A (this is a GADT skolem) def f4[A](x: A) = (Left(x -> x): Either[(A, A), (A, A)]) match { case LeftOrRight((a, b, c)) => a } // fail ^ -t6675b.scala:31: warning: deprecated adaptation: object NativelyTwo expects 2 patterns to hold ((Int, Int), (Int, Int)) but crushing into 2-tuple to fit single pattern (scala/bug#6675) - def f1 = (Left((0, 0)): Either[(Int, Int), (Int, Int)]) match { case NativelyTwo(a) => a } // warn - ^ t6675b.scala:33: error: constructor cannot be instantiated to expected type; found : (T1, T2, T3) required: ((Int, Int), (Int, Int)) def f3 = (Left((0, 0)): Either[(Int, Int), (Int, Int)]) match { case NativelyTwo((a, b, c)) => a } // fail ^ +t6675b.scala:40: error: constructor cannot be instantiated to expected type; + found : (T1, T2, T3) + required: ((?A1, ?A2), (?A3, ?A4)) where type ?A4 <: A (this is a GADT skolem), type ?A3 <: A (this is a GADT skolem), type ?A2 <: A (this is a GADT skolem), type ?A1 <: A (this is a GADT skolem) + def f4[A](x: A) = (Left(x -> x): Either[(A, A), (A, A)]) match { case NativelyTwo((a, b, c)) => a } // fail + ^ +t6675b.scala:18: warning: deprecated adaptation: object LeftOrRight expects 2 patterns to hold (Int, Int) but crushing into 2-tuple to fit single pattern (scala/bug#6675) + def f1 = (Left((0, 0)): Either[(Int, Int), (Int, Int)]) match { case LeftOrRight(a) => a } // warn + ^ +t6675b.scala:25: warning: deprecated adaptation: object LeftOrRight expects 2 patterns to hold (A, A) but crushing into 2-tuple to fit single pattern (scala/bug#6675) + def f2[A](x: A) = (Left(x -> x): Either[(A, A), (A, A)]) match { case LeftOrRight(a) => a } // warn + ^ +t6675b.scala:31: warning: deprecated adaptation: object NativelyTwo expects 2 patterns to hold ((Int, Int), (Int, Int)) but crushing into 2-tuple to fit single pattern (scala/bug#6675) + def f1 = (Left((0, 0)): Either[(Int, Int), (Int, Int)]) match { case NativelyTwo(a) => a } // warn + ^ t6675b.scala:37: warning: deprecated adaptation: object NativelyTwo expects 2 patterns to hold (A, A) but crushing into 2-tuple to fit single pattern (scala/bug#6675) def f1[A](x: A) = (Left(x): Either[A, A]) match { case NativelyTwo(a) => a } // warn ^ 
t6675b.scala:38: warning: deprecated adaptation: object NativelyTwo expects 2 patterns to hold ((A, A), (A, A)) but crushing into 2-tuple to fit single pattern (scala/bug#6675) def f2[A](x: A) = (Left(x -> x): Either[(A, A), (A, A)]) match { case NativelyTwo(a) => a } // warn ^ -t6675b.scala:40: error: constructor cannot be instantiated to expected type; - found : (T1, T2, T3) - required: ((?A1, ?A2), (?A3, ?A4)) where type ?A4 <: A (this is a GADT skolem), type ?A3 <: A (this is a GADT skolem), type ?A2 <: A (this is a GADT skolem), type ?A1 <: A (this is a GADT skolem) - def f4[A](x: A) = (Left(x -> x): Either[(A, A), (A, A)]) match { case NativelyTwo((a, b, c)) => a } // fail - ^ 5 warnings found four errors found diff --git a/test/files/neg/t8704.check b/test/files/neg/t8704.check index eff35e61d653..1083bdba3fb1 100644 --- a/test/files/neg/t8704.check +++ b/test/files/neg/t8704.check @@ -1,11 +1,11 @@ -t8704.scala:8: warning: 2 parameter sections are effectively implicit -class D(private implicit val i: Int)(implicit s: String) - ^ t8704.scala:4: error: an implicit parameter section must be last class C(i: Int)(implicit j: Int)(implicit k: Int)(n: Int) { ^ t8704.scala:4: error: multiple implicit parameter sections are not allowed class C(i: Int)(implicit j: Int)(implicit k: Int)(n: Int) { ^ +t8704.scala:8: warning: 2 parameter sections are effectively implicit +class D(private implicit val i: Int)(implicit s: String) + ^ one warning found two errors found diff --git a/test/files/presentation/t12308.check b/test/files/presentation/t12308.check new file mode 100644 index 000000000000..80792e4a7f27 --- /dev/null +++ b/test/files/presentation/t12308.check @@ -0,0 +1,50 @@ +reload: Foo.scala +askLoadedTyped 1 +Problem(RangePosition(t12308/src/Foo.scala, 67, 67, 72),A try without a catch or finally is equivalent to putting its body in a block; no exceptions are handled.,1) +askLoadedTyped 2 +Problem(RangePosition(t12308/src/Foo.scala, 67, 67, 72),A try without a catch or finally is equivalent to putting its body in a block; no exceptions are handled.,1) +reload: Foo.scala +askLoadedTyped 3 +Problem(RangePosition(t12308/src/Foo.scala, 67, 67, 72),A try without a catch or finally is equivalent to putting its body in a block; no exceptions are handled.,1) +targeted 1 + +askType at Foo.scala(2,37) +================================================================================ +[response] askTypeAt (2,37) +1 +================================================================================ + +askType at Foo.scala(3,17) +================================================================================ +[response] askTypeAt (3,17) +1 +================================================================================ + +askType at Foo.scala(4,37) +================================================================================ +[response] askTypeAt (4,37) +1 +================================================================================ +Problem(RangePosition(t12308/src/Foo.scala, 67, 67, 72),A try without a catch or finally is equivalent to putting its body in a block; no exceptions are handled.,1) +reload: Foo.scala +targeted 2 - doesn't handle nowarn correctly + +askType at Foo.scala(2,37) +================================================================================ +[response] askTypeAt (2,37) +1 +================================================================================ + +askType at Foo.scala(3,17) +================================================================================ 
+[response] askTypeAt (3,17) +1 +================================================================================ + +askType at Foo.scala(4,37) +================================================================================ +[response] askTypeAt (4,37) +1 +================================================================================ +Problem(RangePosition(t12308/src/Foo.scala, 67, 67, 72),A try without a catch or finally is equivalent to putting its body in a block; no exceptions are handled.,1) +Problem(RangePosition(t12308/src/Foo.scala, 109, 109, 114),A try without a catch or finally is equivalent to putting its body in a block; no exceptions are handled.,1) diff --git a/test/files/presentation/t12308/Test.scala b/test/files/presentation/t12308/Test.scala new file mode 100644 index 000000000000..fe767587654c --- /dev/null +++ b/test/files/presentation/t12308/Test.scala @@ -0,0 +1,50 @@ +import scala.tools.nsc.interactive.tests.InteractiveTest + +object Test extends InteractiveTest { + + def ws(): Unit = { + println(compiler.unitOfFile.values.flatMap(_.problems).mkString("", "\n", "")) + } + + override def runDefaultTests() { + val run = compiler.currentRun + + println("askLoadedTyped 1") + sourceFiles foreach (src => askLoadedTyped(src).get) + ws() + assert(run eq compiler.currentRun) + + println("askLoadedTyped 2") + sourceFiles foreach (src => askLoadedTyped(src).get) // tree is already typed, typer is not called + ws() + assert(run eq compiler.currentRun) + + askReload(sourceFiles) // new run, new tree, type checking again + println("askLoadedTyped 3") + sourceFiles foreach (src => askLoadedTyped(src).get) + ws() + val run1 = compiler.currentRun + assert(run ne run1) + + println("targeted 1") + // tree is already typed, typer not called + new TypeAction(compiler).runTest() + assert(run1 eq compiler.currentRun) + ws() + + askReload(sourceFiles) + + + // what happens here: + // 1. targeted type check of `foo`, warningin is suspended, then *not* reported because of the nowarn. + // once that type check is finished, `reportSuspendedMessages` is called + // 2. targeted type check of `bar`, warning is directly issued because `reportSuspendedMessages` was called + // before in that run, for that source file; `suppressions` are considered known. + // 3. targeted type check of `baz`, warning is directly issued, though it should be filtered out... + println("targeted 2 - doesn't handle nowarn correctly") + // tree not yet typed + new TypeAction(compiler).runTest() + assert(run1 ne compiler.currentRun) + ws() + } +} diff --git a/test/files/presentation/t12308/src/Foo.scala b/test/files/presentation/t12308/src/Foo.scala new file mode 100644 index 000000000000..5a5d918cb3bd --- /dev/null +++ b/test/files/presentation/t12308/src/Foo.scala @@ -0,0 +1,5 @@ +class Foo { + @annotation.nowarn def foo = try 1 /*?*/ + def bar = try 1/*?*/ + @annotation.nowarn def bzz = try 1 /*?*/ +} diff --git a/test/scaladoc/run/t5527.check b/test/scaladoc/run/t5527.check index bfaa4ad0ed07..84392fe76ffa 100644 --- a/test/scaladoc/run/t5527.check +++ b/test/scaladoc/run/t5527.check @@ -1,12 +1,3 @@ -newSource1.scala:47: warning: discarding unmoored doc comment - /** Document this crucial constant for posterity. - ^ -newSource1.scala:64: warning: discarding unmoored doc comment - /*************************\ - ^ -newSource1.scala:73: warning: discarding unmoored doc comment - val i = 10 */** Important! 
- ^ [[syntax trees at end of parser]] // newSource1.scala package { object UselessComments extends scala.AnyRef { @@ -130,3 +121,15 @@ package { } } +newSource1.scala:42: warning: Tag '@martin' is not recognised + /** @martin is this right? It shouldn't flag me as scaladoc. */ + ^ +newSource1.scala:47: warning: discarding unmoored doc comment + /** Document this crucial constant for posterity. + ^ +newSource1.scala:64: warning: discarding unmoored doc comment + /*************************\ + ^ +newSource1.scala:73: warning: discarding unmoored doc comment + val i = 10 */** Important! + ^ From 9bc7bfa0f6a561bac6bd400b73fc43c12c07fd53 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Tue, 2 Mar 2021 14:59:10 +0100 Subject: [PATCH 023/769] [nomerge] Reuse `tasksupport` in CombinerFactory Reported in scala-parallel-collections 152. --- .../scala/collection/parallel/ParIterableLike.scala | 12 ++++++++++-- test/junit/scala/collection/parallel/TaskTest.scala | 9 +++++++++ 2 files changed, 19 insertions(+), 2 deletions(-) diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala index 9633d3aac315..496da06b3c91 100644 --- a/src/library/scala/collection/parallel/ParIterableLike.scala +++ b/src/library/scala/collection/parallel/ParIterableLike.scala @@ -574,7 +574,11 @@ self: ParIterableLike[T, Repr, Sequential] => def apply() = shared def doesShareCombiners = true } else new CombinerFactory[T, Repr] { - def apply() = newCombiner + def apply() = { + val r = newCombiner + r.combinerTaskSupport = tasksupport + r + } def doesShareCombiners = false } } @@ -587,7 +591,11 @@ self: ParIterableLike[T, Repr, Sequential] => def apply() = shared def doesShareCombiners = true } else new CombinerFactory[S, That] { - def apply() = cbf() + def apply() = { + val r = cbf() + r.combinerTaskSupport = tasksupport + r + } def doesShareCombiners = false } } diff --git a/test/junit/scala/collection/parallel/TaskTest.scala b/test/junit/scala/collection/parallel/TaskTest.scala index 6a86f78261b2..fe9589746741 100644 --- a/test/junit/scala/collection/parallel/TaskTest.scala +++ b/test/junit/scala/collection/parallel/TaskTest.scala @@ -27,4 +27,13 @@ class TaskTest { for (x <- one ; y <- two) assert(Thread.currentThread.getName == "two") } + + @Test // https://github.com/scala/scala-parallel-collections/issues/152 + def `propagate tasksupport through CombinerFactory`(): Unit = { + val myTs = new ExecutionContextTaskSupport() + val c = List(1).par + c.tasksupport = myTs + val r = c.filter(_ != 0).map(_ + 1) + assert(myTs eq r.tasksupport) + } } From 6eaf2174ff95ac91c3431f6048913be1f300256c Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Thu, 25 Feb 2021 13:47:31 +0000 Subject: [PATCH 024/769] Fix bad performance on complex patmat AnalysisBudget.maxDPLLdepth is already working to limit the initial SAT solving. But given enough unassigned symbols, like the test case, the compiler can end up spending the rest of eternity and all its memory expanding the model. So apply the limit where it hurts most (the cartesian product part). The ordered sets and various sortings, instead, are to stabilise the results. 
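For intuition, a minimal, self-contained sketch of why capping the cartesian product bounds the work
(illustration only; `CartesianCap`, `cappedProduct` and `maxDepth` are made-up names, not the code in
this patch): each additional unassigned variable multiplies the number of expanded counter-examples,
so truncating the accumulator at every step keeps time and memory bounded by the cap.

    // Illustration: expanding one choice per variable multiplies the
    // number of combinations, so the accumulator is truncated at each step.
    object CartesianCap {
      val maxDepth = 100 // stand-in for AnalysisBudget.maxDPLLdepth

      def cappedProduct[A](choicesPerVar: List[List[A]]): List[List[A]] =
        choicesPerVar.foldLeft(List(List.empty[A])) { (acc, choices) =>
          val next = for (partial <- acc; c <- choices) yield c :: partial
          next.take(maxDepth) // uncapped, this grows as the product of all choice counts
        }

      def main(args: Array[String]): Unit = {
        // 10 variables with 10 candidate constants each: 10^10 combinations uncapped
        println(cappedProduct(List.fill(10)((1 to 10).toList)).size) // prints 100
      }
    }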
--- .../tools/nsc/transform/patmat/Logic.scala | 133 ++++---- .../nsc/transform/patmat/MatchAnalysis.scala | 71 ++--- .../transform/patmat/MatchOptimization.scala | 24 +- .../transform/patmat/MatchTreeMaking.scala | 2 +- .../tools/nsc/transform/patmat/Solving.scala | 285 ++++++++---------- test/files/neg/t12237.check | 10 + test/files/neg/t12237.scala | 30 ++ 7 files changed, 278 insertions(+), 277 deletions(-) create mode 100644 test/files/neg/t12237.check create mode 100644 test/files/neg/t12237.scala diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala index a575a4c933e7..4a86fd7f912e 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala @@ -18,7 +18,7 @@ import scala.collection.immutable.ArraySeq import scala.reflect.internal.util.Collections._ import scala.reflect.internal.util.{HashSet, StatisticsStatics} -trait Logic extends Debugging { +trait Logic extends Debugging { import global._ private def max(xs: Seq[Int]) = if (xs.isEmpty) 0 else xs.max @@ -117,12 +117,20 @@ trait Logic extends Debugging { // but that requires typing relations like And(x: Tx, y: Ty) : (if(Tx == PureProp && Ty == PureProp) PureProp else Prop) final case class And(ops: Set[Prop]) extends Prop object And { - def apply(ops: Prop*) = new And(ops.toSet) + def apply(ps: Prop*) = create(ps) + def create(ps: Iterable[Prop]) = ps match { + case ps: Set[Prop] => new And(ps) + case _ => new And(ps.to(scala.collection.immutable.ListSet)) + } } final case class Or(ops: Set[Prop]) extends Prop object Or { - def apply(ops: Prop*) = new Or(ops.toSet) + def apply(ps: Prop*) = create(ps) + def create(ps: Iterable[Prop]) = ps match { + case ps: Set[Prop] => new Or(ps) + case _ => new Or(ps.to(scala.collection.immutable.ListSet)) + } } final case class Not(a: Prop) extends Prop @@ -161,8 +169,17 @@ trait Logic extends Debugging { implicit val SymOrdering: Ordering[Sym] = Ordering.by(_.id) } - def /\(props: Iterable[Prop]) = if (props.isEmpty) True else And(props.toSeq: _*) - def \/(props: Iterable[Prop]) = if (props.isEmpty) False else Or(props.toSeq: _*) + def /\(props: Iterable[Prop]) = props match { + case _ if props.isEmpty => True + case _ if props.sizeIs == 1 => props.head + case _ => And.create(props) + } + + def \/(props: Iterable[Prop]) = props match { + case _ if props.isEmpty => False + case _ if props.sizeIs == 1 => props.head + case _ => Or.create(props) + } /** * Simplifies propositional formula according to the following rules: @@ -267,61 +284,44 @@ trait Logic extends Debugging { | (_: AtMostOne) => p } - def simplifyProp(p: Prop): Prop = p match { - case And(fv) => - // recurse for nested And (pulls all Ands up) - // build up Set in order to remove duplicates - val opsFlattenedBuilder = collection.immutable.Set.newBuilder[Prop] - for (prop <- fv) { - val simplified = simplifyProp(prop) - if (simplified != True) { // ignore `True` - simplified match { - case And(fv) => fv.foreach(opsFlattenedBuilder += _) - case f => opsFlattenedBuilder += f - } - } - } - val opsFlattened = opsFlattenedBuilder.result() - - if (opsFlattened.contains(False) || hasImpureAtom(opsFlattened)) { - False - } else { - opsFlattened.size match { - case 0 => True - case 1 => opsFlattened.head - case _ => new And(opsFlattened) - } + def simplifyAnd(ps: Set[Prop]): Prop = { + // recurse for nested And (pulls all Ands up) + // build up Set in order to remove duplicates + val props = 
mutable.HashSet.empty[Prop] + for (prop <- ps) { + simplifyProp(prop) match { + case True => // ignore `True` + case And(fv) => fv.foreach(props += _) + case f => props += f } - case Or(fv) => - // recurse for nested Or (pulls all Ors up) - // build up Set in order to remove duplicates - val opsFlattenedBuilder = collection.immutable.Set.newBuilder[Prop] - for (prop <- fv) { - val simplified = simplifyProp(prop) - if (simplified != False) { // ignore `False` - simplified match { - case Or(fv) => fv.foreach(opsFlattenedBuilder += _) - case f => opsFlattenedBuilder += f - } - } - } - val opsFlattened = opsFlattenedBuilder.result() - - if (opsFlattened.contains(True) || hasImpureAtom(opsFlattened)) { - True - } else { - opsFlattened.size match { - case 0 => False - case 1 => opsFlattened.head - case _ => new Or(opsFlattened) - } + } + + if (props.contains(False) || hasImpureAtom(props)) False + else /\(props) + } + + def simplifyOr(ps: Set[Prop]): Prop = { + // recurse for nested Or (pulls all Ors up) + // build up Set in order to remove duplicates + val props = mutable.HashSet.empty[Prop] + for (prop <- ps) { + simplifyProp(prop) match { + case False => // ignore `False` + case Or(fv) => props ++= fv + case f => props += f } - case Not(Not(a)) => - simplify(a) - case Not(p) => - Not(simplify(p)) - case p => - p + } + + if (props.contains(True) || hasImpureAtom(props)) True + else \/(props) + } + + def simplifyProp(p: Prop): Prop = p match { + case And(ps) => simplifyAnd(ps) + case Or(ps) => simplifyOr(ps) + case Not(Not(a)) => simplify(a) + case Not(p) => Not(simplify(p)) + case p => p } val nnf = negationNormalForm(f) @@ -344,7 +344,7 @@ trait Logic extends Debugging { } def gatherVariables(p: Prop): collection.Set[Var] = { - val vars = new mutable.HashSet[Var]() + val vars = new mutable.LinkedHashSet[Var]() (new PropTraverser { override def applyVar(v: Var) = vars += v })(p) @@ -352,7 +352,7 @@ trait Logic extends Debugging { } def gatherSymbols(p: Prop): collection.Set[Sym] = { - val syms = new mutable.HashSet[Sym]() + val syms = new mutable.LinkedHashSet[Sym]() (new PropTraverser { override def applySymbol(s: Sym) = syms += s })(p) @@ -511,7 +511,7 @@ trait Logic extends Debugging { final case class Solution(model: Model, unassigned: List[Sym]) - def findModelFor(solvable: Solvable): Model + def hasModel(solvable: Solvable): Boolean def findAllModelsFor(solvable: Solvable, sym: Symbol = NoSymbol): List[Solution] } @@ -562,7 +562,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis { val subConsts = enumerateSubtypes(staticTp, grouped = false) .headOption.map { tps => - tps.toSet[Type].map{ tp => + tps.to(scala.collection.immutable.ListSet).map { tp => val domainC = TypeConst(tp) registerEquality(domainC) domainC @@ -583,7 +583,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis { val subtypes = enumerateSubtypes(staticTp, grouped = true) subtypes.map { subTypes => - val syms = subTypes.flatMap(tpe => symForEqualsTo.get(TypeConst(tpe))).toSet + val syms = subTypes.flatMap(tpe => symForEqualsTo.get(TypeConst(tpe))).to(scala.collection.immutable.ListSet) if (mayBeNull) syms + symForEqualsTo(NullConst) else syms }.filter(_.nonEmpty) } @@ -719,13 +719,14 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis { lazy val symForStaticTp: Option[Sym] = symForEqualsTo.get(TypeConst(staticTpCheckable)) // don't access until all potential equalities have been registered using registerEquality - private lazy val equalitySyms = 
{observed(); symForEqualsTo.values.toList} + private lazy val equalitySyms = {observed(); symForEqualsTo.values.toList.sortBy(_.toString) } // don't call until all equalities have been registered and registerNull has been called (if needed) def describe = { + val consts = symForEqualsTo.keys.toSeq.sortBy(_.toString) def domain_s = domain match { - case Some(d) => d.mkString(" ::= ", " | ", "// "+ symForEqualsTo.keys) - case _ => symForEqualsTo.keys.mkString(" ::= ", " | ", " | ...") + case Some(d) => d.mkString(" ::= ", " | ", "// " + consts) + case _ => consts.mkString(" ::= ", " | ", " | ...") } s"$this: ${staticTp}${domain_s} // = $path" } diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala index 2ea32b41e270..dba50d2ef944 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala @@ -498,13 +498,8 @@ trait MatchAnalysis extends MatchApproximation { else { prefix += prefHead current = current.tail - val and = And((current.head +: prefix).toIndexedSeq: _*) - val model = findModelFor(eqFreePropToSolvable(and)) - - // debug.patmat("trying to reach:\n"+ cnfString(current.head) +"\nunder prefix:\n"+ cnfString(prefix)) - // if (NoModel ne model) debug.patmat("reached: "+ modelString(model)) - - reachable = NoModel ne model + val and = And((current.head +: prefix).toIndexedSeq: _*) + reachable = hasModel(eqFreePropToSolvable(and)) } } @@ -573,13 +568,9 @@ trait MatchAnalysis extends MatchApproximation { val matchFailModels = findAllModelsFor(propToSolvable(matchFails), prevBinder) val scrutVar = Var(prevBinderTree) - val counterExamples = { - matchFailModels.flatMap { - model => - val varAssignments = expandModel(model) - varAssignments.flatMap(modelToCounterExample(scrutVar) _) - } - } + val counterExamples = matchFailModels.iterator.flatMap { model => + expandModel(model).flatMap(modelToCounterExample(scrutVar)) + }.take(AnalysisBudget.maxDPLLdepth).toList // sorting before pruning is important here in order to // keep neg/t7020.scala stable @@ -658,16 +649,18 @@ trait MatchAnalysis extends MatchApproximation { case object WildcardExample extends CounterExample { override def toString = "_" } case object NoExample extends CounterExample { override def toString = "??" } + type VarAssignment = Map[Var, (Seq[Const], Seq[Const])] + // returns a mapping from variable to // equal and notEqual symbols - def modelToVarAssignment(model: Model): Map[Var, (Seq[Const], Seq[Const])] = + def modelToVarAssignment(model: Model): VarAssignment = model.toSeq.groupBy(_._1.variable).view.mapValues{ xs => val (trues, falses) = xs.partition(_._2) (trues map (_._1.const), falses map (_._1.const)) // should never be more than one value in trues... }.to(Map) - def varAssignmentString(varAssignment: Map[Var, (Seq[Const], Seq[Const])]) = + def varAssignmentString(varAssignment: VarAssignment) = varAssignment.toSeq.sortBy(_._1.toString).map { case (v, (trues, falses)) => s"$v(=${v.path}: ${v.staticTpCheckable}) == ${trues.mkString("(", ", ", ")")} != (${falses.mkString(", ")})" }.mkString("\n") @@ -702,7 +695,7 @@ trait MatchAnalysis extends MatchApproximation { * Only one of these symbols can be set to true, * since `V2` can at most be equal to one of {2,6,5,4,7}. 
*/ - def expandModel(solution: Solution): List[Map[Var, (Seq[Const], Seq[Const])]] = { + def expandModel(solution: Solution): List[VarAssignment] = { val model = solution.model @@ -719,7 +712,7 @@ trait MatchAnalysis extends MatchApproximation { val groupedByVar: Map[Var, List[Sym]] = solution.unassigned.groupBy(_.variable) val expanded = for { - (variable, syms) <- groupedByVar.toList + (variable, syms) <- groupedByVar.toList.sortBy(_._1.toString) } yield { val (equal, notEqual) = varAssignment.getOrElse(variable, Nil -> Nil) @@ -735,7 +728,7 @@ trait MatchAnalysis extends MatchApproximation { // a list counter example could contain wildcards: e.g. `List(_,_)` val allEqual = addVarAssignment(syms.map(_.const), Nil) - if(equal.isEmpty) { + if (equal.isEmpty) { val oneHot = for { s <- syms } yield { @@ -747,34 +740,32 @@ trait MatchAnalysis extends MatchApproximation { } } - if (expanded.isEmpty) { - List(varAssignment) - } else { - // we need the Cartesian product here, - // since we want to report all missing cases - // (i.e., combinations) - val cartesianProd = expanded.reduceLeft((xs, ys) => - for {map1 <- xs - map2 <- ys} yield { - map1 ++ map2 - }) - - // add expanded variables - // note that we can just use `++` - // since the Maps have disjoint keySets - for { - m <- cartesianProd - } yield { - varAssignment ++ m + // we need the Cartesian product here, + // since we want to report all missing cases + // (i.e., combinations) + @tailrec def loop(acc: List[VarAssignment], in: List[List[VarAssignment]]): List[VarAssignment] = { + if (acc.sizeIs > AnalysisBudget.maxDPLLdepth) acc.take(AnalysisBudget.maxDPLLdepth) + else in match { + case vs :: vss => loop(for (map1 <- acc; map2 <- vs) yield map1 ++ map2, vss) + case _ => acc } } + expanded match { + case head :: tail => + val cartesianProd = loop(head, tail) + // add expanded variables + // note that we can just use `++` + // since the Maps have disjoint keySets + for (m <- cartesianProd) yield varAssignment ++ m + case _ => List(varAssignment) + } } // return constructor call when the model is a true counter example // (the variables don't take into account type information derived from other variables, // so, naively, you might try to construct a counter example like _ :: Nil(_ :: _, _ :: _), // since we didn't realize the tail of the outer cons was a Nil) - def modelToCounterExample(scrutVar: Var)(varAssignment: Map[Var, (Seq[Const], Seq[Const])]): Option[CounterExample] = { + def modelToCounterExample(scrutVar: Var)(varAssignment: VarAssignment): Option[CounterExample] = { val strict = !settings.nonStrictPatmatAnalysis.value // chop a path into a list of symbols @@ -919,7 +910,7 @@ trait MatchAnalysis extends MatchApproximation { } // slurp in information from other variables - varAssignment.keys.foreach{ v => if (v != scrutVar) VariableAssignment(v) } + varAssignment.keys.toSeq.sortBy(_.toString).foreach(v => if (v != scrutVar) VariableAssignment(v)) // this is the variable we want a counter example for VariableAssignment(scrutVar).toCounterExample() diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala index f94b457ce70f..b76395f69338 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala @@ -43,8 +43,8 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis { val testss = approximateMatchConservative(prevBinder, 
cases) // interpret: - val dependencies = new mutable.LinkedHashMap[Test, Set[Prop]] - val tested = new mutable.HashSet[Prop] + val dependencies = new mutable.LinkedHashMap[Test, mutable.LinkedHashSet[Prop]] + val tested = new mutable.LinkedHashSet[Prop] val reusesMap = new mutable.LinkedHashMap[Int, Test] val reusesTest = { (test: Test) => reusesMap.get(test.id) } val registerReuseBy = { (priorTest: Test, later: Test) => @@ -57,32 +57,32 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis { val cond = test.prop def simplify(c: Prop): Set[Prop] = c match { - case And(ops) => ops.toSet flatMap simplify + case And(ops) => ops flatMap simplify case Or(ops) => Set(False) // TODO: make more precise - case Not(Eq(Var(_), NullConst)) => Set(True) // not worth remembering + case Not(Eq(Var(_), NullConst)) => Set.empty // not worth remembering + case True => Set.empty // same case _ => Set(c) } val conds = simplify(cond) if (conds(False)) false // stop when we encounter a definite "no" or a "not sure" else { - val nonTrivial = conds - True - if (!nonTrivial.isEmpty) { - tested ++= nonTrivial + if (!conds.isEmpty) { + tested ++= conds // is there an earlier test that checks our condition and whose dependencies are implied by ours? dependencies find { case (priorTest, deps) => - ((simplify(priorTest.prop) == nonTrivial) || // our conditions are implied by priorTest if it checks the same thing directly - (nonTrivial subsetOf deps) // or if it depends on a superset of our conditions - ) && (deps subsetOf tested) // the conditions we've tested when we are here in the match satisfy the prior test, and hence what it tested + ((simplify(priorTest.prop) == conds) || // our conditions are implied by priorTest if it checks the same thing directly + (conds subsetOf deps) // or if it depends on a superset of our conditions + ) && (deps subsetOf tested) // the conditions we've tested when we are here in the match satisfy the prior test, and hence what it tested } foreach { case (priorTest, _) => // if so, note the dependency in both tests registerReuseBy(priorTest, test) } - dependencies(test) = tested.toSet // copies + dependencies(test) = tested.clone() } true } @@ -108,7 +108,7 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis { val collapsed = testss map { tests => // map tests to the equivalent list of treemakers, replacing shared prefixes by a reusing treemaker // if there's no sharing, simply map to the tree makers corresponding to the tests - var currDeps = Set[Prop]() + var currDeps = mutable.LinkedHashSet.empty[Prop] val (sharedPrefix, suffix) = tests span { test => (test.prop == True) || (for( reusedTest <- reusesTest(test); diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala index aa6412d55883..f5eed14680b0 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala @@ -145,7 +145,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { // mutable case class fields need to be stored regardless (scala/bug#5158, scala/bug#6070) -- see override in ProductExtractorTreeMaker // sub patterns bound to wildcard (_) are never stored as they can't be referenced // dirty debuggers will have to get dirty to see the wildcards - lazy val storedBinders: Set[Symbol] = + private lazy val storedBinders: Set[Symbol] = (if (debugInfoEmitVars) subPatBinders.toSet else Set.empty) ++ 
extraStoredBinders diff ignoredSubPatBinders // e.g., mutable fields of a case class in ProductExtractorTreeMaker diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala index 6f52f70bc53b..b7049821f0c5 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala @@ -12,61 +12,38 @@ package scala.tools.nsc.transform.patmat -import java.util - import scala.annotation.tailrec import scala.collection.mutable.ArrayBuffer import scala.collection.{immutable, mutable} import scala.reflect.internal.util.StatisticsStatics -// a literal is a (possibly negated) variable -case class Lit(val v: Int) { - private var negated: Lit = null - def unary_- : Lit = { - if (negated eq null) negated = Lit(-v) - negated - } - - def variable: Int = Math.abs(v) - - def positive = v >= 0 - - override def toString(): String = s"Lit#$v" - - override val hashCode: Int = v -} - -object Lit { - def apply(v: Int): Lit = new Lit(v) - - implicit val LitOrdering: Ordering[Lit] = Ordering.by(_.v) -} - -/** Solve pattern matcher exhaustivity problem via DPLL. - */ +/** Solve pattern matcher exhaustivity problem via DPLL. */ trait Solving extends Logic { import global._ trait CNF extends PropositionalLogic { + // a literal is a (possibly negated) variable + type Lit <: LitApi + trait LitApi { + def unary_- : Lit + } - type Clause = Set[Lit] + def Lit: LitModule + trait LitModule { + def apply(v: Int): Lit + } + + type Clause = Set[Lit] - val NoClauses: Array[Clause] = Array() + val NoClauses: Array[Clause] = Array() val ArrayOfFalse: Array[Clause] = Array(clause()) + // a clause is a disjunction of distinct literals - def clause(): Clause = Set.empty - def clause(l: Lit): Clause = { - Set.empty + l - } - def clause(l: Lit, l2: Lit): Clause = { - Set.empty + l + l2 - } - def clause(l: Lit, l2: Lit, ls: Lit*): Clause = { - Set.empty + l + l2 ++ ls - } - def clause(ls: IterableOnce[Lit]): Clause = { - Set.from(ls) - } + def clause(): Clause = Set.empty + def clause(l: Lit): Clause = Set.empty + l + def clause(l: Lit, l2: Lit): Clause = Set.empty + l + l2 + def clause(l: Lit, l2: Lit, ls: Lit*): Clause = Set.empty + l + l2 ++ ls + def clause(ls: IterableOnce[Lit]): Clause = Set.from(ls) /** Conjunctive normal form (of a Boolean formula). 
* A formula in this form is amenable to a SAT solver @@ -83,8 +60,7 @@ trait Solving extends Logic { val symForVar: Map[Int, Sym] = variableForSymbol.map(_.swap) - val relevantVars = - symForVar.keysIterator.map(math.abs).to(immutable.BitSet) + val relevantVars = symForVar.keysIterator.map(math.abs).to(immutable.BitSet) def lit(sym: Sym): Lit = Lit(variableForSymbol(sym)) @@ -390,7 +366,22 @@ trait Solving extends Logic { } // simple solver using DPLL + // adapted from https://lara.epfl.ch/w/sav10:simple_sat_solver (original by Hossein Hojjat) trait Solver extends CNF { + case class Lit(v: Int) extends LitApi { + private lazy val negated: Lit = Lit(-v) + + def unary_- : Lit = negated + def variable: Int = Math.abs(v) + def positive: Boolean = v >= 0 + + override def toString = s"Lit#$v" + override def hashCode = v + } + + object Lit extends LitModule { + def apply(v: Int): Lit = new Lit(v) + } def cnfString(f: Array[Clause]): String = { val lits: Array[List[String]] = f map (_.map(_.toString).toList) @@ -399,8 +390,6 @@ trait Solving extends Logic { aligned } - // adapted from https://lara.epfl.ch/w/sav10:simple_sat_solver (original by Hossein Hojjat) - // empty set of clauses is trivially satisfied val EmptyModel = Map.empty[Sym, Boolean] @@ -411,57 +400,59 @@ trait Solving extends Logic { // this model contains the auxiliary variables as well type TseitinModel = List[Lit] - val EmptyTseitinModel = Nil val NoTseitinModel: TseitinModel = null // returns all solutions, if any (TODO: better infinite recursion backstop -- detect fixpoint??) def findAllModelsFor(solvable: Solvable, owner: Symbol): List[Solution] = { - debug.patmat("find all models for\n"+ cnfString(solvable.cnf)) + import solvable.{ cnf, symbolMapping }, symbolMapping.{ symForVar, relevantVars } + debug.patmat(s"find all models for\n${cnfString(cnf)}") // we must take all vars from non simplified formula // otherwise if we get `T` as formula, we don't expand the variables // that are not in the formula... - val relevantVars: immutable.BitSet = solvable.symbolMapping.relevantVars // debug.patmat("vars "+ vars) // the negation of a model -(S1=True/False /\ ... /\ SN=True/False) = clause(S1=False/True, ...., SN=False/True) // (i.e. 
the blocking clause - used for ALL-SAT) - def negateModel(m: TseitinModel) = { + def negateModel(m: TseitinModel): TseitinModel = { // filter out auxiliary Tseitin variables - val relevantLits = m.filter(l => relevantVars.contains(l.variable)) - relevantLits.map(lit => -lit) + m.filter(lit => relevantVars.contains(lit.variable)).map(lit => -lit) } - final case class TseitinSolution(model: TseitinModel, unassigned: List[Int]) { - def projectToSolution(symForVar: Map[Int, Sym]) = Solution(projectToModel(model, symForVar), unassigned map symForVar) + def newSolution(model: TseitinModel, unassigned: List[Int]): Solution = { + val newModel: Model = if (model eq NoTseitinModel) NoModel else { + model.iterator.collect { + case lit if symForVar.isDefinedAt(lit.variable) => (symForVar(lit.variable), lit.positive) + }.toMap + } + Solution(newModel, unassigned.map(symForVar)) } @tailrec def findAllModels(clauses: Array[Clause], - models: List[TseitinSolution], - recursionDepthAllowed: Int = AnalysisBudget.maxDPLLdepth): List[TseitinSolution]= + models: List[Solution], + recursionDepthAllowed: Int = AnalysisBudget.maxDPLLdepth): List[Solution] = { if (recursionDepthAllowed == 0) { uncheckedWarning(owner.pos, AnalysisBudget.recursionDepthReached, owner) models } else { - debug.patmat("find all models for\n" + cnfString(clauses)) + debug.patmat(s"find all models for\n${cnfString(clauses)}") val model = findTseitinModelFor(clauses) // if we found a solution, conjunct the formula with the model's negation and recurse - if (model ne NoTseitinModel) { + if (model eq NoTseitinModel) models else { // note that we should not expand the auxiliary variables (from Tseitin transformation) // since they are existentially quantified in the final solution - val unassigned: List[Int] = (relevantVars.toList.filterNot(x => model.exists(lit => x == lit.variable))) - debug.patmat("unassigned "+ unassigned +" in "+ model) + val unassigned: List[Int] = relevantVars.filterNot(x => model.exists(lit => x == lit.variable)).toList.sorted + debug.patmat(s"unassigned $unassigned in $model") - val solution = TseitinSolution(model, unassigned) - val negated = negateModel(model) - findAllModels(clauses :+ negated.toSet, solution :: models, recursionDepthAllowed - 1) + val solution = newSolution(model, unassigned) + val negated = negateModel(model).to(scala.collection.immutable.ListSet) + findAllModels(clauses :+ negated, solution :: models, recursionDepthAllowed - 1) } - else models } + } - val tseitinSolutions = findAllModels(solvable.cnf, Nil) - tseitinSolutions.map(_.projectToSolution(solvable.symbolMapping.symForVar)) + findAllModels(solvable.cnf, Nil) } /** Drop trivially true clauses, simplify others by dropping negation of `unitLit`. 
@@ -485,16 +476,13 @@ trait Solving extends Logic { } } - def findModelFor(solvable: Solvable): Model = { - projectToModel(findTseitinModelFor(solvable.cnf.map(_.toSet)), solvable.symbolMapping.symForVar) - } + def hasModel(solvable: Solvable): Boolean = findTseitinModelFor(solvable.cnf) != NoTseitinModel def findTseitinModelFor(clauses: Array[Clause]): TseitinModel = { - debug.patmat(s"DPLL\n${cnfString(clauses)}") - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(statistics.patmatAnaDPLL) else null - val satisfiableWithModel = findTseitinModel0((util.Arrays.copyOf(clauses, clauses.length), Nil) :: Nil) + debug.patmat(s"DPLL\n${cnfString(clauses)}") + val satisfiableWithModel = findTseitinModel0((java.util.Arrays.copyOf(clauses, clauses.length), Nil) :: Nil) if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(statistics.patmatAnaDPLL, start) satisfiableWithModel @@ -535,104 +523,85 @@ trait Solving extends Logic { * */ private def findTseitinModel0(state: TseitinSearch): TseitinModel = { - val pos = new util.BitSet() - val neg = new util.BitSet() - - @annotation.tailrec - def loop(state: TseitinSearch): TseitinModel ={ - state match { - case Nil => NoTseitinModel - case (clauses, assignments) :: rest => - if (clauses.isEmpty || clauses.head == null) assignments - else { - var i = 0 - var emptyIndex = -1 - var unitIndex = -1 - while (i < clauses.length && emptyIndex == -1) { - val clause = clauses(i) - if (clause != null) { - clause.size match { - case 0 => emptyIndex = i - case 1 if unitIndex == -1 => - unitIndex = i - case _ => - } + val pos = new java.util.BitSet() + val neg = new java.util.BitSet() + @tailrec def loop(state: TseitinSearch): TseitinModel = state match { + case Nil => NoTseitinModel + case (clauses, assignments) :: rest => + if (clauses.isEmpty || clauses.head == null) assignments + else { + var i = 0 + var emptyIndex = -1 + var unitIndex = -1 + while (i < clauses.length && emptyIndex == -1) { + val clause = clauses(i) + if (clause != null) { + clause.size match { + case 0 => emptyIndex = i + case 1 if unitIndex == -1 => + unitIndex = i + case _ => } - i += 1 } - if (emptyIndex != -1) - loop(rest) - else if (unitIndex != -1) { - val unitLit = clauses(unitIndex).head - dropUnit(clauses, unitLit) - val tuples: TseitinSearch = (clauses, unitLit :: assignments) :: rest - loop(tuples) - } else { - // partition symbols according to whether they appear in positive and/or negative literals - pos.clear() - neg.clear() - for (clause <- clauses) { - if (clause != null) { - clause.foreach { lit: Lit => - if (lit.positive) pos.set(lit.variable) else neg.set(lit.variable) - } + i += 1 + } + if (emptyIndex != -1) + loop(rest) + else if (unitIndex != -1) { + val unitLit = clauses(unitIndex).head + dropUnit(clauses, unitLit) + val tuples: TseitinSearch = (clauses, unitLit :: assignments) :: rest + loop(tuples) + } else { + // partition symbols according to whether they appear in positive and/or negative literals + pos.clear() + neg.clear() + for (clause <- clauses) { + if (clause != null) { + clause.foreach { lit: Lit => + if (lit.positive) pos.set(lit.variable) else neg.set(lit.variable) } } + } - // appearing only in either positive/negative positions - - pos.xor(neg) - val pures = pos - - if (!pures.isEmpty) { - val pureVar = pures.nextSetBit(0) - // turn it back into a literal - // (since equality on literals is in terms of equality - // of the underlying symbol and its positivity, simply construct a new Lit) - val pureLit: Lit = 
Lit(if (neg.get(pureVar)) -pureVar else pureVar) - // debug.patmat("pure: "+ pureLit +" pures: "+ pures) - val simplified = clauses.filterNot(clause => clause != null && clause.contains(pureLit)) - loop((simplified, pureLit :: assignments) :: rest) - } else { - val split = clauses.find(_ != null).get.head - // debug.patmat("split: "+ split) - var i = 0 - var nullIndex = -1 - while (i < clauses.length && nullIndex == -1) { - if (clauses(i) eq null) nullIndex = i - i += 1 - } + // appearing only in either positive/negative positions - val effectiveLength = if (nullIndex == -1) clauses.length else nullIndex - val posClauses = util.Arrays.copyOf(clauses, effectiveLength + 1) - val negClauses = util.Arrays.copyOf(clauses, effectiveLength + 1) - posClauses(effectiveLength) = Set.empty[Lit] + split - negClauses(effectiveLength) = Set.empty[Lit] + (-split) + pos.xor(neg) + val pures = pos - val pos = (posClauses, assignments) - val neg = (negClauses, assignments) - loop(pos :: neg :: rest) + if (!pures.isEmpty) { + val pureVar = pures.nextSetBit(0) + // turn it back into a literal + // (since equality on literals is in terms of equality + // of the underlying symbol and its positivity, simply construct a new Lit) + val pureLit: Lit = Lit(if (neg.get(pureVar)) -pureVar else pureVar) + // debug.patmat("pure: "+ pureLit +" pures: "+ pures) + val simplified = clauses.filterNot(clause => clause != null && clause.contains(pureLit)) + loop((simplified, pureLit :: assignments) :: rest) + } else { + val split = clauses.find(_ != null).get.head + // debug.patmat("split: "+ split) + var i = 0 + var nullIndex = -1 + while (i < clauses.length && nullIndex == -1) { + if (clauses(i) eq null) nullIndex = i + i += 1 } + + val effectiveLength = if (nullIndex == -1) clauses.length else nullIndex + val posClauses = java.util.Arrays.copyOf(clauses, effectiveLength + 1) + val negClauses = java.util.Arrays.copyOf(clauses, effectiveLength + 1) + posClauses(effectiveLength) = Set.empty[Lit] + split + negClauses(effectiveLength) = Set.empty[Lit] + (-split) + + val pos = (posClauses, assignments) + val neg = (negClauses, assignments) + loop(pos :: neg :: rest) } } - } + } } loop(state) } - - private def projectToModel(model: TseitinModel, symForVar: Map[Int, Sym]): Model = - if (model == NoTseitinModel) NoModel - else if (model == EmptyTseitinModel) EmptyModel - else { - val mappedModels = model.iterator.toList collect { - case lit if symForVar isDefinedAt lit.variable => (symForVar(lit.variable), lit.positive) - } - if (mappedModels.isEmpty) { - // could get an empty model if mappedModels is a constant like `True` - EmptyModel - } else { - mappedModels.toMap - } - } } } diff --git a/test/files/neg/t12237.check b/test/files/neg/t12237.check new file mode 100644 index 000000000000..dbe091243e5e --- /dev/null +++ b/test/files/neg/t12237.check @@ -0,0 +1,10 @@ +t12237.scala:24: warning: Exhaustivity analysis reached max recursion depth, not all missing cases are reported. +(Please try with scalac -Ypatmat-exhaust-depth 40 or -Ypatmat-exhaust-depth off.) + (pq: PathAndQuery) match { + ^ +t12237.scala:24: warning: match may not be exhaustive. + (pq: PathAndQuery) match { + ^ +error: No warnings can be incurred under -Werror. 
+2 warnings +1 error diff --git a/test/files/neg/t12237.scala b/test/files/neg/t12237.scala new file mode 100644 index 000000000000..480634cf86f6 --- /dev/null +++ b/test/files/neg/t12237.scala @@ -0,0 +1,30 @@ +// scalac: -Werror +sealed trait PathAndQuery +sealed trait Path extends PathAndQuery +sealed trait Query extends PathAndQuery + +object PathAndQuery { + case object Root extends Path + case class /(prev: Path, value: String) extends Path + + case class ===(k: String, v: String) extends Query + case class :&(prev: Query, next: (===)) extends Query + case class +?(path: Path, next: (===)) extends Query +} + +object Main { + def main(args: Array[String]): Unit = { + import PathAndQuery._ + + val path = /(/(Root, "page"), "1") + val q1 = ===("k1", "v1") + val q2 = ===("k2", "v2") + val pq = :&(+?(path, q1), q2) + + (pq: PathAndQuery) match { + case Root / "page" / "1" => println("match 1") + case Root / "page" / "1" +? ("k1" === "v1") => println("match 2") + case Root / "page" / "1" +? ("k1" === "v1") :& ("k2" === "v2") => println("match 3") + } + } +} From 238a621bfadc009d038b94d22de3c468f93e74b2 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 3 Mar 2021 23:56:27 +1000 Subject: [PATCH 025/769] Support scala STATIC methods in constant pool MethodHandles --- .../scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala index 172708ef24d3..8554304cb7c3 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala @@ -157,8 +157,11 @@ abstract class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { val rawInternalName = ownerBType.internalName val ownerInternalName = rawInternalName val isInterface = sym.owner.isTraitOrInterface - val tag = if (sym.owner.isJavaDefined && sym.isStaticMember) throw new UnsupportedOperationException() - else if (isConstructor) asm.Opcodes.H_NEWINVOKESPECIAL + val tag = + if (sym.isStaticMember) { + if (sym.owner.isJavaDefined) throw new UnsupportedOperationException("handled by staticHandleFromSymbol") + else asm.Opcodes.H_INVOKESTATIC + } else if (isConstructor) asm.Opcodes.H_NEWINVOKESPECIAL else if (isInterface) asm.Opcodes.H_INVOKEINTERFACE else asm.Opcodes.H_INVOKEVIRTUAL new asm.Handle(tag, ownerInternalName, if (isConstructor) sym.name.toString else sym.name.encoded, descriptor, isInterface) From abc6c20df7483454f493ab4ff8947bcaa6486a97 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Tue, 23 Feb 2021 17:00:19 +0100 Subject: [PATCH 026/769] Follow JDK 11+ spec for signature polymorphic methods No test case because JDK 11... --- spec/06-expressions.md | 12 +++++--- .../backend/jvm/opt/ByteCodeRepository.scala | 29 ++++++++++++++----- .../nsc/backend/jvm/opt/BytecodeUtils.scala | 2 ++ .../scala/tools/nsc/typechecker/Typers.scala | 7 +++-- 4 files changed, 37 insertions(+), 13 deletions(-) diff --git a/spec/06-expressions.md b/spec/06-expressions.md index 905fa5bf4925..d857a56219f7 100644 --- a/spec/06-expressions.md +++ b/spec/06-expressions.md @@ -400,13 +400,17 @@ The final result of the transformation is a block of the form For invocations of signature polymorphic methods of the target platform `´f´(´e_1 , \ldots , e_m´)`, the invoked method has a different method type `(´p_1´:´T_1 , \ldots , p_n´:´T_n´)´U´` at each call site. 
The parameter types `´T_ , \ldots , T_n´` are the types of the argument expressions -`´e_1 , \ldots , e_m´` and `´U´` is the expected type at the call site. If the expected type is -undefined then `´U´` is `scala.AnyRef`. The parameter names `´p_1 , \ldots , p_n´` are fresh. +`´e_1 , \ldots , e_m´`. If the declared return type `´R´` of the signature polymorphic method is +any type other than `scala.AnyRef`, then the return type `´U´` is `´R´`. +Otherwise, `´U´` is the expected type at the call site. If the expected type is undefined then +`´U´` is `scala.AnyRef`. The parameter names `´p_1 , \ldots , p_n´` are fresh. ###### Note -On the Java platform version 7 and later, the methods `invoke` and `invokeExact` in class -`java.lang.invoke.MethodHandle` are signature polymorphic. +On the Java platform version 11 and later, signature polymorphic methods are native, +members of `java.lang.invoke.MethodHandle` or `java.lang.invoke.VarHandle`, and have a single +repeated parameter of type `java.lang.Object*`. + ## Method Values diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala index a3d500fb1dfa..2d08d3ea5d8b 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala @@ -19,7 +19,7 @@ import scala.collection.{concurrent, mutable} import scala.jdk.CollectionConverters._ import scala.reflect.internal.util.NoPosition import scala.tools.asm -import scala.tools.asm.Attribute +import scala.tools.asm.{Attribute, Type} import scala.tools.asm.tree._ import scala.tools.nsc.backend.jvm.BTypes.InternalName import scala.tools.nsc.backend.jvm.BackendReporting._ @@ -162,9 +162,21 @@ abstract class ByteCodeRepository extends PerRunInit { def methodNode(ownerInternalNameOrArrayDescriptor: String, name: String, descriptor: String): Either[MethodNotFound, (MethodNode, InternalName)] = { def findMethod(c: ClassNode): Option[MethodNode] = c.methods.asScala.find(m => m.name == name && m.desc == descriptor) - // https://docs.oracle.com/javase/specs/jvms/se8/html/jvms-2.html#jvms-2.9: "In Java SE 8, the only - // signature polymorphic methods are the invoke and invokeExact methods of the class MethodHandle. - def isSignaturePolymorphic(owner: InternalName) = owner == coreBTypes.jliMethodHandleRef.internalName && (name == "invoke" || name == "invokeExact") + // https://docs.oracle.com/javase/specs/jvms/se11/html/jvms-2.html#jvms-2.9.3 + def findSignaturePolymorphic(owner: ClassNode): Option[MethodNode] = { + def hasObjectArrayParam(m: MethodNode) = Type.getArgumentTypes(m.desc) match { + case Array(pt) => pt.getDimensions == 1 && pt.getElementType.getInternalName == coreBTypes.ObjectRef.internalName + case _ => false + } + // Don't try to build a BType for `VarHandle`, it doesn't exist on JDK 8 + if (owner.name == coreBTypes.jliMethodHandleRef.internalName || owner.name == "java/lang/invoke/VarHandle") + owner.methods.asScala.find(m => + m.name == name && + isNativeMethod(m) && + isVarargsMethod(m) && + hasObjectArrayParam(m)) + else None + } // Note: if `owner` is an interface, in the first iteration we search for a matching member in the interface itself. // If that fails, the recursive invocation checks in the superclass (which is Object) with `publicInstanceOnly == true`. 
@@ -173,9 +185,12 @@ abstract class ByteCodeRepository extends PerRunInit { findMethod(owner) match { case Some(m) if !publicInstanceOnly || (isPublicMethod(m) && !isStaticMethod(m)) => Right(Some((m, owner.name))) case _ => - if (isSignaturePolymorphic(owner.name)) Right(Some((owner.methods.asScala.find(_.name == name).get, owner.name))) - else if (owner.superName == null) Right(None) - else classNode(owner.superName).flatMap(findInSuperClasses(_, publicInstanceOnly = isInterface(owner))) + findSignaturePolymorphic(owner) match { + case Some(m) => Right(Some((m, owner.name))) + case _ => + if (owner.superName == null) Right(None) + else classNode(owner.superName).flatMap(findInSuperClasses(_, publicInstanceOnly = isInterface(owner))) + } } } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala index 3da843e45244..5b58d29ecd6b 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala @@ -126,6 +126,8 @@ object BytecodeUtils { def isNativeMethod(methodNode: MethodNode): Boolean = (methodNode.access & ACC_NATIVE) != 0 + def isVarargsMethod(methodNode: MethodNode): Boolean = (methodNode.access & ACC_VARARGS) != 0 + def isSyntheticMethod(methodNode: MethodNode): Boolean = (methodNode.access & ACC_SYNTHETIC) != 0 // cross-jdk diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index b7bd6589062d..e4c25eb59e73 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -3582,7 +3582,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper case _ if currentRun.runDefinitions.isPolymorphicSignature(fun.symbol) => // Mimic's Java's treatment of polymorphic signatures as described in - // https://docs.oracle.com/javase/specs/jls/se8/html/jls-15.html#jls-15.12.3 + // https://docs.oracle.com/javase/specs/jls/se11/html/jls-15.html#jls-15.12.3 // // One can think of these methods as being infinitely overloaded. We create // a fictitious new cloned method symbol for each call site that takes on a signature @@ -3590,7 +3590,10 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val args1 = typedArgs(args, forArgMode(fun, mode)) val clone = fun.symbol.cloneSymbol.withoutAnnotations val cloneParams = args1.map(arg => clone.newValueParameter(freshTermName()).setInfo(arg.tpe.deconst)) - val resultType = if (isFullyDefined(pt)) pt else ObjectTpe + val resultType = + if (fun.symbol.tpe.resultType.typeSymbol != ObjectClass) fun.symbol.tpe.resultType + else if (isFullyDefined(pt)) pt + else ObjectTpe clone.modifyInfo(mt => copyMethodType(mt, cloneParams, resultType)) val fun1 = fun.setSymbol(clone).setType(clone.info) doTypedApply(tree, fun1, args1, mode, resultType).setType(resultType) From 02dd73baed4d642d64e451cf74ceffc409133364 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Tue, 23 Feb 2021 17:00:19 +0100 Subject: [PATCH 027/769] [backport] Follow JDK 11+ spec for signature polymorphic methods No test case because JDK 11... 
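The effect of the rule can be sketched at a call site (illustrative only, not part of this change; the object and value names below are made up). `MethodHandle.invoke` and `invokeExact` declare `Object` as their return type, so the expected type at the call site keeps determining the result type, exactly as before; a `VarHandle` member such as `compareAndSet`, which declares `boolean`, would now keep that declared return type instead.

    import java.lang.invoke.{MethodHandles, MethodType}

    object SigPolySketch {
      def main(args: Array[String]): Unit = {
        // a method handle with descriptor (String,String)String
        val concat = MethodHandles.lookup().findVirtual(
          classOf[String], "concat", MethodType.methodType(classOf[String], classOf[String]))
        // invokeExact declares Object, so the expected type String becomes the
        // result type of this call and part of the descriptor used at the call site
        val s: String = concat.invokeExact("sig", "poly")
        println(s)
      }
    }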
--- spec/06-expressions.md | 10 ++++-- .../backend/jvm/opt/ByteCodeRepository.scala | 31 ++++++++++++++----- .../nsc/backend/jvm/opt/BytecodeUtils.scala | 2 ++ .../scala/tools/nsc/typechecker/Typers.scala | 7 +++-- 4 files changed, 37 insertions(+), 13 deletions(-) diff --git a/spec/06-expressions.md b/spec/06-expressions.md index dd267558a89d..d88c7324f1de 100644 --- a/spec/06-expressions.md +++ b/spec/06-expressions.md @@ -400,13 +400,17 @@ The final result of the transformation is a block of the form For invocations of signature polymorphic methods of the target platform `$f$($e_1 , \ldots , e_m$)`, the invoked method has a different method type `($p_1$:$T_1 , \ldots , p_n$:$T_n$)$U$` at each call site. The parameter types `$T_ , \ldots , T_n$` are the types of the argument expressions -`$e_1 , \ldots , e_m$` and `$U$` is the expected type at the call site. If the expected type is +`$e_1 , \ldots , e_m$`. If the declared return type `$R$` of the signature polymorphic method is +any type other than `scala.AnyRef`, then the return type `$U$` is `$R$`. +Otherwise, `$U$` is the expected type at the call site. If the expected type is undefined then `$U$` is `scala.AnyRef`. The parameter names `$p_1 , \ldots , p_n$` are fresh. ###### Note -On the Java platform version 7 and later, the methods `invoke` and `invokeExact` in class -`java.lang.invoke.MethodHandle` are signature polymorphic. +On the Java platform version 11 and later, signature polymorphic methods are native, +members of `java.lang.invoke.MethodHandle` or `java.lang.invoke.VarHandle`, and have a single +repeated parameter of type `java.lang.Object*`. + ## Method Values diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala index c19148506f81..af6de030a587 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala @@ -19,7 +19,7 @@ import scala.collection.JavaConverters._ import scala.collection.{concurrent, mutable} import scala.reflect.internal.util.NoPosition import scala.tools.asm -import scala.tools.asm.Attribute +import scala.tools.asm.{Attribute, Type} import scala.tools.asm.tree._ import scala.tools.nsc.backend.jvm.BTypes.InternalName import scala.tools.nsc.backend.jvm.BackendReporting._ @@ -161,9 +161,21 @@ abstract class ByteCodeRepository extends PerRunInit { def methodNode(ownerInternalNameOrArrayDescriptor: String, name: String, descriptor: String): Either[MethodNotFound, (MethodNode, InternalName)] = { def findMethod(c: ClassNode): Option[MethodNode] = c.methods.asScala.find(m => m.name == name && m.desc == descriptor) - // https://docs.oracle.com/javase/specs/jvms/se8/html/jvms-2.html#jvms-2.9: "In Java SE 8, the only - // signature polymorphic methods are the invoke and invokeExact methods of the class MethodHandle. 
- def isSignaturePolymorphic(owner: InternalName) = owner == coreBTypes.jliMethodHandleRef.internalName && (name == "invoke" || name == "invokeExact") + // https://docs.oracle.com/javase/specs/jvms/se11/html/jvms-2.html#jvms-2.9.3 + def findSignaturePolymorphic(owner: ClassNode): Option[MethodNode] = { + def hasObjectArrayParam(m: MethodNode) = Type.getArgumentTypes(m.desc) match { + case Array(pt) => pt.getDimensions == 1 && pt.getElementType.getInternalName == coreBTypes.ObjectRef.internalName + case _ => false + } + // Don't try to build a BType for `VarHandle`, it doesn't exist on JDK 8 + if (owner.name == coreBTypes.jliMethodHandleRef.internalName || owner.name == "java/lang/invoke/VarHandle") + owner.methods.asScala.find(m => + m.name == name && + isNativeMethod(m) && + isVarargsMethod(m) && + hasObjectArrayParam(m)) + else None + } // Note: if `owner` is an interface, in the first iteration we search for a matching member in the interface itself. // If that fails, the recursive invocation checks in the superclass (which is Object) with `publicInstanceOnly == true`. @@ -171,10 +183,13 @@ abstract class ByteCodeRepository extends PerRunInit { def findInSuperClasses(owner: ClassNode, publicInstanceOnly: Boolean = false): Either[ClassNotFound, Option[(MethodNode, InternalName)]] = { findMethod(owner) match { case Some(m) if !publicInstanceOnly || (isPublicMethod(m) && !isStaticMethod(m)) => Right(Some((m, owner.name))) - case None => - if (isSignaturePolymorphic(owner.name)) Right(Some((owner.methods.asScala.find(_.name == name).get, owner.name))) - else if (owner.superName == null) Right(None) - else classNode(owner.superName).flatMap(findInSuperClasses(_, isInterface(owner))) + case _ => + findSignaturePolymorphic(owner) match { + case Some(m) => Right(Some((m, owner.name))) + case _ => + if (owner.superName == null) Right(None) + else classNode(owner.superName).flatMap(findInSuperClasses(_, isInterface(owner))) + } } } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala index b99b0e747251..d6fd2d123261 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala @@ -125,6 +125,8 @@ object BytecodeUtils { def isNativeMethod(methodNode: MethodNode): Boolean = (methodNode.access & ACC_NATIVE) != 0 + def isVarargsMethod(methodNode: MethodNode): Boolean = (methodNode.access & ACC_VARARGS) != 0 + // cross-jdk def hasCallerSensitiveAnnotation(methodNode: MethodNode): Boolean = methodNode.visibleAnnotations != null && diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 9843d03d12ae..0a88e8e1a56c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -3526,7 +3526,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper case _ if currentRun.runDefinitions.isPolymorphicSignature(fun.symbol) => // Mimic's Java's treatment of polymorphic signatures as described in - // https://docs.oracle.com/javase/specs/jls/se8/html/jls-15.html#jls-15.12.3 + // https://docs.oracle.com/javase/specs/jls/se11/html/jls-15.html#jls-15.12.3 // // One can think of these methods as being infinitely overloaded. 
We create // a fictitious new cloned method symbol for each call site that takes on a signature @@ -3534,7 +3534,10 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val args1 = typedArgs(args, forArgMode(fun, mode)) val clone = fun.symbol.cloneSymbol.withoutAnnotations val cloneParams = args1.map(arg => clone.newValueParameter(freshTermName()).setInfo(arg.tpe.deconst)) - val resultType = if (isFullyDefined(pt)) pt else ObjectTpe + val resultType = + if (fun.symbol.tpe.resultType.typeSymbol != ObjectClass) fun.symbol.tpe.resultType + else if (isFullyDefined(pt)) pt + else ObjectTpe clone.modifyInfo(mt => copyMethodType(mt, cloneParams, resultType)) val fun1 = fun.setSymbol(clone).setType(clone.info) doTypedApply(tree, fun1, args1, mode, resultType).setType(resultType) From 79ca1408c7f713d1ca0ae644d800691c9839b8a1 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Fri, 5 Mar 2021 13:14:14 +0100 Subject: [PATCH 028/769] Don't discard suspended parser warnings in REPL --- src/compiler/scala/tools/nsc/Reporting.scala | 5 +++++ src/repl/scala/tools/nsc/interpreter/IMain.scala | 3 +++ test/files/neg/t10729.check | 6 +++--- test/files/run/repl-errors.check | 3 +++ test/files/run/repl-errors.scala | 1 + test/files/run/t11402.check | 6 +++--- 6 files changed, 18 insertions(+), 6 deletions(-) diff --git a/src/compiler/scala/tools/nsc/Reporting.scala b/src/compiler/scala/tools/nsc/Reporting.scala index 4125ed978cf7..c69a60f3f8be 100644 --- a/src/compiler/scala/tools/nsc/Reporting.scala +++ b/src/compiler/scala/tools/nsc/Reporting.scala @@ -59,6 +59,11 @@ trait Reporting extends internal.Reporting { self: ast.Positions with Compilatio private val suppressionsComplete: mutable.Set[SourceFile] = mutable.Set.empty private val suspendedMessages: mutable.LinkedHashMap[SourceFile, mutable.LinkedHashSet[Message]] = mutable.LinkedHashMap.empty + // Used in REPL. The old run is used for parsing. Don't discard its suspended warnings. + def initFrom(old: PerRunReporting): Unit = { + suspendedMessages ++= old.suspendedMessages + } + private def isSuppressed(warning: Message): Boolean = suppressions.getOrElse(warning.pos.source, Nil).find(_.matches(warning)) match { case Some(s) => s.markUsed(); true diff --git a/src/repl/scala/tools/nsc/interpreter/IMain.scala b/src/repl/scala/tools/nsc/interpreter/IMain.scala index a38b1075490b..6c2381dbf2aa 100644 --- a/src/repl/scala/tools/nsc/interpreter/IMain.scala +++ b/src/repl/scala/tools/nsc/interpreter/IMain.scala @@ -735,7 +735,10 @@ class IMain(val settings: Settings, parentClassLoaderOverride: Option[ClassLoade compile(new CompilationUnit(new BatchSourceFile(label, packaged(code)))) def compile(unit: CompilationUnit): Boolean = { + val oldRunReporting = currentRun.reporting val run = new Run() + // The unit is already parsed and won't be parsed again. This makes sure suspended warnings are not discarded. 
+ run.reporting.initFrom(oldRunReporting) assert(run.typerPhase != NoPhase, "REPL requires a typer phase.") run.compileUnits(unit :: Nil) diff --git a/test/files/neg/t10729.check b/test/files/neg/t10729.check index 4942ca1bdfc2..a4143cb6b0e6 100644 --- a/test/files/neg/t10729.check +++ b/test/files/neg/t10729.check @@ -10,11 +10,11 @@ SeqAsAnnotation.scala:2: error: trait Seq is abstract; cannot be instantiated Switch.scala:4: error: class switch does not extend scala.annotation.Annotation def test(x: Int) = (x: @switch) match { ^ -TraitAnnotation.scala:6: error: trait TraitAnnotation is abstract; cannot be instantiated - 1: @TraitAnnotation - ^ Switch.scala:1: warning: imported `switch` is permanently hidden by definition of class switch import annotation.switch ^ +TraitAnnotation.scala:6: error: trait TraitAnnotation is abstract; cannot be instantiated + 1: @TraitAnnotation + ^ 1 warning 5 errors diff --git a/test/files/run/repl-errors.check b/test/files/run/repl-errors.check index ab259dd20aab..836a14911295 100644 --- a/test/files/run/repl-errors.check +++ b/test/files/run/repl-errors.check @@ -8,4 +8,7 @@ scala> def foo() { } warning: procedure syntax is deprecated: instead, add `: Unit =` to explicitly declare `foo`'s return type def foo(): Unit +scala> @annotation.nowarn def sshhh() { } +def sshhh(): Unit + scala> :quit diff --git a/test/files/run/repl-errors.scala b/test/files/run/repl-errors.scala index 5fbe994e8afa..cb7f2150465c 100644 --- a/test/files/run/repl-errors.scala +++ b/test/files/run/repl-errors.scala @@ -6,5 +6,6 @@ object Test extends ReplTest { def code = """ '\060' def foo() { } +@annotation.nowarn def sshhh() { } """.trim } diff --git a/test/files/run/t11402.check b/test/files/run/t11402.check index 238c777f5d97..2ccfa8be1b9e 100644 --- a/test/files/run/t11402.check +++ b/test/files/run/t11402.check @@ -1,12 +1,12 @@ scala> def f = { val x = 'abc - val x = 'abc - ^ -On line 2: warning: symbol literal is deprecated; use Symbol("abc") instead val y = x.toString y } + val x = 'abc + ^ +On line 2: warning: symbol literal is deprecated; use Symbol("abc") instead def f: String scala> :quit From fc82328338a6740648024f6df0cce3886436abd2 Mon Sep 17 00:00:00 2001 From: Martijn Hoekstra Date: Fri, 5 Mar 2021 14:08:04 +0100 Subject: [PATCH 029/769] Add note about top-level private templates in the section about modifiers. --- spec/05-classes-and-objects.md | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/spec/05-classes-and-objects.md b/spec/05-classes-and-objects.md index b297712c17ff..5c3a74e608a2 100644 --- a/spec/05-classes-and-objects.md +++ b/spec/05-classes-and-objects.md @@ -502,11 +502,14 @@ definition apply to all constituent definitions. The rules governing the validity and meaning of a modifier are as follows. ### `private` -The `private` modifier can be used with any definition or -declaration in a template. Such members can be accessed only from -within the directly enclosing template and its companion module or +The `private` modifier can be used with any definition or declaration in a +template. Private members of a template can be accessed only from within the +directly enclosing template and its companion module or [companion class](#object-definitions). +The `private` modifier is also valid for +[top-level](09-top-level-definitions.html#packagings) templates. + A `private` modifier can be _qualified_ with an identifier ´C´ (e.g. `private[´C´]`) that must denote a class or package enclosing the definition. 
Members labeled with such a modifier are accessible respectively only from code From 68cc48675d9799a551cd0e4ce0fe2ce2889f9d0a Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Mon, 8 Mar 2021 08:16:19 -0800 Subject: [PATCH 030/769] Tweak pattern for cntrl chars in REPL --- src/repl/scala/tools/nsc/interpreter/Naming.scala | 2 +- test/files/run/t12276.check | 10 ++++++++++ test/files/run/t12276.scala | 3 ++- 3 files changed, 13 insertions(+), 2 deletions(-) diff --git a/src/repl/scala/tools/nsc/interpreter/Naming.scala b/src/repl/scala/tools/nsc/interpreter/Naming.scala index 344e1f84ee4b..5b6dab253480 100644 --- a/src/repl/scala/tools/nsc/interpreter/Naming.scala +++ b/src/repl/scala/tools/nsc/interpreter/Naming.scala @@ -30,7 +30,7 @@ object Naming { // group 1 is the CSI command letter, where 'm' is color rendition // group 2 is a sequence of chars to be rendered as `?`: anything non-printable and not some space char // additional groups are introduced by linePattern but not used - private lazy val cleaner = raw"$csi|([^\p{Print}\p{Space}]+)|$linePattern".r + private lazy val cleaner = raw"$csi|([\p{Cntrl}&&[^\p{Space}]]+)|$linePattern".r /** Final pass to clean up REPL output. * diff --git a/test/files/run/t12276.check b/test/files/run/t12276.check index 302c6ff9eb63..5de7a7314602 100644 --- a/test/files/run/t12276.check +++ b/test/files/run/t12276.check @@ -82,5 +82,15 @@ 00000000 1b 5b 33 35 6d |.[35m| +00000000 73 63 61 6c 61 3e 20 1b 5b 30 6d 22 5c 75 43 41 |scala> .[0m"\uCA| +00000010 46 45 20 63 61 66 66 c3 a8 22 |FE caff.."| + +00000000 76 61 6c 20 1b 5b 31 6d 1b 5b 33 34 6d 72 65 73 |val .[1m.[34mres| +00000010 36 1b 5b 30 6d 3a 20 1b 5b 31 6d 1b 5b 33 32 6d |6.[0m: .[1m.[32m| +00000020 53 74 72 69 6e 67 1b 5b 30 6d 20 3d 20 ec ab be |String.[0m = ...| +00000030 20 63 61 66 66 c3 a8 | caff..| + +00000000 1b 5b 33 35 6d |.[35m| + 00000000 73 63 61 6c 61 3e 20 1b 5b 30 6d 3a 71 75 69 74 |scala> .[0m:quit| diff --git a/test/files/run/t12276.scala b/test/files/run/t12276.scala index 94425242fb69..50ef6b0edc5e 100644 --- a/test/files/run/t12276.scala +++ b/test/files/run/t12276.scala @@ -3,7 +3,7 @@ import scala.tools.nsc.interpreter.shell.{ILoop, ShellConfig} import scala.tools.partest.{hexdump, ReplTest} object Test extends ReplTest { - def code = """ + def code = s""" |java.nio.CharBuffer.allocate(5) |java.nio.CharBuffer.allocate(6) |class C @@ -12,6 +12,7 @@ object Test extends ReplTest { |classOf[C].toString + esc + "[3z" |classOf[C].toString + esc + "[3!" 
|classOf[C].toString + scala.io.AnsiColor.YELLOW + |"${"\\"}uCAFE caffè" |""".stripMargin override protected def shellConfig(testSettings: Settings) = From 5a92deadba8877da03ca791c8fec2a241b4274d6 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Mon, 8 Mar 2021 16:23:01 +0000 Subject: [PATCH 031/769] REPL: Wrap annotated expressions in a "resN" result val --- .../scala/tools/nsc/interpreter/IMain.scala | 16 ++++++---- test/files/run/t12292.check | 30 +++++++++++++++++++ test/files/run/t12292.scala | 14 +++++++++ 3 files changed, 54 insertions(+), 6 deletions(-) create mode 100644 test/files/run/t12292.check create mode 100644 test/files/run/t12292.scala diff --git a/src/repl/scala/tools/nsc/interpreter/IMain.scala b/src/repl/scala/tools/nsc/interpreter/IMain.scala index a38b1075490b..8234e858c5c6 100644 --- a/src/repl/scala/tools/nsc/interpreter/IMain.scala +++ b/src/repl/scala/tools/nsc/interpreter/IMain.scala @@ -794,12 +794,16 @@ class IMain(val settings: Settings, parentClassLoaderOverride: Option[ClassLoade } // Wrap last tree in a valdef to give user a nice handle for it (`resN`) - val trees: List[Tree] = - origTrees.init :+ (origTrees.last match { - case tree@(_: Assign) => tree - case tree@(_: RefTree | _: TermTree) => storeInVal(tree) - case tree => tree - }) + val trees: List[Tree] = origTrees.init :+ { + val tree = origTrees.last + @tailrec def loop(scrut: Tree): Tree = scrut match { + case _: Assign => tree + case _: RefTree | _: TermTree => storeInVal(tree) + case Annotated(_, arg) => loop(arg) + case _ => tree + } + loop(tree) + } /** handlers for each tree in this request */ val handlers: List[MemberHandler] = trees map (memberHandlers chooseHandler _) diff --git a/test/files/run/t12292.check b/test/files/run/t12292.check new file mode 100644 index 000000000000..0d3abd20aa61 --- /dev/null +++ b/test/files/run/t12292.check @@ -0,0 +1,30 @@ + +scala> import scala.annotation.nowarn +import scala.annotation.nowarn + +scala> scala.#::.unapply(Stream(1)) + ^ + warning: method unapply in object #:: is deprecated (since 2.13.0): Prefer LazyList instead + ^ + warning: value Stream in package scala is deprecated (since 2.13.0): Use LazyList instead of Stream +val res0: Option[(Int, Stream[Int])] = Some((1,Stream())) + +scala> scala.#::.unapply(Stream(1)): @nowarn +val res1: Option[(Int, Stream[Int])] @scala.annotation.nowarn = Some((1,Stream())) + +scala> (scala.#::.unapply(Stream(1)): @nowarn) +val res2: Option[(Int, Stream[Int])] @scala.annotation.nowarn = Some((1,Stream())) + +scala> scala.#::.unapply(Stream(1)): @inline + ^ + warning: method unapply in object #:: is deprecated (since 2.13.0): Prefer LazyList instead + ^ + warning: value Stream in package scala is deprecated (since 2.13.0): Use LazyList instead of Stream + ^ + warning: type Stream in package scala is deprecated (since 2.13.0): Use LazyList instead of Stream +val res3: Option[(Int, Stream[Int])] @inline = Some((1,Stream())) + +scala> (scala.#::.unapply(Stream(1)): @nowarn).isEmpty +val res4: Boolean = false + +scala> :quit diff --git a/test/files/run/t12292.scala b/test/files/run/t12292.scala new file mode 100644 index 000000000000..83aa11272931 --- /dev/null +++ b/test/files/run/t12292.scala @@ -0,0 +1,14 @@ +import scala.tools.partest.ReplTest + +object Test extends ReplTest { + override def extraSettings = "-deprecation" + + def code = """ +import scala.annotation.nowarn +scala.#::.unapply(Stream(1)) +scala.#::.unapply(Stream(1)): @nowarn +(scala.#::.unapply(Stream(1)): @nowarn) +scala.#::.unapply(Stream(1)): 
@inline +(scala.#::.unapply(Stream(1)): @nowarn).isEmpty +""" +} From 9d876006a9770fa7e0ae41a867e2421037c43954 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Fri, 5 Mar 2021 16:36:39 +0000 Subject: [PATCH 032/769] Make restarr <-> reload switching faster Define the `ThisBuild / target` key and rewire the build to it so the `restarr` command can redefine it and `ThisBuild / buildDirectory`, which is also used. This allows switching between both versions much, much faster. When you make any source change you'll need to do the more time-consuming `restarrFull` but the point is that switching back to the previous starr (via `reload`) will be fast because the build products haven't been wiped out in the process. There's a small amount of projects that use "target" as their project base, which is a harder value to change (requires a `reload` during `restarr` which wipes out the session settings, so not 100% clear how to even do it). Looking at them I think it might be fine and a small smoke test seems to show it is fine. --- .gitignore | 4 ++++ README.md | 8 ++++++-- build.sbt | 7 ++----- project/BuildSettings.scala | 11 ++++++++++- project/ScriptCommands.scala | 23 ++++++++++++++++------- 5 files changed, 38 insertions(+), 15 deletions(-) diff --git a/.gitignore b/.gitignore index b49d07b1e72f..082750115d52 100644 --- a/.gitignore +++ b/.gitignore @@ -57,6 +57,10 @@ local.sbt jitwatch.out +# Used by the restarr/restarrFull commands as target directories +/build-restarr/ +/target-restarr/ + # metals .metals .bloop diff --git a/README.md b/README.md index 63f3edafc6bd..1fefc3f11305 100644 --- a/README.md +++ b/README.md @@ -152,8 +152,12 @@ distribution to your local artifact repository and then switch sbt to use that version as its new `scalaVersion`. You may then revert back with `reload`. Note `restarrFull` will also write the STARR version to `buildcharacter.properties` so you can switch back to it with -`restarr` without republishing (though incremental compilation will -recompile from scratch, sadly.) +`restarr` without republishing. This will switch the sbt session to +use the `build-restarr` and `target-restarr` directories instead of +`build` and `target`, which avoids wiping out classfiles and +incremental metadata. IntelliJ will continue to be configured to +compile and run tests using the starr version in +`versions.properties`. For history on how the current scheme was arrived at, see https://groups.google.com/d/topic/scala-internals/gp5JsM1E0Fo/discussion. 
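A possible sbt session for the workflow the README change above describes (illustrative only, not part of this patch; the prompt rendering is hypothetical and output is omitted):

    > restarrFull
    > reload
    > restarr
    > restarr 2.13.5

Here `restarrFull` publishes the current sources locally, records the version in `buildcharacter.properties` and switches `scalaVersion` to it; `reload` reverts to the starr from `versions.properties` without touching `build/` and `target/`; `restarr` switches back to the recorded version, now using `build-restarr/` and `target-restarr/` so no classfiles or incremental metadata are wiped; `restarr <scalaVersion>` (the `2.13.5` above is only an example argument) selects an explicit version instead.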
diff --git a/build.sbt b/build.sbt index a02c8ab62674..7317975c4a7c 100644 --- a/build.sbt +++ b/build.sbt @@ -141,7 +141,7 @@ lazy val commonSettings = instanceSettings ++ clearSourceAndResourceDirectories Global / excludeLintKeys ++= Set(scalaSource), // each subproject has to ask specifically for files they want to include Compile / unmanagedResources / includeFilter := NothingFilter, - target := (ThisBuild / baseDirectory).value / "target" / thisProject.value.id, + target := (ThisBuild / target).value / thisProject.value.id, Compile / classDirectory := buildDirectory.value / "quick/classes" / thisProject.value.id, Compile / doc / target := buildDirectory.value / "scaladoc" / thisProject.value.id, // given that classDirectory and doc target are overridden to be _outside_ of target directory, we have @@ -1091,7 +1091,7 @@ lazy val dist = (project in file("dist")) (ThisBuild / buildDirectory).value / "quick" }.dependsOn((distDependencies.map(_ / Runtime / products) :+ mkBin): _*).value, mkPack := Def.task { (ThisBuild / buildDirectory).value / "pack" }.dependsOn(Compile / packageBin / packagedArtifact, mkBin).value, - target := (ThisBuild / baseDirectory).value / "target" / thisProject.value.id, + target := (ThisBuild / target).value / thisProject.value.id, Compile / packageBin := { val targetDir = (ThisBuild / buildDirectory).value / "pack" / "lib" val jlineJAR = findJar((Compile / dependencyClasspath).value, jlineDep).get.data @@ -1130,7 +1130,6 @@ def configureAsSubproject(project: Project, srcdir: Option[String] = None): Proj .settings(generatePropertiesFileSettings) } -lazy val buildDirectory = settingKey[File]("The directory where all build products go. By default ./build") lazy val mkBin = taskKey[Seq[File]]("Generate shell script (bash or Windows batch).") lazy val mkQuick = taskKey[File]("Generate a full build, including scripts, in build/quick") lazy val mkPack = taskKey[File]("Generate a full build, including scripts, in build/pack") @@ -1198,8 +1197,6 @@ def generateServiceProviderResources(services: (String, String)*): Setting[_] = } }.taskValue -ThisBuild / buildDirectory := (ThisBuild / baseDirectory).value / "build" - // Add tab completion to partest commands += Command("partest")(_ => PartestUtil.partestParser((ThisBuild / baseDirectory).value, (ThisBuild / baseDirectory).value / "test")) { (state, parsed) => ("test/it:testOnly -- " + parsed) :: state diff --git a/project/BuildSettings.scala b/project/BuildSettings.scala index 3cec68215323..5d4418a6fe0a 100644 --- a/project/BuildSettings.scala +++ b/project/BuildSettings.scala @@ -1,11 +1,20 @@ package scala.build -import sbt._ +import sbt._, Keys._ /** This object defines keys that should be visible with an unqualified name in all .sbt files and the command line */ object BuildSettings extends AutoPlugin { + override def trigger = allRequirements + object autoImport { lazy val baseVersion = settingKey[String]("The base version number from which all others are derived") lazy val baseVersionSuffix = settingKey[String]("Identifies the kind of version to build") + lazy val buildDirectory = settingKey[File]("The directory where all build products go. 
By default ./build") } + import autoImport._ + + override def buildSettings = Def.settings( + ThisBuild / target := (ThisBuild / baseDirectory).value / "target", + ThisBuild / buildDirectory := (ThisBuild / baseDirectory).value / "build", + ) } diff --git a/project/ScriptCommands.scala b/project/ScriptCommands.scala index 156a40dbd72a..9ee4beafe60c 100644 --- a/project/ScriptCommands.scala +++ b/project/ScriptCommands.scala @@ -4,6 +4,7 @@ import java.nio.file.Paths import sbt._ import Keys._ +import sbt.complete.Parser._ import sbt.complete.Parsers._ import BuildSettings.autoImport._ @@ -115,12 +116,17 @@ object ScriptCommands { /** For local dev: sets `scalaVersion` to the version in `/buildcharacter.properties` or the given arg. * Running `reload` will re-read the build files, resetting `scalaVersion`. */ - def restarr = Command("restarr")(_ => (Space ~> StringBasic).?) { (state, s) => - val newVersion = s.getOrElse(readVersionFromPropsFile(state)) - val x = Project.extract(state) - val sv = x.get(Global / scalaVersion) - state.log.info(s"Re-STARR'ing: setting scalaVersion from $sv to $newVersion (`reload` to undo)") - x.appendWithSession(Global / scalaVersion := newVersion, state) // don't use version.value or it'll be a wrong, new value + def restarr = Command("restarr")(_ => (Space ~> token(StringBasic, "scalaVersion")).?) { (state, argSv) => + val x = Project.extract(state) + val oldSv = x.get(Global / scalaVersion) + val newSv = argSv.getOrElse(readVersionFromPropsFile(state)) + state.log.info(s"Re-STARR'ing: setting scalaVersion from $oldSv to $newSv (`reload` to undo; IntelliJ still uses $oldSv)") + val settings = Def.settings( + Global / scalaVersion := newSv, // don't use version.value or it'll be a wrong, new value + ThisBuild / target := (ThisBuild / baseDirectory).value / "target-restarr", + ThisBuild / buildDirectory := (ThisBuild / baseDirectory).value / "build-restarr", + ) + x.appendWithSession(settings, state) } /** For local dev: publishes locally (without optimizing) & then sets the new `scalaVersion`. 
@@ -134,7 +140,10 @@ object ScriptCommands { } private def readVersionFromPropsFile(state: State): String = { - val props = readProps(file("buildcharacter.properties")) + val propsFile = file("buildcharacter.properties") + if (!propsFile.exists()) + throw new MessageOnlyException("No buildcharacter.properties found - try restarrFull") + val props = readProps(propsFile) val newVersion = props("maven.version.number") val fullVersion = props("version.number") state.log.info(s"Read STARR version from buildcharacter.properties: $newVersion (full version: $fullVersion)") From 4013395ffd256da6f524273faa0e8b6dcc00e870 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Wed, 10 Mar 2021 10:29:01 +0000 Subject: [PATCH 033/769] Try to fix the flaky patmat test --- src/compiler/scala/tools/nsc/transform/patmat/Logic.scala | 4 ++-- .../scala/tools/nsc/transform/patmat/MatchAnalysis.scala | 2 +- src/compiler/scala/tools/nsc/transform/patmat/Solving.scala | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala index 182f0639aeb3..a06f648680ce 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala @@ -287,7 +287,7 @@ trait Logic extends Debugging { def simplifyAnd(ps: Set[Prop]): Prop = { // recurse for nested And (pulls all Ands up) // build up Set in order to remove duplicates - val props = mutable.HashSet.empty[Prop] + val props = mutable.LinkedHashSet.empty[Prop] for (prop <- ps) { simplifyProp(prop) match { case True => // ignore `True` @@ -303,7 +303,7 @@ trait Logic extends Debugging { def simplifyOr(ps: Set[Prop]): Prop = { // recurse for nested Or (pulls all Ors up) // build up Set in order to remove duplicates - val props = mutable.HashSet.empty[Prop] + val props = mutable.LinkedHashSet.empty[Prop] for (prop <- ps) { simplifyProp(prop) match { case False => // ignore `False` diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala index dba50d2ef944..3730a5668bcb 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala @@ -814,7 +814,7 @@ trait MatchAnalysis extends MatchApproximation { // node in the tree that describes how to construct a counter-example case class VariableAssignment(variable: Var, equalTo: List[Const], notEqualTo: List[Const]) { - private val fields: mutable.Map[Symbol, VariableAssignment] = mutable.HashMap.empty + private val fields: mutable.LinkedHashMap[Symbol, VariableAssignment] = mutable.LinkedHashMap.empty // need to prune since the model now incorporates all super types of a constant (needed for reachability) private lazy val uniqueEqualTo = equalTo filterNot (subsumed => equalTo.exists(better => (better ne subsumed) && instanceOfTpImplies(better.tp, subsumed.tp))) private lazy val inSameDomain = uniqueEqualTo forall (const => variable.domainSyms.exists(_.exists(_.const.tp =:= const.tp))) diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala index b7049821f0c5..4146db459b4e 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala @@ -423,7 +423,7 @@ trait Solving extends Logic { val newModel: Model = if (model eq 
NoTseitinModel) NoModel else { model.iterator.collect { case lit if symForVar.isDefinedAt(lit.variable) => (symForVar(lit.variable), lit.positive) - }.toMap + }.to(scala.collection.immutable.ListMap) } Solution(newModel, unassigned.map(symForVar)) } From 206af8a5254c137c0ad31536e2848b3955bb80c6 Mon Sep 17 00:00:00 2001 From: Hatano Yuusuke <56590357+kynthus@users.noreply.github.com> Date: Fri, 26 Feb 2021 09:57:22 +0900 Subject: [PATCH 034/769] Test case for override access check when overridign Java methods --- test/files/neg/t12349.check | 131 ++++++++++++++++++++++++++++ test/files/neg/t12349/t12349a.java | 45 ++++++++++ test/files/neg/t12349/t12349b.scala | 47 ++++++++++ test/files/neg/t12349/t12349c.scala | 53 +++++++++++ 4 files changed, 276 insertions(+) create mode 100644 test/files/neg/t12349.check create mode 100644 test/files/neg/t12349/t12349a.java create mode 100644 test/files/neg/t12349/t12349b.scala create mode 100644 test/files/neg/t12349/t12349c.scala diff --git a/test/files/neg/t12349.check b/test/files/neg/t12349.check new file mode 100644 index 000000000000..ed582c0954f5 --- /dev/null +++ b/test/files/neg/t12349.check @@ -0,0 +1,131 @@ +t12349b.scala:8: error: weaker access privileges in overriding +def a3(): Unit (defined in class t12349a) + override should not be private + private override def a3(): Unit = println("Inner12349b#a3()") // weaker access privileges + ^ +t12349b.scala:18: error: weaker access privileges in overriding +protected[package t12349] def b3(): Unit (defined in class t12349a) + override should not be private + private override def b3(): Unit = println("Inner12349b#b3()") // weaker access privileges + ^ +t12349b.scala:20: error: weaker access privileges in overriding +protected[package t12349] def b5(): Unit (defined in class t12349a) + override should at least be protected[t12349] + private[t12349b] override def b5(): Unit = println("Inner12349b#b5()") // weaker access privileges + ^ +t12349b.scala:22: error: weaker access privileges in overriding +protected[package t12349] def b7(): Unit (defined in class t12349a) + override should at least be protected[t12349] + private[t12349] override def b7(): Unit = println("Inner12349b#b7()") // weaker access privileges + ^ +t12349b.scala:28: error: weaker access privileges in overriding +private[package t12349] def c3(): Unit (defined in class t12349a) + override should not be private + private override def c3(): Unit = println("Inner12349b#c3()") // weaker access privileges + ^ +t12349b.scala:36: error: method d1 overrides nothing + override def d1(): Unit = println("Inner12349b#d1()") // overrides nothing + ^ +t12349b.scala:37: error: method d2 overrides nothing + protected override def d2(): Unit = println("Inner12349b#d2()") // overrides nothing + ^ +t12349b.scala:38: error: method d3 overrides nothing + private override def d3(): Unit = println("Inner12349b#d3()") // overrides nothing + ^ +t12349b.scala:39: error: method d4 overrides nothing + protected[t12349b] override def d4(): Unit = println("Inner12349b#d4()") // overrides nothing + ^ +t12349b.scala:40: error: method d5 overrides nothing + private[t12349b] override def d5(): Unit = println("Inner12349b#d5()") // overrides nothing + ^ +t12349b.scala:41: error: method d6 overrides nothing + protected[t12349] override def d6(): Unit = println("Inner12349b#d6()") // overrides nothing + ^ +t12349b.scala:42: error: method d7 overrides nothing + private[t12349] override def d7(): Unit = println("Inner12349b#d7()") // overrides nothing + ^ 
+t12349b.scala:43: error: method d8 overrides nothing + protected[this] override def d8(): Unit = println("Inner12349b#d8()") // overrides nothing + ^ +t12349b.scala:44: error: method d9 overrides nothing + private[this] override def d9(): Unit = println("Inner12349b#d9()") // overrides nothing + ^ +t12349c.scala:12: error: weaker access privileges in overriding +def a3(): Unit (defined in class t12349a) + override should not be private + private override def a3(): Unit = println("Inner12349c#a3()") // weaker access privileges + ^ +t12349c.scala:22: error: weaker access privileges in overriding +protected[package t12349] def b3(): Unit (defined in class t12349a) + override should not be private + private override def b3(): Unit = println("Inner12349c#b3()") // weaker access privileges + ^ +t12349c.scala:24: error: weaker access privileges in overriding +protected[package t12349] def b5(): Unit (defined in class t12349a) + override should at least be protected[t12349] + private[t12349c] override def b5(): Unit = println("Inner12349c#b5()") // weaker access privileges + ^ +t12349c.scala:26: error: weaker access privileges in overriding +protected[package t12349] def b7(): Unit (defined in class t12349a) + override should at least be protected[t12349] + private[pkg] override def b7(): Unit = println("Inner12349c#b7()") // weaker access privileges + ^ +t12349c.scala:32: error: weaker access privileges in overriding +private[package t12349] def c3(): Unit (defined in class t12349a) + override should not be private + private override def c3(): Unit = println("Inner12349c#c3()") // weaker access privileges + ^ +t12349c.scala:30: error: method c1 overrides nothing + override def c1(): Unit = println("Inner12349c#c1()") // overrides nothing (invisible) + ^ +t12349c.scala:31: error: method c2 overrides nothing + protected override def c2(): Unit = println("Inner12349c#c2()") // [#12349] + ^ +t12349c.scala:33: error: method c4 overrides nothing + protected[t12349c] override def c4(): Unit = println("Inner12349c#c4()") // [#12349] + ^ +t12349c.scala:34: error: method c5 overrides nothing + private[t12349c] override def c5(): Unit = println("Inner12349c#c5()") // [#12349] + ^ +t12349c.scala:35: error: method c6 overrides nothing + protected[pkg] override def c6(): Unit = println("Inner12349c#c6()") // [#12349] + ^ +t12349c.scala:36: error: method c7 overrides nothing + private[pkg] override def c7(): Unit = println("Inner12349c#c7()") // [#12349] + ^ +t12349c.scala:37: error: method c8 overrides nothing + protected[this] override def c8(): Unit = println("Inner12349c#c8()") // [#12349] + ^ +t12349c.scala:38: error: method c9 overrides nothing. 
+Note: the super classes of class Inner12349c contain the following, non final members named c9: +private[package t12349] def c9(): Unit + private[this] override def c9(): Unit = println("Inner12349c#c9()") // overrides nothing (invisible) + ^ +t12349c.scala:40: error: method d1 overrides nothing + override def d1(): Unit = println("Inner12349c#d1()") // overrides nothing + ^ +t12349c.scala:41: error: method d2 overrides nothing + protected override def d2(): Unit = println("Inner12349c#d2()") // overrides nothing + ^ +t12349c.scala:42: error: method d3 overrides nothing + private override def d3(): Unit = println("Inner12349c#d3()") // overrides nothing + ^ +t12349c.scala:43: error: method d4 overrides nothing + protected[t12349c] override def d4(): Unit = println("Inner12349c#d4()") // overrides nothing + ^ +t12349c.scala:44: error: method d5 overrides nothing + private[t12349c] override def d5(): Unit = println("Inner12349c#d5()") // overrides nothing + ^ +t12349c.scala:45: error: method d6 overrides nothing + protected[pkg] override def d6(): Unit = println("Inner12349c#d6()") // overrides nothing + ^ +t12349c.scala:46: error: method d7 overrides nothing + private[pkg] override def d7(): Unit = println("Inner12349c#d7()") // overrides nothing + ^ +t12349c.scala:47: error: method d8 overrides nothing + protected[this] override def d8(): Unit = println("Inner12349c#d8()") // overrides nothing + ^ +t12349c.scala:48: error: method d9 overrides nothing + private[this] override def d9(): Unit = println("Inner12349c#d9()") // overrides nothing + ^ +36 errors diff --git a/test/files/neg/t12349/t12349a.java b/test/files/neg/t12349/t12349a.java new file mode 100644 index 000000000000..db9de0b0a539 --- /dev/null +++ b/test/files/neg/t12349/t12349a.java @@ -0,0 +1,45 @@ +package t12349; + +public class t12349a { + + public void a1() { System.out.println("t12349a#a1()"); } + public void a2() { System.out.println("t12349a#a2()"); } + public void a3() { System.out.println("t12349a#a3()"); } + public void a4() { System.out.println("t12349a#a4()"); } + public void a5() { System.out.println("t12349a#a5()"); } + public void a6() { System.out.println("t12349a#a6()"); } + public void a7() { System.out.println("t12349a#a7()"); } + public void a8() { System.out.println("t12349a#a8()"); } + public void a9() { System.out.println("t12349a#a9()"); } + + protected void b1() { System.out.println("t12349a#b1()"); } + protected void b2() { System.out.println("t12349a#b2()"); } + protected void b3() { System.out.println("t12349a#b3()"); } + protected void b4() { System.out.println("t12349a#b4()"); } + protected void b5() { System.out.println("t12349a#b5()"); } + protected void b6() { System.out.println("t12349a#b6()"); } + protected void b7() { System.out.println("t12349a#b7()"); } + protected void b8() { System.out.println("t12349a#b8()"); } + protected void b9() { System.out.println("t12349a#b9()"); } + + void c1() { System.out.println("t12349a#c1()"); } + void c2() { System.out.println("t12349a#c2()"); } + void c3() { System.out.println("t12349a#c3()"); } + void c4() { System.out.println("t12349a#c4()"); } + void c5() { System.out.println("t12349a#c5()"); } + void c6() { System.out.println("t12349a#c6()"); } + void c7() { System.out.println("t12349a#c7()"); } + void c8() { System.out.println("t12349a#c8()"); } + void c9() { System.out.println("t12349a#c9()"); } + + private void d1() { System.out.println("t12349a#d1()"); } + private void d2() { System.out.println("t12349a#d2()"); } + private void d3() { 
System.out.println("t12349a#d3()"); } + private void d4() { System.out.println("t12349a#d4()"); } + private void d5() { System.out.println("t12349a#d5()"); } + private void d6() { System.out.println("t12349a#d6()"); } + private void d7() { System.out.println("t12349a#d7()"); } + private void d8() { System.out.println("t12349a#d8()"); } + private void d9() { System.out.println("t12349a#d9()"); } + +} diff --git a/test/files/neg/t12349/t12349b.scala b/test/files/neg/t12349/t12349b.scala new file mode 100644 index 000000000000..687b67c7e039 --- /dev/null +++ b/test/files/neg/t12349/t12349b.scala @@ -0,0 +1,47 @@ +package t12349 + +object t12349b { + + class Inner12349b extends t12349a { + override def a1(): Unit = println("Inner12349b#a1()") + protected override def a2(): Unit = println("Inner12349b#a2()") // [#12349] + private override def a3(): Unit = println("Inner12349b#a3()") // weaker access privileges + protected[t12349b] override def a4(): Unit = println("Inner12349b#a4()") // [#12349] + private[t12349b] override def a5(): Unit = println("Inner12349b#a5()") // [#12349] + protected[t12349] override def a6(): Unit = println("Inner12349b#a6()") // [#12349] + private[t12349] override def a7(): Unit = println("Inner12349b#a7()") // [#12349] + protected[this] override def a8(): Unit = println("Inner12349b#a8()") // [#12349] + private[this] override def a9(): Unit = println("Inner12349b#a9()") // [#9334] + + override def b1(): Unit = println("Inner12349b#b1()") + protected override def b2(): Unit = println("Inner12349b#b2()") + private override def b3(): Unit = println("Inner12349b#b3()") // weaker access privileges + protected[t12349b] override def b4(): Unit = println("Inner12349b#b4()") + private[t12349b] override def b5(): Unit = println("Inner12349b#b5()") // weaker access privileges + protected[t12349] override def b6(): Unit = println("Inner12349b#b6()") + private[t12349] override def b7(): Unit = println("Inner12349b#b7()") // weaker access privileges + protected[this] override def b8(): Unit = println("Inner12349b#b8()") // [#12349] - not fixed by PR #9525 + private[this] override def b9(): Unit = println("Inner12349b#b9()") // [#9334] + + override def c1(): Unit = println("Inner12349b#c1()") + protected override def c2(): Unit = println("Inner12349b#c2()") // [#12349] + private override def c3(): Unit = println("Inner12349b#c3()") // weaker access privileges + protected[t12349b] override def c4(): Unit = println("Inner12349b#c4()") // [#12349] + private[t12349b] override def c5(): Unit = println("Inner12349b#c5()") // [#12349] + protected[t12349] override def c6(): Unit = println("Inner12349b#c6()") + private[t12349] override def c7(): Unit = println("Inner12349b#c7()") + protected[this] override def c8(): Unit = println("Inner12349b#c8()") // [#12349] + private[this] override def c9(): Unit = println("Inner12349b#c9()") // [#9334] + + override def d1(): Unit = println("Inner12349b#d1()") // overrides nothing + protected override def d2(): Unit = println("Inner12349b#d2()") // overrides nothing + private override def d3(): Unit = println("Inner12349b#d3()") // overrides nothing + protected[t12349b] override def d4(): Unit = println("Inner12349b#d4()") // overrides nothing + private[t12349b] override def d5(): Unit = println("Inner12349b#d5()") // overrides nothing + protected[t12349] override def d6(): Unit = println("Inner12349b#d6()") // overrides nothing + private[t12349] override def d7(): Unit = println("Inner12349b#d7()") // overrides nothing + protected[this] override def 
d8(): Unit = println("Inner12349b#d8()") // overrides nothing + private[this] override def d9(): Unit = println("Inner12349b#d9()") // overrides nothing + } + +} diff --git a/test/files/neg/t12349/t12349c.scala b/test/files/neg/t12349/t12349c.scala new file mode 100644 index 000000000000..d7bbeaed5f09 --- /dev/null +++ b/test/files/neg/t12349/t12349c.scala @@ -0,0 +1,53 @@ +package t12349 + +import t12349.t12349a + +package pkg { + + object t12349c { + + class Inner12349c extends t12349a { + override def a1(): Unit = println("Inner12349c#a1()") + protected override def a2(): Unit = println("Inner12349c#a2()") // [#12349] + private override def a3(): Unit = println("Inner12349c#a3()") // weaker access privileges + protected[t12349c] override def a4(): Unit = println("Inner12349c#a4()") // [#12349] + private[t12349c] override def a5(): Unit = println("Inner12349c#a5()") // [#12349] + protected[pkg] override def a6(): Unit = println("Inner12349c#a6()") // [#12349] + private[pkg] override def a7(): Unit = println("Inner12349c#a7()") // [#12349] + protected[this] override def a8(): Unit = println("Inner12349c#a8()") // [#12349] + private[this] override def a9(): Unit = println("Inner12349c#a9()") // [#9334] + + override def b1(): Unit = println("Inner12349c#b1()") + protected override def b2(): Unit = println("Inner12349c#b2()") + private override def b3(): Unit = println("Inner12349c#b3()") // weaker access privileges + protected[t12349c] override def b4(): Unit = println("Inner12349c#b4()") + private[t12349c] override def b5(): Unit = println("Inner12349c#b5()") // weaker access privileges + protected[pkg] override def b6(): Unit = println("Inner12349c#b6()") + private[pkg] override def b7(): Unit = println("Inner12349c#b7()") // weaker access privileges + protected[this] override def b8(): Unit = println("Inner12349c#b8()") // [#12349] - not fixed by PR #9525 + private[this] override def b9(): Unit = println("Inner12349c#b9()") // [#9334] + + override def c1(): Unit = println("Inner12349c#c1()") // overrides nothing (invisible) + protected override def c2(): Unit = println("Inner12349c#c2()") // [#12349] + private override def c3(): Unit = println("Inner12349c#c3()") // weaker access privileges + protected[t12349c] override def c4(): Unit = println("Inner12349c#c4()") // [#12349] + private[t12349c] override def c5(): Unit = println("Inner12349c#c5()") // [#12349] + protected[pkg] override def c6(): Unit = println("Inner12349c#c6()") // [#12349] + private[pkg] override def c7(): Unit = println("Inner12349c#c7()") // [#12349] + protected[this] override def c8(): Unit = println("Inner12349c#c8()") // [#12349] + private[this] override def c9(): Unit = println("Inner12349c#c9()") // overrides nothing (invisible) + + override def d1(): Unit = println("Inner12349c#d1()") // overrides nothing + protected override def d2(): Unit = println("Inner12349c#d2()") // overrides nothing + private override def d3(): Unit = println("Inner12349c#d3()") // overrides nothing + protected[t12349c] override def d4(): Unit = println("Inner12349c#d4()") // overrides nothing + private[t12349c] override def d5(): Unit = println("Inner12349c#d5()") // overrides nothing + protected[pkg] override def d6(): Unit = println("Inner12349c#d6()") // overrides nothing + private[pkg] override def d7(): Unit = println("Inner12349c#d7()") // overrides nothing + protected[this] override def d8(): Unit = println("Inner12349c#d8()") // overrides nothing + private[this] override def d9(): Unit = println("Inner12349c#d9()") // overrides 
nothing + } + + } + +} From c6d126a0206f238570bdc0b3f0b1028da78e349a Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Thu, 4 Mar 2021 16:59:39 +0100 Subject: [PATCH 035/769] Fix override access checks for overriding Java methods --- .../tools/nsc/typechecker/RefChecks.scala | 2 +- test/files/neg/t12349.check | 124 +++++++++++++++--- test/files/neg/t12349/t12349b.scala | 20 +-- test/files/neg/t12349/t12349c.scala | 24 ++-- 4 files changed, 131 insertions(+), 39 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index 1eaca48723a8..3b0ad5ad7084 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -383,7 +383,7 @@ abstract class RefChecks extends Transform { def isOverrideAccessOK = member.isPublic || { // member is public, definitely same or relaxed access (!other.isProtected || member.isProtected) && // if o is protected, so is m ((!isRootOrNone(ob) && ob.hasTransOwner(mb)) || // m relaxes o's access boundary - other.isJavaDefined) // overriding a protected java member, see #3946 + (other.isJavaDefined && other.isProtected)) // overriding a protected java member, see #3946 #12349 } if (!isOverrideAccessOK) { overrideAccessError() diff --git a/test/files/neg/t12349.check b/test/files/neg/t12349.check index ed582c0954f5..2c7426ad6a95 100644 --- a/test/files/neg/t12349.check +++ b/test/files/neg/t12349.check @@ -1,8 +1,38 @@ +t12349b.scala:7: error: weaker access privileges in overriding +def a2(): Unit (defined in class t12349a) + override should be public + protected override def a2(): Unit = println("Inner12349b#a2()") // weaker access privileges + ^ t12349b.scala:8: error: weaker access privileges in overriding def a3(): Unit (defined in class t12349a) override should not be private private override def a3(): Unit = println("Inner12349b#a3()") // weaker access privileges ^ +t12349b.scala:9: error: weaker access privileges in overriding +def a4(): Unit (defined in class t12349a) + override should be public + protected[t12349b] override def a4(): Unit = println("Inner12349b#a4()") // weaker access privileges + ^ +t12349b.scala:10: error: weaker access privileges in overriding +def a5(): Unit (defined in class t12349a) + override should be public + private[t12349b] override def a5(): Unit = println("Inner12349b#a5()") // weaker access privileges + ^ +t12349b.scala:11: error: weaker access privileges in overriding +def a6(): Unit (defined in class t12349a) + override should be public + protected[t12349] override def a6(): Unit = println("Inner12349b#a6()") // weaker access privileges + ^ +t12349b.scala:12: error: weaker access privileges in overriding +def a7(): Unit (defined in class t12349a) + override should be public + private[t12349] override def a7(): Unit = println("Inner12349b#a7()") // weaker access privileges + ^ +t12349b.scala:13: error: weaker access privileges in overriding +def a8(): Unit (defined in class t12349a) + override should be public + protected[this] override def a8(): Unit = println("Inner12349b#a8()") // weaker access privileges + ^ t12349b.scala:18: error: weaker access privileges in overriding protected[package t12349] def b3(): Unit (defined in class t12349a) override should not be private @@ -18,11 +48,31 @@ protected[package t12349] def b7(): Unit (defined in class t12349a) override should at least be protected[t12349] private[t12349] override def b7(): Unit = println("Inner12349b#b7()") // 
weaker access privileges ^ +t12349b.scala:27: error: weaker access privileges in overriding +private[package t12349] def c2(): Unit (defined in class t12349a) + override should at least be private[t12349] + protected override def c2(): Unit = println("Inner12349b#c2()") // weaker access privileges + ^ t12349b.scala:28: error: weaker access privileges in overriding private[package t12349] def c3(): Unit (defined in class t12349a) override should not be private private override def c3(): Unit = println("Inner12349b#c3()") // weaker access privileges ^ +t12349b.scala:29: error: weaker access privileges in overriding +private[package t12349] def c4(): Unit (defined in class t12349a) + override should at least be private[t12349] + protected[t12349b] override def c4(): Unit = println("Inner12349b#c4()") // weaker access privileges + ^ +t12349b.scala:30: error: weaker access privileges in overriding +private[package t12349] def c5(): Unit (defined in class t12349a) + override should at least be private[t12349] + private[t12349b] override def c5(): Unit = println("Inner12349b#c5()") // weaker access privileges + ^ +t12349b.scala:33: error: weaker access privileges in overriding +private[package t12349] def c8(): Unit (defined in class t12349a) + override should at least be private[t12349] + protected[this] override def c8(): Unit = println("Inner12349b#c8()") // weaker access privileges + ^ t12349b.scala:36: error: method d1 overrides nothing override def d1(): Unit = println("Inner12349b#d1()") // overrides nothing ^ @@ -50,11 +100,41 @@ t12349b.scala:43: error: method d8 overrides nothing t12349b.scala:44: error: method d9 overrides nothing private[this] override def d9(): Unit = println("Inner12349b#d9()") // overrides nothing ^ +t12349c.scala:11: error: weaker access privileges in overriding +def a2(): Unit (defined in class t12349a) + override should be public + protected override def a2(): Unit = println("Inner12349c#a2()") // weaker access privileges + ^ t12349c.scala:12: error: weaker access privileges in overriding def a3(): Unit (defined in class t12349a) override should not be private private override def a3(): Unit = println("Inner12349c#a3()") // weaker access privileges ^ +t12349c.scala:13: error: weaker access privileges in overriding +def a4(): Unit (defined in class t12349a) + override should be public + protected[t12349c] override def a4(): Unit = println("Inner12349c#a4()") // weaker access privileges + ^ +t12349c.scala:14: error: weaker access privileges in overriding +def a5(): Unit (defined in class t12349a) + override should be public + private[t12349c] override def a5(): Unit = println("Inner12349c#a5()") // weaker access privileges + ^ +t12349c.scala:15: error: weaker access privileges in overriding +def a6(): Unit (defined in class t12349a) + override should be public + protected[pkg] override def a6(): Unit = println("Inner12349c#a6()") // weaker access privileges + ^ +t12349c.scala:16: error: weaker access privileges in overriding +def a7(): Unit (defined in class t12349a) + override should be public + private[pkg] override def a7(): Unit = println("Inner12349c#a7()") // weaker access privileges + ^ +t12349c.scala:17: error: weaker access privileges in overriding +def a8(): Unit (defined in class t12349a) + override should be public + protected[this] override def a8(): Unit = println("Inner12349c#a8()") // weaker access privileges + ^ t12349c.scala:22: error: weaker access privileges in overriding protected[package t12349] def b3(): Unit (defined in class t12349a) 
override should not be private @@ -70,31 +150,43 @@ protected[package t12349] def b7(): Unit (defined in class t12349a) override should at least be protected[t12349] private[pkg] override def b7(): Unit = println("Inner12349c#b7()") // weaker access privileges ^ +t12349c.scala:31: error: weaker access privileges in overriding +private[package t12349] def c2(): Unit (defined in class t12349a) + override should at least be private[t12349] + protected override def c2(): Unit = println("Inner12349c#c2()") // weaker access privileges + ^ t12349c.scala:32: error: weaker access privileges in overriding private[package t12349] def c3(): Unit (defined in class t12349a) override should not be private private override def c3(): Unit = println("Inner12349c#c3()") // weaker access privileges ^ -t12349c.scala:30: error: method c1 overrides nothing - override def c1(): Unit = println("Inner12349c#c1()") // overrides nothing (invisible) - ^ -t12349c.scala:31: error: method c2 overrides nothing - protected override def c2(): Unit = println("Inner12349c#c2()") // [#12349] +t12349c.scala:33: error: weaker access privileges in overriding +private[package t12349] def c4(): Unit (defined in class t12349a) + override should at least be private[t12349] + protected[t12349c] override def c4(): Unit = println("Inner12349c#c4()") // weaker access privileges ^ -t12349c.scala:33: error: method c4 overrides nothing - protected[t12349c] override def c4(): Unit = println("Inner12349c#c4()") // [#12349] +t12349c.scala:34: error: weaker access privileges in overriding +private[package t12349] def c5(): Unit (defined in class t12349a) + override should at least be private[t12349] + private[t12349c] override def c5(): Unit = println("Inner12349c#c5()") // weaker access privileges ^ -t12349c.scala:34: error: method c5 overrides nothing - private[t12349c] override def c5(): Unit = println("Inner12349c#c5()") // [#12349] +t12349c.scala:35: error: weaker access privileges in overriding +private[package t12349] def c6(): Unit (defined in class t12349a) + override should at least be private[t12349] + protected[pkg] override def c6(): Unit = println("Inner12349c#c6()") // weaker access privileges ^ -t12349c.scala:35: error: method c6 overrides nothing - protected[pkg] override def c6(): Unit = println("Inner12349c#c6()") // [#12349] +t12349c.scala:36: error: weaker access privileges in overriding +private[package t12349] def c7(): Unit (defined in class t12349a) + override should at least be private[t12349] + private[pkg] override def c7(): Unit = println("Inner12349c#c7()") // weaker access privileges ^ -t12349c.scala:36: error: method c7 overrides nothing - private[pkg] override def c7(): Unit = println("Inner12349c#c7()") // [#12349] +t12349c.scala:37: error: weaker access privileges in overriding +private[package t12349] def c8(): Unit (defined in class t12349a) + override should at least be private[t12349] + protected[this] override def c8(): Unit = println("Inner12349c#c8()") // weaker access privileges ^ -t12349c.scala:37: error: method c8 overrides nothing - protected[this] override def c8(): Unit = println("Inner12349c#c8()") // [#12349] +t12349c.scala:30: error: method c1 overrides nothing + override def c1(): Unit = println("Inner12349c#c1()") // overrides nothing (invisible) ^ t12349c.scala:38: error: method c9 overrides nothing. 
Note: the super classes of class Inner12349c contain the following, non final members named c9: @@ -128,4 +220,4 @@ t12349c.scala:47: error: method d8 overrides nothing t12349c.scala:48: error: method d9 overrides nothing private[this] override def d9(): Unit = println("Inner12349c#d9()") // overrides nothing ^ -36 errors +52 errors diff --git a/test/files/neg/t12349/t12349b.scala b/test/files/neg/t12349/t12349b.scala index 687b67c7e039..19079a3eb003 100644 --- a/test/files/neg/t12349/t12349b.scala +++ b/test/files/neg/t12349/t12349b.scala @@ -4,13 +4,13 @@ object t12349b { class Inner12349b extends t12349a { override def a1(): Unit = println("Inner12349b#a1()") - protected override def a2(): Unit = println("Inner12349b#a2()") // [#12349] + protected override def a2(): Unit = println("Inner12349b#a2()") // weaker access privileges private override def a3(): Unit = println("Inner12349b#a3()") // weaker access privileges - protected[t12349b] override def a4(): Unit = println("Inner12349b#a4()") // [#12349] - private[t12349b] override def a5(): Unit = println("Inner12349b#a5()") // [#12349] - protected[t12349] override def a6(): Unit = println("Inner12349b#a6()") // [#12349] - private[t12349] override def a7(): Unit = println("Inner12349b#a7()") // [#12349] - protected[this] override def a8(): Unit = println("Inner12349b#a8()") // [#12349] + protected[t12349b] override def a4(): Unit = println("Inner12349b#a4()") // weaker access privileges + private[t12349b] override def a5(): Unit = println("Inner12349b#a5()") // weaker access privileges + protected[t12349] override def a6(): Unit = println("Inner12349b#a6()") // weaker access privileges + private[t12349] override def a7(): Unit = println("Inner12349b#a7()") // weaker access privileges + protected[this] override def a8(): Unit = println("Inner12349b#a8()") // weaker access privileges private[this] override def a9(): Unit = println("Inner12349b#a9()") // [#9334] override def b1(): Unit = println("Inner12349b#b1()") @@ -24,13 +24,13 @@ object t12349b { private[this] override def b9(): Unit = println("Inner12349b#b9()") // [#9334] override def c1(): Unit = println("Inner12349b#c1()") - protected override def c2(): Unit = println("Inner12349b#c2()") // [#12349] + protected override def c2(): Unit = println("Inner12349b#c2()") // weaker access privileges private override def c3(): Unit = println("Inner12349b#c3()") // weaker access privileges - protected[t12349b] override def c4(): Unit = println("Inner12349b#c4()") // [#12349] - private[t12349b] override def c5(): Unit = println("Inner12349b#c5()") // [#12349] + protected[t12349b] override def c4(): Unit = println("Inner12349b#c4()") // weaker access privileges + private[t12349b] override def c5(): Unit = println("Inner12349b#c5()") // weaker access privileges protected[t12349] override def c6(): Unit = println("Inner12349b#c6()") private[t12349] override def c7(): Unit = println("Inner12349b#c7()") - protected[this] override def c8(): Unit = println("Inner12349b#c8()") // [#12349] + protected[this] override def c8(): Unit = println("Inner12349b#c8()") // weaker access privileges private[this] override def c9(): Unit = println("Inner12349b#c9()") // [#9334] override def d1(): Unit = println("Inner12349b#d1()") // overrides nothing diff --git a/test/files/neg/t12349/t12349c.scala b/test/files/neg/t12349/t12349c.scala index d7bbeaed5f09..3ad062d33472 100644 --- a/test/files/neg/t12349/t12349c.scala +++ b/test/files/neg/t12349/t12349c.scala @@ -8,13 +8,13 @@ package pkg { class Inner12349c extends 
t12349a { override def a1(): Unit = println("Inner12349c#a1()") - protected override def a2(): Unit = println("Inner12349c#a2()") // [#12349] + protected override def a2(): Unit = println("Inner12349c#a2()") // weaker access privileges private override def a3(): Unit = println("Inner12349c#a3()") // weaker access privileges - protected[t12349c] override def a4(): Unit = println("Inner12349c#a4()") // [#12349] - private[t12349c] override def a5(): Unit = println("Inner12349c#a5()") // [#12349] - protected[pkg] override def a6(): Unit = println("Inner12349c#a6()") // [#12349] - private[pkg] override def a7(): Unit = println("Inner12349c#a7()") // [#12349] - protected[this] override def a8(): Unit = println("Inner12349c#a8()") // [#12349] + protected[t12349c] override def a4(): Unit = println("Inner12349c#a4()") // weaker access privileges + private[t12349c] override def a5(): Unit = println("Inner12349c#a5()") // weaker access privileges + protected[pkg] override def a6(): Unit = println("Inner12349c#a6()") // weaker access privileges + private[pkg] override def a7(): Unit = println("Inner12349c#a7()") // weaker access privileges + protected[this] override def a8(): Unit = println("Inner12349c#a8()") // weaker access privileges private[this] override def a9(): Unit = println("Inner12349c#a9()") // [#9334] override def b1(): Unit = println("Inner12349c#b1()") @@ -28,13 +28,13 @@ package pkg { private[this] override def b9(): Unit = println("Inner12349c#b9()") // [#9334] override def c1(): Unit = println("Inner12349c#c1()") // overrides nothing (invisible) - protected override def c2(): Unit = println("Inner12349c#c2()") // [#12349] + protected override def c2(): Unit = println("Inner12349c#c2()") // weaker access privileges private override def c3(): Unit = println("Inner12349c#c3()") // weaker access privileges - protected[t12349c] override def c4(): Unit = println("Inner12349c#c4()") // [#12349] - private[t12349c] override def c5(): Unit = println("Inner12349c#c5()") // [#12349] - protected[pkg] override def c6(): Unit = println("Inner12349c#c6()") // [#12349] - private[pkg] override def c7(): Unit = println("Inner12349c#c7()") // [#12349] - protected[this] override def c8(): Unit = println("Inner12349c#c8()") // [#12349] + protected[t12349c] override def c4(): Unit = println("Inner12349c#c4()") // weaker access privileges + private[t12349c] override def c5(): Unit = println("Inner12349c#c5()") // weaker access privileges + protected[pkg] override def c6(): Unit = println("Inner12349c#c6()") // weaker access privileges + private[pkg] override def c7(): Unit = println("Inner12349c#c7()") // weaker access privileges + protected[this] override def c8(): Unit = println("Inner12349c#c8()") // weaker access privileges private[this] override def c9(): Unit = println("Inner12349c#c9()") // overrides nothing (invisible) override def d1(): Unit = println("Inner12349c#d1()") // overrides nothing From 4d06ec15138dd3b9f0bfb8419d5a9b3103162f05 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Mon, 8 Mar 2021 14:22:48 -0800 Subject: [PATCH 036/769] sbt 1.4.9 (was 1.4.8) --- project/build.properties | 2 +- scripts/common | 2 +- src/intellij/scala.ipr.SAMPLE | 36 ++++++++++++------------ test/benchmarks/project/build.properties | 2 +- test/jcstress/project/build.properties | 2 +- 5 files changed, 22 insertions(+), 22 deletions(-) diff --git a/project/build.properties b/project/build.properties index 0b2e09c5ac99..dbae93bcfd51 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ 
-sbt.version=1.4.7 +sbt.version=1.4.9 diff --git a/scripts/common b/scripts/common index a0c1e9af137e..d8645a48af42 100644 --- a/scripts/common +++ b/scripts/common @@ -11,7 +11,7 @@ else fi SBT_CMD=${SBT_CMD-sbt} -SBT_CMD="$SBT_CMD -sbt-version 1.4.7" +SBT_CMD="$SBT_CMD -sbt-version 1.4.9" # repo to publish builds integrationRepoUrl=${integrationRepoUrl-"https://scala-ci.typesafe.com/artifactory/scala-integration/"} diff --git a/src/intellij/scala.ipr.SAMPLE b/src/intellij/scala.ipr.SAMPLE index b545ddd3f227..a688c8d8e945 100644 --- a/src/intellij/scala.ipr.SAMPLE +++ b/src/intellij/scala.ipr.SAMPLE @@ -325,13 +325,13 @@ - + - + - - + + @@ -355,7 +355,7 @@ - + @@ -369,8 +369,8 @@ - - + + @@ -382,13 +382,13 @@ - + - + @@ -402,16 +402,16 @@ - + - + - + - + @@ -435,18 +435,18 @@ - - + + - + - + - + diff --git a/test/benchmarks/project/build.properties b/test/benchmarks/project/build.properties index 0b2e09c5ac99..dbae93bcfd51 100644 --- a/test/benchmarks/project/build.properties +++ b/test/benchmarks/project/build.properties @@ -1 +1 @@ -sbt.version=1.4.7 +sbt.version=1.4.9 diff --git a/test/jcstress/project/build.properties b/test/jcstress/project/build.properties index 0b2e09c5ac99..dbae93bcfd51 100644 --- a/test/jcstress/project/build.properties +++ b/test/jcstress/project/build.properties @@ -1 +1 @@ -sbt.version=1.4.7 +sbt.version=1.4.9 From cac58b53ccc3f0e1eb6a5ae538203826dffbafce Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Wed, 10 Mar 2021 17:15:47 -0800 Subject: [PATCH 037/769] [backport] Another correct seeding of root entry --- src/reflect/scala/reflect/io/ZipArchive.scala | 2 +- .../scala/reflect/io/ZipArchiveTest.scala | 53 +++++++++++++++++-- 2 files changed, 50 insertions(+), 5 deletions(-) diff --git a/src/reflect/scala/reflect/io/ZipArchive.scala b/src/reflect/scala/reflect/io/ZipArchive.scala index 685afe5d4ef7..53a85532bc67 100644 --- a/src/reflect/scala/reflect/io/ZipArchive.scala +++ b/src/reflect/scala/reflect/io/ZipArchive.scala @@ -124,7 +124,7 @@ abstract class ZipArchive(override val file: JFile, release: Option[String]) ext } } - @volatile private[this] var lastDirName: String = "" + @volatile private[this] var lastDirName: String = RootEntry private def dirNameUsingLast(name: String): String = { val last = lastDirName if (name.length > last.length + 1 && name.startsWith(last) && name.charAt(last.length) == '/' && name.indexOf('/', last.length + 1) == -1) { diff --git a/test/junit/scala/reflect/io/ZipArchiveTest.scala b/test/junit/scala/reflect/io/ZipArchiveTest.scala index 116c3e530858..4a4851662ab5 100644 --- a/test/junit/scala/reflect/io/ZipArchiveTest.scala +++ b/test/junit/scala/reflect/io/ZipArchiveTest.scala @@ -42,6 +42,37 @@ class ZipArchiveTest { } } + // was: java.lang.StringIndexOutOfBoundsException: String index out of range: -1, computing lazy val root + @Test def `weird entry name works`(): Unit = { + val jar = createSimpleTestJar("/bar") + val archive = new FileZipArchive(jar.toFile) + try { + val it = archive.iterator + assertTrue(it.hasNext) + val f = it.next() + assertFalse(it.hasNext) + assertEquals("bar", f.name) + } finally { + archive.close() + advisedly(Files.delete(jar)) + } + } + + @Test def `another weird entry name works`(): Unit = { + val jar = createSimpleTestJar("/.bar.baz") + val archive = new FileZipArchive(jar.toFile) + try { + val it = archive.iterator + assertTrue(it.hasNext) + val f = it.next() + assertFalse(it.hasNext) + assertEquals(".bar.baz", f.name) + } finally { + archive.close() + advisedly(Files.delete(jar)) + } + } + 
private def manifestAt(location: URI): URL = ScalaClassLoader.fromURLs(List(location.toURL), null).getResource("META-INF/MANIFEST.MF"); // ZipArchive.fromManifestURL(URL) @@ -57,21 +88,35 @@ class ZipArchiveTest { assertTrue(it.hasNext) val f = it.next() assertFalse(it.hasNext) - assertEquals("foo.class", f.name) + assertEquals(testEntry, f.name) } finally { archive.close() advisedly(Files.delete(jar)) } } - private def createTestJar(): JPath = { + private def testEntry = "foo.class" + + private def createTestJar(entryName: String = testEntry): JPath = { val f = Files.createTempFile("junit", ".jar") val man = new Manifest() man.getMainAttributes().put(Attributes.Name.MANIFEST_VERSION, "1.0") - man.getEntries().put("foo.class", new Attributes(0)) + man.getEntries().put(entryName, new Attributes(0)) val jout = new JarOutputStream(Files.newOutputStream(f), man) try { - jout.putNextEntry(new JarEntry("foo.class")) + jout.putNextEntry(new JarEntry(entryName)) + val bytes = "hello, world".getBytes + jout.write(bytes, 0, bytes.length) + } finally { + jout.close() + } + f + } + private def createSimpleTestJar(entryName: String = testEntry): JPath = { + val f = Files.createTempFile("junit", ".jar") + val jout = new JarOutputStream(Files.newOutputStream(f)) + try { + jout.putNextEntry(new JarEntry(entryName)) val bytes = "hello, world".getBytes jout.write(bytes, 0, bytes.length) } finally { From 411d548fb870aa7dd0cc7ca24d60157d9bcdde58 Mon Sep 17 00:00:00 2001 From: Eugene Platonov Date: Sat, 6 Mar 2021 20:24:10 -0500 Subject: [PATCH 038/769] Fix regression in ZipArchive --- src/reflect/scala/reflect/io/ZipArchive.scala | 2 +- test/junit/scala/reflect/io/ZipArchiveTest.scala | 15 +++++++++++++++ 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/src/reflect/scala/reflect/io/ZipArchive.scala b/src/reflect/scala/reflect/io/ZipArchive.scala index 157e2e8e2c62..24452194f190 100644 --- a/src/reflect/scala/reflect/io/ZipArchive.scala +++ b/src/reflect/scala/reflect/io/ZipArchive.scala @@ -139,7 +139,7 @@ abstract class ZipArchive(override val file: JFile, release: Option[String]) ext ensureDir(name) } - @volatile private[this] var lastDirName: String = "" + @volatile private[this] var lastDirName: String = RootEntry private def dirNameUsingLast(name: String): String = { val last = lastDirName if (name.length > last.length + 1 && name.startsWith(last) && name.charAt(last.length) == '/' && name.indexOf('/', last.length + 1) == -1) { diff --git a/test/junit/scala/reflect/io/ZipArchiveTest.scala b/test/junit/scala/reflect/io/ZipArchiveTest.scala index 40bf4b540839..ec7ede4348b6 100644 --- a/test/junit/scala/reflect/io/ZipArchiveTest.scala +++ b/test/junit/scala/reflect/io/ZipArchiveTest.scala @@ -28,6 +28,21 @@ class ZipArchiveTest { } } + @Test + def weirdFileAtRoot(): Unit = { + val f = Files.createTempFile("test", ".jar").tap {f => + Using.resource(new JarOutputStream(Files.newOutputStream(f))) { jout => + jout.putNextEntry(new JarEntry("/.hey.txt")) + val bytes = "hello, world".getBytes + jout.write(bytes, 0, bytes.length) + () + } + } + Using.resources(ForDeletion(f), new FileZipArchive(f.toFile)){ (_, fza) => + assertEquals(Seq(".hey.txt"), fza.iterator.toSeq.map(_.name)) + } + } + @Test def missingFile(): Unit = { val f = Paths.get("xxx.does.not.exist") From cde5db30faba9f466239704abf4c50576ac703eb Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Fri, 12 Mar 2021 12:48:26 +0100 Subject: [PATCH 039/769] check private[this] members in override checking --- 
.../tools/nsc/transform/OverridingPairs.scala | 2 +- .../tools/partest/ScaladocModelTest.scala | 10 ++--- .../scala/reflect/internal/Symbols.scala | 6 +-- test/files/neg/t12349.check | 37 ++++++++++++++++--- test/files/neg/t12349/t12349b.scala | 6 +-- test/files/neg/t12349/t12349c.scala | 4 +- test/files/neg/t4762.check | 6 ++- test/files/neg/t9334.check | 6 +++ test/files/neg/t9334.scala | 6 +++ 9 files changed, 62 insertions(+), 21 deletions(-) create mode 100644 test/files/neg/t9334.check create mode 100644 test/files/neg/t9334.scala diff --git a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala index 6387ddde49d7..b1930b201737 100644 --- a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala +++ b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala @@ -37,7 +37,7 @@ abstract class OverridingPairs extends SymbolPairs { * including bridges. But it may be refined in subclasses. */ override protected def exclude(sym: Symbol) = ( - sym.isPrivateLocal + (sym.isPrivateLocal && sym.isParamAccessor) || sym.isArtifact || sym.isConstructor || (sym.isPrivate && sym.owner != base) // Privates aren't inherited. Needed for pos/t7475a.scala diff --git a/src/partest/scala/tools/partest/ScaladocModelTest.scala b/src/partest/scala/tools/partest/ScaladocModelTest.scala index 487c962a298a..ec158f9cfd60 100644 --- a/src/partest/scala/tools/partest/ScaladocModelTest.scala +++ b/src/partest/scala/tools/partest/ScaladocModelTest.scala @@ -85,15 +85,15 @@ abstract class ScaladocModelTest extends DirectTest { System.setErr(prevErr) } - private[this] var settings: doc.Settings = null + private[this] var docSettings: doc.Settings = null // create a new scaladoc compiler def newDocFactory: DocFactory = { - settings = new doc.Settings(_ => ()) - settings.scaladocQuietRun = true // yaay, no more "model contains X documentable templates"! + docSettings = new doc.Settings(_ => ()) + docSettings.scaladocQuietRun = true // yaay, no more "model contains X documentable templates"! val args = extraSettings + " " + scaladocSettings - new ScalaDoc.Command((CommandLineParser tokenize (args)), settings) // side-effecting, I think - val docFact = new DocFactory(new ConsoleReporter(settings), settings) + new ScalaDoc.Command((CommandLineParser tokenize (args)), docSettings) // side-effecting, I think + val docFact = new DocFactory(new ConsoleReporter(docSettings), docSettings) docFact } diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index a144fe6e8c63..9a16166b1f69 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -3508,7 +3508,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => */ class ModuleClassSymbol protected[Symbols] (owner: Symbol, pos: Position, name: TypeName) extends ClassSymbol(owner, pos, name) { - private[this] var module: Symbol = _ + private[this] var moduleSymbol: Symbol = _ private[this] var typeOfThisCache: Type = _ private[this] var typeOfThisPeriod = NoPeriod @@ -3541,8 +3541,8 @@ trait Symbols extends api.Symbols { self: SymbolTable => implicitMembersCacheValue } // The null check seems to be necessary for the reifier. 
- override def sourceModule = if (module ne null) module else companionModule - override def sourceModule_=(module: Symbol): Unit = { this.module = module } + override def sourceModule = if (moduleSymbol ne null) moduleSymbol else companionModule + override def sourceModule_=(module: Symbol): Unit = { this.moduleSymbol = module } } class PackageObjectClassSymbol protected[Symbols] (owner0: Symbol, pos0: Position) diff --git a/test/files/neg/t12349.check b/test/files/neg/t12349.check index 2c7426ad6a95..ed6d1b26451d 100644 --- a/test/files/neg/t12349.check +++ b/test/files/neg/t12349.check @@ -33,6 +33,11 @@ def a8(): Unit (defined in class t12349a) override should be public protected[this] override def a8(): Unit = println("Inner12349b#a8()") // weaker access privileges ^ +t12349b.scala:14: error: weaker access privileges in overriding +def a9(): Unit (defined in class t12349a) + override should not be private + private[this] override def a9(): Unit = println("Inner12349b#a9()") // weaker access privileges + ^ t12349b.scala:18: error: weaker access privileges in overriding protected[package t12349] def b3(): Unit (defined in class t12349a) override should not be private @@ -48,6 +53,11 @@ protected[package t12349] def b7(): Unit (defined in class t12349a) override should at least be protected[t12349] private[t12349] override def b7(): Unit = println("Inner12349b#b7()") // weaker access privileges ^ +t12349b.scala:24: error: weaker access privileges in overriding +protected[package t12349] def b9(): Unit (defined in class t12349a) + override should not be private + private[this] override def b9(): Unit = println("Inner12349b#b9()") // weaker access privileges + ^ t12349b.scala:27: error: weaker access privileges in overriding private[package t12349] def c2(): Unit (defined in class t12349a) override should at least be private[t12349] @@ -73,6 +83,11 @@ private[package t12349] def c8(): Unit (defined in class t12349a) override should at least be private[t12349] protected[this] override def c8(): Unit = println("Inner12349b#c8()") // weaker access privileges ^ +t12349b.scala:34: error: weaker access privileges in overriding +private[package t12349] def c9(): Unit (defined in class t12349a) + override should not be private + private[this] override def c9(): Unit = println("Inner12349b#c9()") // weaker access privileges + ^ t12349b.scala:36: error: method d1 overrides nothing override def d1(): Unit = println("Inner12349b#d1()") // overrides nothing ^ @@ -135,6 +150,11 @@ def a8(): Unit (defined in class t12349a) override should be public protected[this] override def a8(): Unit = println("Inner12349c#a8()") // weaker access privileges ^ +t12349c.scala:18: error: weaker access privileges in overriding +def a9(): Unit (defined in class t12349a) + override should not be private + private[this] override def a9(): Unit = println("Inner12349c#a9()") // weaker access privileges + ^ t12349c.scala:22: error: weaker access privileges in overriding protected[package t12349] def b3(): Unit (defined in class t12349a) override should not be private @@ -150,6 +170,11 @@ protected[package t12349] def b7(): Unit (defined in class t12349a) override should at least be protected[t12349] private[pkg] override def b7(): Unit = println("Inner12349c#b7()") // weaker access privileges ^ +t12349c.scala:28: error: weaker access privileges in overriding +protected[package t12349] def b9(): Unit (defined in class t12349a) + override should not be private + private[this] override def b9(): Unit = println("Inner12349c#b9()") 
// weaker access privileges + ^ t12349c.scala:31: error: weaker access privileges in overriding private[package t12349] def c2(): Unit (defined in class t12349a) override should at least be private[t12349] @@ -185,14 +210,14 @@ private[package t12349] def c8(): Unit (defined in class t12349a) override should at least be private[t12349] protected[this] override def c8(): Unit = println("Inner12349c#c8()") // weaker access privileges ^ +t12349c.scala:38: error: weaker access privileges in overriding +private[package t12349] def c9(): Unit (defined in class t12349a) + override should not be private + private[this] override def c9(): Unit = println("Inner12349c#c9()") // overrides nothing (invisible) + ^ t12349c.scala:30: error: method c1 overrides nothing override def c1(): Unit = println("Inner12349c#c1()") // overrides nothing (invisible) ^ -t12349c.scala:38: error: method c9 overrides nothing. -Note: the super classes of class Inner12349c contain the following, non final members named c9: -private[package t12349] def c9(): Unit - private[this] override def c9(): Unit = println("Inner12349c#c9()") // overrides nothing (invisible) - ^ t12349c.scala:40: error: method d1 overrides nothing override def d1(): Unit = println("Inner12349c#d1()") // overrides nothing ^ @@ -220,4 +245,4 @@ t12349c.scala:47: error: method d8 overrides nothing t12349c.scala:48: error: method d9 overrides nothing private[this] override def d9(): Unit = println("Inner12349c#d9()") // overrides nothing ^ -52 errors +57 errors diff --git a/test/files/neg/t12349/t12349b.scala b/test/files/neg/t12349/t12349b.scala index 19079a3eb003..38b3309779b3 100644 --- a/test/files/neg/t12349/t12349b.scala +++ b/test/files/neg/t12349/t12349b.scala @@ -11,7 +11,7 @@ object t12349b { protected[t12349] override def a6(): Unit = println("Inner12349b#a6()") // weaker access privileges private[t12349] override def a7(): Unit = println("Inner12349b#a7()") // weaker access privileges protected[this] override def a8(): Unit = println("Inner12349b#a8()") // weaker access privileges - private[this] override def a9(): Unit = println("Inner12349b#a9()") // [#9334] + private[this] override def a9(): Unit = println("Inner12349b#a9()") // weaker access privileges override def b1(): Unit = println("Inner12349b#b1()") protected override def b2(): Unit = println("Inner12349b#b2()") @@ -21,7 +21,7 @@ object t12349b { protected[t12349] override def b6(): Unit = println("Inner12349b#b6()") private[t12349] override def b7(): Unit = println("Inner12349b#b7()") // weaker access privileges protected[this] override def b8(): Unit = println("Inner12349b#b8()") // [#12349] - not fixed by PR #9525 - private[this] override def b9(): Unit = println("Inner12349b#b9()") // [#9334] + private[this] override def b9(): Unit = println("Inner12349b#b9()") // weaker access privileges override def c1(): Unit = println("Inner12349b#c1()") protected override def c2(): Unit = println("Inner12349b#c2()") // weaker access privileges @@ -31,7 +31,7 @@ object t12349b { protected[t12349] override def c6(): Unit = println("Inner12349b#c6()") private[t12349] override def c7(): Unit = println("Inner12349b#c7()") protected[this] override def c8(): Unit = println("Inner12349b#c8()") // weaker access privileges - private[this] override def c9(): Unit = println("Inner12349b#c9()") // [#9334] + private[this] override def c9(): Unit = println("Inner12349b#c9()") // weaker access privileges override def d1(): Unit = println("Inner12349b#d1()") // overrides nothing protected override def d2(): 
Unit = println("Inner12349b#d2()") // overrides nothing diff --git a/test/files/neg/t12349/t12349c.scala b/test/files/neg/t12349/t12349c.scala index 3ad062d33472..942991a22430 100644 --- a/test/files/neg/t12349/t12349c.scala +++ b/test/files/neg/t12349/t12349c.scala @@ -15,7 +15,7 @@ package pkg { protected[pkg] override def a6(): Unit = println("Inner12349c#a6()") // weaker access privileges private[pkg] override def a7(): Unit = println("Inner12349c#a7()") // weaker access privileges protected[this] override def a8(): Unit = println("Inner12349c#a8()") // weaker access privileges - private[this] override def a9(): Unit = println("Inner12349c#a9()") // [#9334] + private[this] override def a9(): Unit = println("Inner12349c#a9()") // weaker access privileges override def b1(): Unit = println("Inner12349c#b1()") protected override def b2(): Unit = println("Inner12349c#b2()") @@ -25,7 +25,7 @@ package pkg { protected[pkg] override def b6(): Unit = println("Inner12349c#b6()") private[pkg] override def b7(): Unit = println("Inner12349c#b7()") // weaker access privileges protected[this] override def b8(): Unit = println("Inner12349c#b8()") // [#12349] - not fixed by PR #9525 - private[this] override def b9(): Unit = println("Inner12349c#b9()") // [#9334] + private[this] override def b9(): Unit = println("Inner12349c#b9()") // weaker access privileges override def c1(): Unit = println("Inner12349c#c1()") // overrides nothing (invisible) protected override def c2(): Unit = println("Inner12349c#c2()") // weaker access privileges diff --git a/test/files/neg/t4762.check b/test/files/neg/t4762.check index bd1c9ebff690..aa7bdcec39eb 100644 --- a/test/files/neg/t4762.check +++ b/test/files/neg/t4762.check @@ -4,6 +4,10 @@ t4762.scala:17: warning: private[this] value x in class B shadows mutable x inhe t4762.scala:50: warning: private[this] value x in class Derived shadows mutable x inherited from class Base. Changes to x will not be visible within class Derived - you may want to give them distinct names. class Derived( x : Int ) extends Base( x ) { override def toString = x.toString } ^ -error: No warnings can be incurred under -Werror. +t4762.scala:13: error: weaker access privileges in overriding +val y: Int (defined in class A) + override should not be private + private[this] def y: Int = 99 + ^ 2 warnings 1 error diff --git a/test/files/neg/t9334.check b/test/files/neg/t9334.check new file mode 100644 index 000000000000..e5fe6ef6d0ed --- /dev/null +++ b/test/files/neg/t9334.check @@ -0,0 +1,6 @@ +t9334.scala:5: error: weaker access privileges in overriding +def aaa: Int (defined in class A) + override should not be private + private[this] def aaa: Int = 42 + ^ +1 error diff --git a/test/files/neg/t9334.scala b/test/files/neg/t9334.scala new file mode 100644 index 000000000000..c8838e855db2 --- /dev/null +++ b/test/files/neg/t9334.scala @@ -0,0 +1,6 @@ +class A { + def aaa: Int = 10 +} +class B extends A { + private[this] def aaa: Int = 42 +} From be68acc777fe1ebb0393ab9c03810d40ecbc6fb9 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Thu, 29 Aug 2019 01:28:07 -0700 Subject: [PATCH 040/769] Byte and Short get signed toHexString Document sign extension in conversion to int for purposes of enrichment. Similarly for toOctalString and toBinaryString. 
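
For illustration, the widening that the test now documents (expected values
are the ones asserted in the updated richs.check): the Byte or Short receiver
is converted to Int for enrichment before formatting, so negative values come
out sign-extended to 32 bits.

  16.toByte.toHexString       // "10"
  (-1).toByte.toHexString     // "ffffffff" (widened to Int, sign-extended)
  0x8000.toShort.toHexString  // "ffff8000" (widened to Int, sign-extended)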
--- test/files/run/richs.check | 58 ++++++++++++- test/files/run/richs.scala | 173 +++++++++++++++++++++++++------------ 2 files changed, 174 insertions(+), 57 deletions(-) diff --git a/test/files/run/richs.check b/test/files/run/richs.check index 48812eb28939..ff005ad6b165 100644 --- a/test/files/run/richs.check +++ b/test/files/run/richs.check @@ -1,7 +1,47 @@ -RichCharTest1: +RichByteTest: +10000 +10 +20 +1111111 +7f +177 +11111111111111111111111110000000 +ffffff80 +37777777600 +11111111111111111111111111111111 +ffffffff +37777777777 + +RichShortTest: +10000 +10 +20 +111111111111111 +7fff +77777 +11111111111111111000000000000000 +ffff8000 +37777700000 +11111111111111111111111111111111 +ffffffff +37777777777 + +RichCharTest: true true +10000 +10 +20 +111111111111111 +7fff +77777 +1000000000000000 +8000 +100000 +1111111111111111 +ffff +177777 RichIntTest: 10 @@ -16,6 +56,20 @@ RichIntTest: 10001 ffffffff +RichLongTest: +10000 +10 +20 +111111111111111 +7fff +77777 +1000000000000000 +8000 +100000 +1111111111111111111111111111111111111111111111111111111111111111 +ffffffffffffffff +1777777777777777777777 + RichStringTest1: s1: abc s2: abc\txyz\n @@ -67,6 +121,8 @@ s4: abc |xyz s5: abc xyz + +RichStringTest6: List(a, b, c, d) List(a, b, c, d) List(a, b, c, d) diff --git a/test/files/run/richs.scala b/test/files/run/richs.scala index 560de183ff74..d757be3f39d9 100644 --- a/test/files/run/richs.scala +++ b/test/files/run/richs.scala @@ -1,4 +1,4 @@ -trait RichTest { +trait RichTest extends Runnable { val s1 = """abc""" val s2 = """abc\txyz\n""" val s3 = """abc @@ -7,44 +7,108 @@ trait RichTest { |xyz""" val s5 = """abc #xyz""" - def getObjectName: String = { - val cn = this.getClass().getName() - cn.substring(0, cn.length-1) + def getObjectName: String = getClass.getName.init + def test(): Unit + override final def run() = { + println(s"\n$getObjectName:") + test() } def length[A](it: Iterator[A]) = it.toList.length def length[A](it: Iterable[A]) = it.toList.length - def run(): Unit } -object RichCharTest1 extends RichTest { - def run(): Unit = { - println("\n" + getObjectName + ":") + +// documents undesirable sign extension +object RichByteTest extends RichTest { + override def test() = { + val sixteen = 16.toByte + println(sixteen.toBinaryString) + println(sixteen.toHexString) + println(sixteen.toOctalString) + val max = 0x7F.toByte + println(max.toBinaryString) + println(max.toHexString) + println(max.toOctalString) + val extended = 0x80.toByte + println(extended.toBinaryString) + println(extended.toHexString) + println(extended.toOctalString) + val neg = -1.toByte + println(neg.toBinaryString) + println(neg.toHexString) + println(neg.toOctalString) + } +} + +object RichCharTest extends RichTest { + override def test() = { println('1'.asDigit == 1) println('A'.asDigit == 10) + val sixteen = 16.toChar + println(sixteen.toBinaryString) + println(sixteen.toHexString) + println(sixteen.toOctalString) + val max = 0x7FFF.toChar + println(max.toBinaryString) + println(max.toHexString) + println(max.toOctalString) + val extended = 0x8000.toChar + println(extended.toBinaryString) + println(extended.toHexString) + println(extended.toOctalString) + val neg = -1.toChar + println(neg.toBinaryString) + println(neg.toHexString) + println(neg.toOctalString) + } +} + +// documents undesirable sign extension +object RichShortTest extends RichTest { + override def test() = { + val sixteen = 16.toShort + println(sixteen.toBinaryString) + println(sixteen.toHexString) + println(sixteen.toOctalString) + val max = 
0x7FFF.toShort + println(max.toBinaryString) + println(max.toHexString) + println(max.toOctalString) + val extended = 0x8000.toShort + println(extended.toBinaryString) + println(extended.toHexString) + println(extended.toOctalString) + val neg = -1.toShort + println(neg.toBinaryString) + println(neg.toHexString) + println(neg.toOctalString) + } +} + +object RichLongTest extends RichTest { + override def test() = { + val sixteen = 16L + println(sixteen.toBinaryString) + println(sixteen.toHexString) + println(sixteen.toOctalString) + val max = 0x7FFFL + println(max.toBinaryString) + println(max.toHexString) + println(max.toOctalString) + val extended = 0x8000L + println(extended.toBinaryString) + println(extended.toHexString) + println(extended.toOctalString) + val neg = -1L + println(neg.toBinaryString) + println(neg.toHexString) + println(neg.toOctalString) } } -// object RichCharTest2 extends RichTest { -// case class C(s: String) { -// private val it = s.iterator -// private var c: Char = _ -// def ch(): Char = c -// def nextch(): Unit = { c = if (it.hasNext) it.next() else ';' } -// def err(msg: String) = println(msg) -// nextch() -// } -// def run { -// println("\n" + getObjectName + ":") -// val c1 = C("x4A;") -// val s1 = xml.Utility.parseCharRef(c1.ch, c1.nextch, c1.err) -// val c2 = C("74;") -// val s2 = xml.Utility.parseCharRef(c2.ch, c2.nextch, c2.err) -// println(s1 == s2) -// } -// } + object RichIntTest extends RichTest { private val n = 10 private val m = -2 - def run(): Unit = { - println("\n" + getObjectName + ":") + def test(): Unit = { println(length(0 until n)) println(length(0 to n)) println(length(m until n)) @@ -52,17 +116,16 @@ object RichIntTest extends RichTest { println(length(n until m)) println(length(n to m)) - println(16.toBinaryString) // should be "10000" - println(16.toHexString) // should be "10" - println(16.toOctalString) // should be "20" + println(16.toBinaryString) + println(16.toHexString) + println(16.toOctalString) - println(65537.toHexString) // should be "10001" - println((-1).toHexString) // should be "ffffffff" + println(65537.toHexString) + println((-1).toHexString) } } object RichStringTest1 extends RichTest { - def run(): Unit = { - println("\n" + getObjectName + ":") + def test(): Unit = { println("s1: " + s1) println("s2: " + s2) println("s3: " + s3) @@ -71,8 +134,7 @@ object RichStringTest1 extends RichTest { } } object RichStringTest2 extends RichTest { - def run(): Unit = { - println("\n" + getObjectName + ":") + def test(): Unit = { Console.print("s1: "); s1.linesIterator foreach println Console.print("s2: "); s2.linesIterator foreach println Console.print("s3: "); s3.linesIterator foreach println @@ -81,8 +143,7 @@ object RichStringTest2 extends RichTest { } } object RichStringTest3 extends RichTest { - def run(): Unit = { - println("\n" + getObjectName + ":") + def test(): Unit = { println("s1: " + s1.stripLineEnd) println("s2: " + s2.stripLineEnd) println("s3: " + s3.stripLineEnd) @@ -91,8 +152,7 @@ object RichStringTest3 extends RichTest { } } object RichStringTest4 extends RichTest { - def run(): Unit = { - println("\n" + getObjectName + ":") + def test(): Unit = { println("s1: " + s1.stripMargin) println("s2: " + s2.stripMargin) println("s3: " + s3.stripMargin) @@ -101,8 +161,7 @@ object RichStringTest4 extends RichTest { } } object RichStringTest5 extends RichTest { - def run(): Unit = { - println("\n" + getObjectName + ":") + def test(): Unit = { println("s1: " + s3.stripMargin('#')) println("s2: " + s3.stripMargin('#')) 
println("s3: " + s3.stripMargin('#')) @@ -111,7 +170,7 @@ object RichStringTest5 extends RichTest { } } object RichStringTest6 extends RichTest { - def run(): Unit = { + def test(): Unit = { println("a:b:c:d".split(':').toList) println("a.b.c.d".split('.').toList) println("a$b$c$d".split('$').toList) @@ -121,17 +180,19 @@ object RichStringTest6 extends RichTest { println("a:b.c$d".split(Array(':', '.', '$')).toList) } } -/** xxx */ object Test { - def main(args: Array[String]): Unit = { - RichCharTest1.run() - //RichCharTest2.run - RichIntTest.run() - RichStringTest1.run() - RichStringTest2.run() - RichStringTest3.run() - RichStringTest4.run() - RichStringTest5.run() - RichStringTest6.run() - } + def main(args: Array[String]): Unit = + List( + RichByteTest, + RichShortTest, + RichCharTest, + RichIntTest, + RichLongTest, + RichStringTest1, + RichStringTest2, + RichStringTest3, + RichStringTest4, + RichStringTest5, + RichStringTest6, + ).foreach(_.run()) } From f81e1a91aed32be5d3cd8bd2de5bb6ce4dc7369b Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Mon, 15 Mar 2021 22:39:47 -0700 Subject: [PATCH 041/769] Convert enrichments tests to junit --- test/files/run/richs.check | 132 ----------------- test/files/run/richs.scala | 198 ------------------------- test/junit/scala/lang/RicherTest.scala | 144 ++++++++++++++++++ 3 files changed, 144 insertions(+), 330 deletions(-) delete mode 100644 test/files/run/richs.check delete mode 100644 test/files/run/richs.scala create mode 100644 test/junit/scala/lang/RicherTest.scala diff --git a/test/files/run/richs.check b/test/files/run/richs.check deleted file mode 100644 index ff005ad6b165..000000000000 --- a/test/files/run/richs.check +++ /dev/null @@ -1,132 +0,0 @@ - -RichByteTest: -10000 -10 -20 -1111111 -7f -177 -11111111111111111111111110000000 -ffffff80 -37777777600 -11111111111111111111111111111111 -ffffffff -37777777777 - -RichShortTest: -10000 -10 -20 -111111111111111 -7fff -77777 -11111111111111111000000000000000 -ffff8000 -37777700000 -11111111111111111111111111111111 -ffffffff -37777777777 - -RichCharTest: -true -true -10000 -10 -20 -111111111111111 -7fff -77777 -1000000000000000 -8000 -100000 -1111111111111111 -ffff -177777 - -RichIntTest: -10 -11 -12 -13 -0 -0 -10000 -10 -20 -10001 -ffffffff - -RichLongTest: -10000 -10 -20 -111111111111111 -7fff -77777 -1000000000000000 -8000 -100000 -1111111111111111111111111111111111111111111111111111111111111111 -ffffffffffffffff -1777777777777777777777 - -RichStringTest1: -s1: abc -s2: abc\txyz\n -s3: abc - xyz -s4: abc - |xyz -s5: abc - #xyz - -RichStringTest2: -s1: abc -s2: abc\txyz\n -s3: abc - xyz -s4: abc - |xyz -s5: abc - #xyz - -RichStringTest3: -s1: abc -s2: abc\txyz\n -s3: abc - xyz -s4: abc - |xyz -s5: abc - #xyz - -RichStringTest4: -s1: abc -s2: abc\txyz\n -s3: abc - xyz -s4: abc -xyz -s5: abc - #xyz - -RichStringTest5: -s1: abc - xyz -s2: abc - xyz -s3: abc - xyz -s4: abc - |xyz -s5: abc -xyz - -RichStringTest6: -List(a, b, c, d) -List(a, b, c, d) -List(a, b, c, d) -List(a, b, c, d) -List(a, b, c, d) -List(a, b, c, d) -List(a, b, c, d) diff --git a/test/files/run/richs.scala b/test/files/run/richs.scala deleted file mode 100644 index d757be3f39d9..000000000000 --- a/test/files/run/richs.scala +++ /dev/null @@ -1,198 +0,0 @@ -trait RichTest extends Runnable { - val s1 = """abc""" - val s2 = """abc\txyz\n""" - val s3 = """abc - xyz""" - val s4 = """abc - |xyz""" - val s5 = """abc - #xyz""" - def getObjectName: String = getClass.getName.init - def test(): Unit - override final def run() = { - 
println(s"\n$getObjectName:") - test() - } - def length[A](it: Iterator[A]) = it.toList.length - def length[A](it: Iterable[A]) = it.toList.length -} - -// documents undesirable sign extension -object RichByteTest extends RichTest { - override def test() = { - val sixteen = 16.toByte - println(sixteen.toBinaryString) - println(sixteen.toHexString) - println(sixteen.toOctalString) - val max = 0x7F.toByte - println(max.toBinaryString) - println(max.toHexString) - println(max.toOctalString) - val extended = 0x80.toByte - println(extended.toBinaryString) - println(extended.toHexString) - println(extended.toOctalString) - val neg = -1.toByte - println(neg.toBinaryString) - println(neg.toHexString) - println(neg.toOctalString) - } -} - -object RichCharTest extends RichTest { - override def test() = { - println('1'.asDigit == 1) - println('A'.asDigit == 10) - val sixteen = 16.toChar - println(sixteen.toBinaryString) - println(sixteen.toHexString) - println(sixteen.toOctalString) - val max = 0x7FFF.toChar - println(max.toBinaryString) - println(max.toHexString) - println(max.toOctalString) - val extended = 0x8000.toChar - println(extended.toBinaryString) - println(extended.toHexString) - println(extended.toOctalString) - val neg = -1.toChar - println(neg.toBinaryString) - println(neg.toHexString) - println(neg.toOctalString) - } -} - -// documents undesirable sign extension -object RichShortTest extends RichTest { - override def test() = { - val sixteen = 16.toShort - println(sixteen.toBinaryString) - println(sixteen.toHexString) - println(sixteen.toOctalString) - val max = 0x7FFF.toShort - println(max.toBinaryString) - println(max.toHexString) - println(max.toOctalString) - val extended = 0x8000.toShort - println(extended.toBinaryString) - println(extended.toHexString) - println(extended.toOctalString) - val neg = -1.toShort - println(neg.toBinaryString) - println(neg.toHexString) - println(neg.toOctalString) - } -} - -object RichLongTest extends RichTest { - override def test() = { - val sixteen = 16L - println(sixteen.toBinaryString) - println(sixteen.toHexString) - println(sixteen.toOctalString) - val max = 0x7FFFL - println(max.toBinaryString) - println(max.toHexString) - println(max.toOctalString) - val extended = 0x8000L - println(extended.toBinaryString) - println(extended.toHexString) - println(extended.toOctalString) - val neg = -1L - println(neg.toBinaryString) - println(neg.toHexString) - println(neg.toOctalString) - } -} - -object RichIntTest extends RichTest { - private val n = 10 - private val m = -2 - def test(): Unit = { - println(length(0 until n)) - println(length(0 to n)) - println(length(m until n)) - println(length(m to n)) - println(length(n until m)) - println(length(n to m)) - - println(16.toBinaryString) - println(16.toHexString) - println(16.toOctalString) - - println(65537.toHexString) - println((-1).toHexString) - } -} -object RichStringTest1 extends RichTest { - def test(): Unit = { - println("s1: " + s1) - println("s2: " + s2) - println("s3: " + s3) - println("s4: " + s4) - println("s5: " + s5) - } -} -object RichStringTest2 extends RichTest { - def test(): Unit = { - Console.print("s1: "); s1.linesIterator foreach println - Console.print("s2: "); s2.linesIterator foreach println - Console.print("s3: "); s3.linesIterator foreach println - Console.print("s4: "); s4.linesIterator foreach println - Console.print("s5: "); s5.linesIterator foreach println - } -} -object RichStringTest3 extends RichTest { - def test(): Unit = { - println("s1: " + s1.stripLineEnd) - 
println("s2: " + s2.stripLineEnd) - println("s3: " + s3.stripLineEnd) - println("s4: " + s4.stripLineEnd) - println("s5: " + s5.stripLineEnd) - } -} -object RichStringTest4 extends RichTest { - def test(): Unit = { - println("s1: " + s1.stripMargin) - println("s2: " + s2.stripMargin) - println("s3: " + s3.stripMargin) - println("s4: " + s4.stripMargin) - println("s5: " + s5.stripMargin) - } -} -object RichStringTest5 extends RichTest { - def test(): Unit = { - println("s1: " + s3.stripMargin('#')) - println("s2: " + s3.stripMargin('#')) - println("s3: " + s3.stripMargin('#')) - println("s4: " + s4.stripMargin('#')) - println("s5: " + s5.stripMargin('#')) - } -} -object RichStringTest6 extends RichTest { - def test(): Unit = { - println("a:b:c:d".split(':').toList) - println("a.b.c.d".split('.').toList) - println("a$b$c$d".split('$').toList) - println("a^b^c^d".split('^').toList) - println("a\\b\\c\\d".split('\\').toList) - println("a:b:c.d".split(Array(':', '.')).toList) - println("a:b.c$d".split(Array(':', '.', '$')).toList) - } -} -object Test { - def main(args: Array[String]): Unit = - List( - RichByteTest, - RichShortTest, - RichCharTest, - RichIntTest, - RichLongTest, - RichStringTest1, - RichStringTest2, - RichStringTest3, - RichStringTest4, - RichStringTest5, - RichStringTest6, - ).foreach(_.run()) -} diff --git a/test/junit/scala/lang/RicherTest.scala b/test/junit/scala/lang/RicherTest.scala new file mode 100644 index 000000000000..d6557e4fde44 --- /dev/null +++ b/test/junit/scala/lang/RicherTest.scala @@ -0,0 +1,144 @@ + +package scala + +import org.junit.{Assert, Test} +import scala.util.chaining._ + +class RicherTest { + import RicherTest._ + + private def assertEqualTo(expected: String)(actual: String) = Assert.assertEquals(expected, actual) + private def assertEqualTo(expected: Int)(actual: Int) = Assert.assertEquals(expected, actual) + private def assertEqualTo[A](expected: List[A])(actual: List[A]) = Assert.assertEquals(expected, actual) + + @Test def `Byte expansions should be byte-sized`(): Unit = { + val sixteen = 16.toByte + assertEqualTo(x"1_0000")(sixteen.toBinaryString) + assertEqualTo("10")(sixteen.toHexString) + assertEqualTo("20")(sixteen.toOctalString) + val max = 0x7F.toByte + assertEqualTo(x"111_1111")(max.toBinaryString) + assertEqualTo("7f")(max.toHexString) + assertEqualTo("177")(max.toOctalString) + val extended = 0x80.toByte + assertEqualTo("1" * 24 + x"1000_0000")(extended.toBinaryString) + assertEqualTo(x"ffff_ff80")(extended.toHexString) + assertEqualTo("37777777600")(extended.toOctalString) + val neg = -1.toByte + assertEqualTo("1" * 32)(neg.toBinaryString) + assertEqualTo("f" * 8)(neg.toHexString) + assertEqualTo("3" + "7" * 10)(neg.toOctalString) + } + @Test def `Short expansions should be short-sized`(): Unit = { + val sixteen = 16.toShort + assertEqualTo(x"1_0000")(sixteen.toBinaryString) + assertEqualTo("10")(sixteen.toHexString) + assertEqualTo("20")(sixteen.toOctalString) + val max = 0x7FFF.toShort + assertEqualTo(x"111_1111_1111_1111")(max.toBinaryString) + assertEqualTo("7fff")(max.toHexString) + assertEqualTo("77777")(max.toOctalString) + val extended = 0x8000.toShort + assertEqualTo(x"1111_1111_1111_1111_1000_0000_0000_0000")(extended.toBinaryString) + assertEqualTo(x"ffff_8000")(extended.toHexString) + assertEqualTo(x"37777700000")(extended.toOctalString) + val neg = -1.toShort + assertEqualTo("1" * 32)(neg.toBinaryString) + assertEqualTo(x"ffff_ffff")(neg.toHexString) + assertEqualTo(x"37777777777")(neg.toOctalString) + } + // same as 
short, but uses int conversion because unsigned + @Test def `Char expansions should be char-sized`(): Unit = { + val sixteen = 16.toChar + assertEqualTo(x"1_0000")(sixteen.toBinaryString) + assertEqualTo("10")(sixteen.toHexString) + assertEqualTo("20")(sixteen.toOctalString) + val max = 0x7FFF.toChar + assertEqualTo(x"111_1111_1111_1111")(max.toBinaryString) + assertEqualTo("7fff")(max.toHexString) + assertEqualTo("77777")(max.toOctalString) + val extended = 0x8000.toChar + assertEqualTo(x"1000_0000_0000_0000")(extended.toBinaryString) + assertEqualTo("8000")(extended.toHexString) + assertEqualTo(x"10_0000")(extended.toOctalString) + val neg = -1.toChar + assertEqualTo("1" * 16)(neg.toBinaryString) + assertEqualTo("ffff")(neg.toHexString) + assertEqualTo(x"17_7777")(neg.toOctalString) + } + @Test def `Chars are digits`(): Unit = { + assertEqualTo(1)('1'.asDigit) + assertEqualTo(10)('A'.asDigit) + } + @Test def `Ints are ranged`(): Unit = { + assertEqualTo(10)((0 until 10).length) + assertEqualTo(11)((0 to 10).length) + assertEqualTo(12)((-2 until 10).length) + assertEqualTo(13)((-2 to 10).length) + assertEqualTo(0)((10 until -2).length) + assertEqualTo(0)((10 to -2).length) + } + @Test def `Int strings`(): Unit = { + assertEqualTo(x"1_0000")(16.toBinaryString) + assertEqualTo("10")(16.toHexString) + assertEqualTo("20")(16.toOctalString) + assertEqualTo("10001")(65537.toHexString) + assertEqualTo("f" * 8)(-1.toHexString) + } + + // see also StringLikeTest + val s1 = """abc""" + val s2 = """abc\txyz\n""" + val s3 = """abc + xyz""" + val s4 = """abc + |xyz""" + val s5 = """abc + #xyz""" + @Test def `linesIterator iterates lines`(): Unit = { + assertEqualTo(1)(s1.linesIterator.length) + assertEqualTo(s1)(s1.linesIterator.next()) + assertEqualTo(1)(s2.linesIterator.length) + assertEqualTo(s2)(s2.linesIterator.next()) + assertEqualTo(2)(s3.linesIterator.length) + assertEqualTo("abc")(s3.linesIterator.next()) + assertEqualTo(" xyz")(s3.linesIterator.pipe { it => it.next(); it.next() }) + } + @Test def `stripLineEnd strips lines ends`(): Unit = { + assertEqualTo(s1)(s1.stripLineEnd) + assertEqualTo(s2)(s2.stripLineEnd) + assertEqualTo(s3)(s3.stripLineEnd) + assertEqualTo(s4)(s4.stripLineEnd) + assertEqualTo(s5)(s5.stripLineEnd) + assertEqualTo("abc")("abc\n".stripLineEnd) + } + @Test def `stripMargin strips lines margins`(): Unit = { + assertEqualTo(s1)(s1.stripMargin) + assertEqualTo(s2)(s2.stripMargin) + assertEqualTo(s3)(s3.stripMargin) + assertEqualTo("abc\nxyz")(s4.stripMargin) + assertEqualTo(s5)(s5.stripMargin) + } + @Test def `stripMargin strips custom margins`(): Unit = { + assertEqualTo(s1)(s1.stripMargin('#')) + assertEqualTo(s2)(s2.stripMargin('#')) + assertEqualTo(s3)(s3.stripMargin('#')) + assertEqualTo(s4)(s4.stripMargin('#')) + assertEqualTo("abc\nxyz")(s5.stripMargin('#')) + } + @Test def `split splits strings`(): Unit = { + assertEqualTo(List("a","b","c","d"))("a:b:c:d".split(':').toList) + assertEqualTo(List("a","b","c","d"))("a.b.c.d".split('.').toList) + assertEqualTo(List("a","b","c","d"))("a$b$c$d".split('$').toList) + assertEqualTo(List("a","b","c","d"))("a^b^c^d".split('^').toList) + assertEqualTo(List("a","b","c","d"))("a\\b\\c\\d".split('\\').toList) + assertEqualTo(List("a","b","c","d"))("a:b:c.d".split(Array(':','.')).toList) + assertEqualTo(List("a","b","c","d"))("a:b.c$d".split(Array(':','.','$')).toList) + } +} + +object RicherTest { + implicit class stripper(private val sc: StringContext) extends AnyVal { + def x(args: Any*) = 
StringContext.standardInterpolator(_.replace("_", ""), args, sc.parts) + } +} From b9e49f9b45d6a0dff299e512142bee703cc83e33 Mon Sep 17 00:00:00 2001 From: Martijn Hoekstra Date: Mon, 8 Mar 2021 10:45:53 +0100 Subject: [PATCH 042/769] allow $ escaping double quotes in interpolations --- src/compiler/scala/tools/nsc/ast/parser/Scanners.scala | 5 +++-- test/files/neg/t5856.check | 7 ++----- test/files/run/interpolation.check | 6 ++++++ test/files/run/interpolation.scala | 8 ++++++++ 4 files changed, 19 insertions(+), 7 deletions(-) diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala index 99e08ab4bce6..a4f8efc43eea 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala @@ -912,7 +912,7 @@ trait Scanners extends ScannersCommon { } } else if (ch == '$') { nextRawChar() - if (ch == '$') { + if (ch == '$' || ch == '"') { putChar(ch) nextRawChar() getStringPart(multiLine) @@ -938,7 +938,8 @@ trait Scanners extends ScannersCommon { next.token = kwArray(idx) } } else { - syntaxError(s"invalid string interpolation $$$ch, expected: $$$$, $$identifier or $${expression}") + val expectations = "$$, $\", $identifier or ${expression}" + syntaxError(s"invalid string interpolation $$$ch, expected: $expectations") } } else { val isUnclosedLiteral = (ch == SU || (!multiLine && (ch == CR || ch == LF))) diff --git a/test/files/neg/t5856.check b/test/files/neg/t5856.check index 8b968f173f9c..3d035a87e15c 100644 --- a/test/files/neg/t5856.check +++ b/test/files/neg/t5856.check @@ -1,9 +1,6 @@ -t5856.scala:10: error: invalid string interpolation $", expected: $$, $identifier or ${expression} - val s9 = s"$" - ^ t5856.scala:10: error: unclosed string literal val s9 = s"$" - ^ + ^ t5856.scala:2: error: error in interpolated string: identifier or block expected val s1 = s"$null" ^ @@ -28,4 +25,4 @@ t5856.scala:8: error: error in interpolated string: identifier or block expected t5856.scala:9: error: error in interpolated string: identifier or block expected val s8 = s"$super" ^ -10 errors +9 errors diff --git a/test/files/run/interpolation.check b/test/files/run/interpolation.check index 997abb449726..2ab952f46f75 100644 --- a/test/files/run/interpolation.check +++ b/test/files/run/interpolation.check @@ -30,3 +30,9 @@ Best price: 13.35 0 00 +"everybody loves escaped quotes" is a common sentiment. 
+hi"$" +hi"$" +hi"$" +hi"$" +hi"$" diff --git a/test/files/run/interpolation.scala b/test/files/run/interpolation.scala index 14d981934895..4dc85e9f1f56 100644 --- a/test/files/run/interpolation.scala +++ b/test/files/run/interpolation.scala @@ -29,4 +29,12 @@ object Test extends App { println(f"") println(f"${0}") println(f"${0}${0}") + + println(s"$"everybody loves escaped quotes$" is a common sentiment.") + println(f"hi$"$$$"") + println(raw"hi$"$$$"") + + println(s"""hi$"$$$"""") + println(f"""hi$"$$$"""") + println(raw"""hi$"$$$"""") } From 5126a0a97aae26738f060c74ada3fe5768fa2b21 Mon Sep 17 00:00:00 2001 From: Martijn Hoekstra Date: Thu, 19 Jul 2018 10:15:09 +0200 Subject: [PATCH 043/769] include spec updates for change --- spec/01-lexical-syntax.md | 10 ++++++---- spec/13-syntax-summary.md | 5 +++-- 2 files changed, 9 insertions(+), 6 deletions(-) diff --git a/spec/01-lexical-syntax.md b/spec/01-lexical-syntax.md index e240ef372ffa..c345935941b9 100644 --- a/spec/01-lexical-syntax.md +++ b/spec/01-lexical-syntax.md @@ -503,9 +503,10 @@ not processed, except for Unicode escapes. #### Interpolated string ```ebnf -interpolatedString ::= alphaid ‘"’ {printableChar \ (‘"’ | ‘$’) | escape} ‘"’ +interpolatedString ::= alphaid ‘"’ {printableChar \ (‘"’ | ‘$’) | escape} ‘"’ | alphaid ‘"""’ {[‘"’] [‘"’] char \ (‘"’ | ‘$’) | escape} {‘"’} ‘"""’ -escape ::= ‘$$’ +escape ::= ‘$$’ + | ‘$"’ | ‘$’ id | ‘$’ BlockExpr alphaid ::= upper idrest @@ -522,13 +523,14 @@ or multi-line (triple quote). Inside a interpolated string none of the usual escape characters are interpreted (except for unicode escapes) no matter whether the string literal is normal (enclosed in single quotes) or multi-line (enclosed in triple quotes). -Instead, there are two new forms of dollar sign escape. +Instead, there are three new forms of dollar sign escape. The most general form encloses an expression in `${` and `}`, i.e. `${expr}`. The expression enclosed in the braces that follow the leading `$` character is of syntactical category BlockExpr. Hence, it can contain multiple statements, and newlines are significant. Single ‘$’-signs are not permitted in isolation in a interpolated string. A single ‘$’-sign can still be obtained by doubling the ‘$’ -character: ‘$$’. +character: ‘$$’. A single ‘"’-sign in a single quoted interpolation would end the +interpolation. A single ‘"’-sign can be obtained by the sequence ‘\$"’. The simpler form consists of a ‘$’-sign followed by an identifier starting with a letter and followed only by letters, digits, and underscore characters, diff --git a/spec/13-syntax-summary.md b/spec/13-syntax-summary.md index 837054f5a772..442d76adb7a4 100644 --- a/spec/13-syntax-summary.md +++ b/spec/13-syntax-summary.md @@ -63,8 +63,9 @@ multiLineChars ::= {[‘"’] [‘"’] charNoDoubleQuote} {‘"’} interpolatedString ::= alphaid ‘"’ {printableChar \ (‘"’ | ‘\$’) | escape} ‘"’ | alphaid ‘"""’ {[‘"’] [‘"’] char \ (‘"’ | ‘\$’) | escape} {‘"’} ‘"""’ -escape ::= ‘\$\$’ - | ‘\$’ id +escape ::= ‘\$\$’ + | ‘\$"’ + | ‘\$’ id | ‘\$’ BlockExpr alphaid ::= upper idrest | varid From 0947ccd0b88483801fc8b986a60762fce78abfaa Mon Sep 17 00:00:00 2001 From: Eugene Yokota Date: Thu, 26 Mar 2020 18:51:04 -0400 Subject: [PATCH 044/769] Allow \" in single-quoted string interpolations Changing `"Hello, \"World\""` to `s"Hello, \"$who\""` no longer breaks. Before this change, `\"` terminated single-quoted interpolated string literals, now the string remains open. 
The scanner doesn't interpret the escape sequence, string interpolators can do so (`s` and `f` do). Breaking changes: - `raw"c:\"` no longer compiles, it's now an unclosed string - `raw"c:\" // uh"` used to evaluate to `"""c:\"""`, now it's `"""c:\" // uh"""` --- .../scala/tools/nsc/ast/parser/Scanners.scala | 30 ++++++++++++++----- test/files/neg/t6476.check | 4 +++ test/files/neg/t6476.scala | 9 ++++++ test/files/neg/t6476b.check | 7 +++++ test/files/neg/t6476b.scala | 8 +++++ test/files/neg/t8266-invalid-interp.check | 4 +-- test/files/neg/t8266-invalid-interp.scala | 2 +- test/files/pos/t11966.scala | 2 +- test/files/run/interpolation-repl.check | 12 ++++++++ test/files/run/interpolation-repl.scala | 9 ++++++ test/files/run/t6476.check | 13 ++++++++ test/files/run/t6476.scala | 23 ++++++++++++++ 12 files changed, 112 insertions(+), 11 deletions(-) create mode 100644 test/files/neg/t6476.check create mode 100644 test/files/neg/t6476.scala create mode 100644 test/files/neg/t6476b.check create mode 100644 test/files/neg/t6476b.scala create mode 100644 test/files/run/interpolation-repl.check create mode 100644 test/files/run/interpolation-repl.scala create mode 100644 test/files/run/t6476.check create mode 100644 test/files/run/t6476.scala diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala index a4f8efc43eea..5c165a6dfed0 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala @@ -854,7 +854,12 @@ trait Scanners extends ScannersCommon { } else unclosedStringLit() } - private def unclosedStringLit(): Unit = syntaxError("unclosed string literal") + private def unclosedStringLit(seenEscapedQuoteInInterpolation: Boolean = false): Unit = { + val note = + if (seenEscapedQuoteInInterpolation) "; note that `\\\"` no longer closes single-quoted interpolated string literals since 2.13.6, you can use a triple-quoted string instead" + else "" + syntaxError(s"unclosed string literal$note") + } private def replaceUnicodeEscapesInTriple(): Unit = if(strVal != null) { @@ -890,7 +895,8 @@ trait Scanners extends ScannersCommon { } } - @tailrec private def getStringPart(multiLine: Boolean): Unit = { + // for interpolated strings + @tailrec private def getStringPart(multiLine: Boolean, seenEscapedQuote: Boolean = false): Unit = { def finishStringPart() = { setStrVal() token = STRINGPART @@ -904,18 +910,27 @@ trait Scanners extends ScannersCommon { setStrVal() token = STRINGLIT } else - getStringPart(multiLine) + getStringPart(multiLine, seenEscapedQuote) } else { nextChar() setStrVal() token = STRINGLIT } + } else if (ch == '\\' && !multiLine) { + putChar(ch) + nextRawChar() + val q = ch == '"' + if (q || ch == '\\') { + putChar(ch) + nextRawChar() + } + getStringPart(multiLine, seenEscapedQuote || q) } else if (ch == '$') { nextRawChar() if (ch == '$' || ch == '"') { putChar(ch) nextRawChar() - getStringPart(multiLine) + getStringPart(multiLine, seenEscapedQuote) } else if (ch == '{') { finishStringPart() nextRawChar() @@ -946,13 +961,14 @@ trait Scanners extends ScannersCommon { if (isUnclosedLiteral) { if (multiLine) incompleteInputError("unclosed multi-line string literal") - else - unclosedStringLit() + else { + unclosedStringLit(seenEscapedQuote) + } } else { putChar(ch) nextRawChar() - getStringPart(multiLine) + getStringPart(multiLine, seenEscapedQuote) } } } diff --git a/test/files/neg/t6476.check b/test/files/neg/t6476.check new file mode 100644 index 
000000000000..bf0c65efc6b8 --- /dev/null +++ b/test/files/neg/t6476.check @@ -0,0 +1,4 @@ +t6476.scala:8: error: unclosed string literal; note that `\"` no longer closes single-quoted interpolated string literals since 2.13.6, you can use a triple-quoted string instead + mimi"\" + ^ +1 error diff --git a/test/files/neg/t6476.scala b/test/files/neg/t6476.scala new file mode 100644 index 000000000000..9b88e43593cb --- /dev/null +++ b/test/files/neg/t6476.scala @@ -0,0 +1,9 @@ +// only the last one doesn't parse +class C { + mimi"""\ """ + mimi"""\\""" + mimi"""\""" + mimi"\ " + mimi"\\" + mimi"\" +} diff --git a/test/files/neg/t6476b.check b/test/files/neg/t6476b.check new file mode 100644 index 000000000000..e6aa3e441214 --- /dev/null +++ b/test/files/neg/t6476b.check @@ -0,0 +1,7 @@ +t6476b.scala:2: error: invalid escape at terminal index 0 in "\". Use \\ for literal \. + val sa = s"""\""" + ^ +t6476b.scala:4: error: invalid escape '\ ' not one of [\b, \t, \n, \f, \r, \\, \", \', \uxxxx] at index 0 in "\ ". Use \\ for literal \. + val sc = s"""\ """ + ^ +2 errors diff --git a/test/files/neg/t6476b.scala b/test/files/neg/t6476b.scala new file mode 100644 index 000000000000..d601091972ce --- /dev/null +++ b/test/files/neg/t6476b.scala @@ -0,0 +1,8 @@ +class C { + val sa = s"""\""" + val sb = s"""\\""" + val sc = s"""\ """ + val ra = raw"""\""" + val rb = raw"""\\""" + val rc = raw"""\ """ +} diff --git a/test/files/neg/t8266-invalid-interp.check b/test/files/neg/t8266-invalid-interp.check index 0f55ef3eaf42..bdfcd97d6039 100644 --- a/test/files/neg/t8266-invalid-interp.check +++ b/test/files/neg/t8266-invalid-interp.check @@ -1,6 +1,6 @@ t8266-invalid-interp.scala:4: error: Trailing '\' escapes nothing. - f"a\", - ^ + f"""a\""", + ^ t8266-invalid-interp.scala:5: error: invalid escape '\x' not one of [\b, \t, \n, \f, \r, \\, \", \', \uxxxx] at index 1 in "a\xc". Use \\ for literal \. f"a\xc", ^ diff --git a/test/files/neg/t8266-invalid-interp.scala b/test/files/neg/t8266-invalid-interp.scala index 4b26546880a3..87579a68691b 100644 --- a/test/files/neg/t8266-invalid-interp.scala +++ b/test/files/neg/t8266-invalid-interp.scala @@ -1,7 +1,7 @@ trait X { def f = Seq( - f"a\", + f"""a\""", f"a\xc", // following could suggest \u000b for vertical tab, similar for \a alert f"a\vc" diff --git a/test/files/pos/t11966.scala b/test/files/pos/t11966.scala index 2e9632a34869..b662e71322da 100644 --- a/test/files/pos/t11966.scala +++ b/test/files/pos/t11966.scala @@ -3,5 +3,5 @@ object Test { val original = """\/ \/ /\""" val minimal = """\1234\""" - val alternative = raw"\1234\" + val alternative = raw"""\1234\""" } \ No newline at end of file diff --git a/test/files/run/interpolation-repl.check b/test/files/run/interpolation-repl.check new file mode 100644 index 000000000000..c6e246c806b1 --- /dev/null +++ b/test/files/run/interpolation-repl.check @@ -0,0 +1,12 @@ + +scala> raw"\"" +val res0: String = \" + +scala> raw"\" // this used to be a comment, but after scala/pull#8830 it's part of the string! " +val res1: String = "\" // this used to be a comment, but after scala/pull#8830 it's part of the string! 
" + +scala> raw"\" // this used to compile, now it's unclosed + ^ + error: unclosed string literal; note that `\"` no longer closes single-quoted interpolated string literals since 2.13.6, you can use a triple-quoted string instead + +scala> :quit diff --git a/test/files/run/interpolation-repl.scala b/test/files/run/interpolation-repl.scala new file mode 100644 index 000000000000..ba84178ce92c --- /dev/null +++ b/test/files/run/interpolation-repl.scala @@ -0,0 +1,9 @@ +import scala.tools.partest.ReplTest + +object Test extends ReplTest { + def code = """ +raw"\"" +raw"\" // this used to be a comment, but after scala/pull#8830 it's part of the string! " +raw"\" // this used to compile, now it's unclosed +""" +} diff --git a/test/files/run/t6476.check b/test/files/run/t6476.check new file mode 100644 index 000000000000..b7be3ae88a91 --- /dev/null +++ b/test/files/run/t6476.check @@ -0,0 +1,13 @@ +"Hello", Alice +"Hello", Alice +"Hello", Alice +"Hello", Alice +\"Hello\", Alice +\"Hello\", Alice +\TILT\ +\TILT\ +\\TILT\\ +\TILT\ +\TILT\ +\\TILT\\ +\TILT\ diff --git a/test/files/run/t6476.scala b/test/files/run/t6476.scala new file mode 100644 index 000000000000..a04645065a2a --- /dev/null +++ b/test/files/run/t6476.scala @@ -0,0 +1,23 @@ +object Test { + def main(args: Array[String]): Unit = { + val person = "Alice" + println(s"\"Hello\", $person") + println(s"""\"Hello\", $person""") + + println(f"\"Hello\", $person") + println(f"""\"Hello\", $person""") + + println(raw"\"Hello\", $person") + println(raw"""\"Hello\", $person""") + + println(s"\\TILT\\") + println(f"\\TILT\\") + println(raw"\\TILT\\") + + println(s"""\\TILT\\""") + println(f"""\\TILT\\""") + println(raw"""\\TILT\\""") + + println(raw"""\TILT\""") + } +} From 62f515d0d9a4c82c4cf681035a0ee73e918c2cf5 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Mon, 8 Mar 2021 16:09:07 +0100 Subject: [PATCH 045/769] Spec for \" in interpolated strings Also, unicode escapes are no longer interpreted in interpolated strings. Interpolators can still interpret them, but that's not in the spec. --- spec/01-lexical-syntax.md | 28 +++++++++++++++------------- spec/13-syntax-summary.md | 6 ++++-- 2 files changed, 19 insertions(+), 15 deletions(-) diff --git a/spec/01-lexical-syntax.md b/spec/01-lexical-syntax.md index c345935941b9..718950b171a1 100644 --- a/spec/01-lexical-syntax.md +++ b/spec/01-lexical-syntax.md @@ -463,7 +463,7 @@ arbitrary, except that it may contain three or more consecutive quote characters only at the very end. Characters must not necessarily be printable; newlines or other control characters are also permitted. [Escape sequences](#escape-sequences) are -not processed, except for Unicode escapes. +not processed, except for Unicode escapes (this is deprecated since 2.13.2). > ```scala > """the present string @@ -503,8 +503,9 @@ not processed, except for Unicode escapes. 
#### Interpolated string ```ebnf -interpolatedString ::= alphaid ‘"’ {printableChar \ (‘"’ | ‘$’) | escape} ‘"’ - | alphaid ‘"""’ {[‘"’] [‘"’] char \ (‘"’ | ‘$’) | escape} {‘"’} ‘"""’ +interpolatedString ::= alphaid ‘"’ {[‘\’] interpolatedStringPart | ‘\\’ | ‘\"’} ‘"’ + | alphaid ‘"""’ {[‘"’] [‘"’] char \ (‘"’ | ‘$’) | escape} {‘"’} ‘"""’ +interpolatedStringPart ::= printableChar \ (‘"’ | ‘$’ | ‘\’) | escape escape ::= ‘$$’ | ‘$"’ | ‘$’ id @@ -514,23 +515,24 @@ alphaid ::= upper idrest ``` -Interpolated string consist of an identifier starting with a letter immediately +An interpolated string consists of an identifier starting with a letter immediately followed by a string literal. There may be no whitespace characters or comments -between the leading identifier and the opening quote ‘”’ of the string. -The string literal in a interpolated string can be standard (single quote) +between the leading identifier and the opening quote `"` of the string. +The string literal in an interpolated string can be standard (single quote) or multi-line (triple quote). -Inside a interpolated string none of the usual escape characters are interpreted -(except for unicode escapes) no matter whether the string literal is normal -(enclosed in single quotes) or multi-line (enclosed in triple quotes). -Instead, there are three new forms of dollar sign escape. +Inside an interpolated string none of the usual escape characters are interpreted +no matter whether the string literal is normal (enclosed in single quotes) or +multi-line (enclosed in triple quotes). Note that the sequence `\"` does not +close a normal string literal (enclosed in single quotes). + +There are three forms of dollar sign escape. The most general form encloses an expression in `${` and `}`, i.e. `${expr}`. The expression enclosed in the braces that follow the leading `$` character is of syntactical category BlockExpr. Hence, it can contain multiple statements, and newlines are significant. Single ‘$’-signs are not permitted in isolation -in a interpolated string. A single ‘$’-sign can still be obtained by doubling the ‘$’ -character: ‘$$’. A single ‘"’-sign in a single quoted interpolation would end the -interpolation. A single ‘"’-sign can be obtained by the sequence ‘\$"’. +in an interpolated string. A single ‘$’-sign can still be obtained by doubling the ‘$’ +character: ‘$$’. A single ‘"’-sign can be obtained by the sequence ‘\$"’. The simpler form consists of a ‘$’-sign followed by an identifier starting with a letter and followed only by letters, digits, and underscore characters, diff --git a/spec/13-syntax-summary.md b/spec/13-syntax-summary.md index 442d76adb7a4..aec631beb45f 100644 --- a/spec/13-syntax-summary.md +++ b/spec/13-syntax-summary.md @@ -60,9 +60,11 @@ stringElement ::= charNoDoubleQuoteOrNewline | escapeSeq multiLineChars ::= {[‘"’] [‘"’] charNoDoubleQuote} {‘"’} -interpolatedString - ::= alphaid ‘"’ {printableChar \ (‘"’ | ‘\$’) | escape} ‘"’ +interpolatedString + ::= alphaid ‘"’ {[‘\’] interpolatedStringPart | ‘\\’ | ‘\"’} ‘"’ | alphaid ‘"""’ {[‘"’] [‘"’] char \ (‘"’ | ‘\$’) | escape} {‘"’} ‘"""’ +interpolatedStringPart + ::= printableChar \ (‘"’ | ‘$’ | ‘\’) | escape escape ::= ‘\$\$’ | ‘\$"’ | ‘\$’ id From 024daf2b6e6c2c62bb328eba33858cc5311fb8e9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Luis=20Miguel=20Mej=C3=ADa=20Su=C3=A1rez?= Date: Sat, 21 Nov 2020 18:01:36 -0500 Subject: [PATCH 046/769] Make more annotations extend ConstantAnnotation (now that it's possible to do so, after #9463) In this case of e.g. 
`implicitNotFound`, this makes it clearer that the custom error message must be a literal value. Fixes #10424 Co-authored-by: Seth Tisue --- project/MimaFilters.scala | 10 +++++++++- src/library/scala/annotation/elidable.scala | 2 +- src/library/scala/annotation/implicitAmbiguous.scala | 2 +- src/library/scala/annotation/implicitNotFound.scala | 2 +- src/library/scala/annotation/migration.scala | 2 +- src/library/scala/deprecated.scala | 2 +- src/library/scala/deprecatedInheritance.scala | 2 +- src/library/scala/deprecatedName.scala | 3 ++- src/library/scala/deprecatedOverriding.scala | 2 +- test/files/run/t5225_2.check | 2 +- 10 files changed, 19 insertions(+), 10 deletions(-) diff --git a/project/MimaFilters.scala b/project/MimaFilters.scala index d0313ad8a3ff..71d9d7c65c03 100644 --- a/project/MimaFilters.scala +++ b/project/MimaFilters.scala @@ -17,7 +17,7 @@ object MimaFilters extends AutoPlugin { ) val mimaFilters: Seq[ProblemFilter] = Seq[ProblemFilter]( - // KEEP: we don't the reflect internal API isn't public API + // KEEP: the reflect internal API isn't public API ProblemFilters.exclude[Problem]("scala.reflect.internal.*"), // KEEP: java.util.Enumeration.asIterator only exists in later JDK versions (11 at least). If you build @@ -25,6 +25,14 @@ object MimaFilters extends AutoPlugin { // don't publish the artifact built with JDK 11 anyways ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.convert.JavaCollectionWrappers#IteratorWrapper.asIterator"), + // PR: https://github.com/scala/scala/pull/9336; remove after re-STARR + ProblemFilters.exclude[MissingTypesProblem]("scala.deprecatedOverriding"), + ProblemFilters.exclude[MissingTypesProblem]("scala.deprecatedInheritance"), + ProblemFilters.exclude[MissingTypesProblem]("scala.deprecated"), + ProblemFilters.exclude[MissingTypesProblem]("scala.annotation.elidable"), + ProblemFilters.exclude[MissingTypesProblem]("scala.annotation.implicitAmbiguous"), + ProblemFilters.exclude[MissingTypesProblem]("scala.annotation.implicitNotFound"), + ProblemFilters.exclude[MissingTypesProblem]("scala.annotation.migration"), ) override val buildSettings = Seq( diff --git a/src/library/scala/annotation/elidable.scala b/src/library/scala/annotation/elidable.scala index 7f8db33d9c4b..9d15449fac18 100644 --- a/src/library/scala/annotation/elidable.scala +++ b/src/library/scala/annotation/elidable.scala @@ -76,7 +76,7 @@ package scala.annotation * } * }}} */ -final class elidable(final val level: Int) extends scala.annotation.StaticAnnotation +final class elidable(final val level: Int) extends scala.annotation.ConstantAnnotation /** This useless appearing code was necessary to allow people to use * named constants for the elidable annotation. 
This is what it takes diff --git a/src/library/scala/annotation/implicitAmbiguous.scala b/src/library/scala/annotation/implicitAmbiguous.scala index dbe8d2ab936d..87788588c5af 100644 --- a/src/library/scala/annotation/implicitAmbiguous.scala +++ b/src/library/scala/annotation/implicitAmbiguous.scala @@ -39,4 +39,4 @@ package scala.annotation * }}} */ @meta.getter -final class implicitAmbiguous(msg: String) extends scala.annotation.StaticAnnotation +final class implicitAmbiguous(msg: String) extends scala.annotation.ConstantAnnotation diff --git a/src/library/scala/annotation/implicitNotFound.scala b/src/library/scala/annotation/implicitNotFound.scala index e3833bcd428b..9eba5c2c9f3a 100644 --- a/src/library/scala/annotation/implicitNotFound.scala +++ b/src/library/scala/annotation/implicitNotFound.scala @@ -53,4 +53,4 @@ package scala.annotation * ^ * */ -final class implicitNotFound(msg: String) extends scala.annotation.StaticAnnotation {} +final class implicitNotFound(msg: String) extends scala.annotation.ConstantAnnotation diff --git a/src/library/scala/annotation/migration.scala b/src/library/scala/annotation/migration.scala index 99e6dc253bbc..37b2a9edfda0 100644 --- a/src/library/scala/annotation/migration.scala +++ b/src/library/scala/annotation/migration.scala @@ -27,4 +27,4 @@ package scala.annotation * @param changedIn The version, in which the behaviour change was * introduced. */ -private[scala] final class migration(message: String, changedIn: String) extends scala.annotation.StaticAnnotation +private[scala] final class migration(message: String, changedIn: String) extends scala.annotation.ConstantAnnotation diff --git a/src/library/scala/deprecated.scala b/src/library/scala/deprecated.scala index 0c22f549afb6..1459cd819220 100644 --- a/src/library/scala/deprecated.scala +++ b/src/library/scala/deprecated.scala @@ -58,4 +58,4 @@ import scala.annotation.meta._ */ @getter @setter @beanGetter @beanSetter @field @deprecatedInheritance("Scheduled for being final in the future", "2.13.0") -class deprecated(message: String = "", since: String = "") extends scala.annotation.StaticAnnotation +class deprecated(message: String = "", since: String = "") extends scala.annotation.ConstantAnnotation diff --git a/src/library/scala/deprecatedInheritance.scala b/src/library/scala/deprecatedInheritance.scala index 14ccdeabc340..21e3932d97df 100644 --- a/src/library/scala/deprecatedInheritance.scala +++ b/src/library/scala/deprecatedInheritance.scala @@ -47,4 +47,4 @@ import scala.annotation.meta._ * @see [[scala.deprecatedName]] */ @getter @setter @beanGetter @beanSetter -final class deprecatedInheritance(message: String = "", since: String = "") extends scala.annotation.StaticAnnotation +final class deprecatedInheritance(message: String = "", since: String = "") extends scala.annotation.ConstantAnnotation diff --git a/src/library/scala/deprecatedName.scala b/src/library/scala/deprecatedName.scala index 24b9ac4e6ad0..ee5eafd69b9b 100644 --- a/src/library/scala/deprecatedName.scala +++ b/src/library/scala/deprecatedName.scala @@ -14,7 +14,6 @@ package scala import scala.annotation.meta._ - /** An annotation that designates that the name of a parameter is deprecated. * * Using this name in a named argument generates a deprecation warning. 
@@ -43,6 +42,8 @@ import scala.annotation.meta._ @param @deprecatedInheritance("Scheduled for being final in the future", "2.13.0") class deprecatedName(name: String = "", since: String = "") extends scala.annotation.StaticAnnotation { + // at the time we remove these constructors, we should also change this from a StaticAnnotation to + // a ConstantAnnotation; for now, the presence of auxiliary constructors blocks that change @deprecated("The parameter name should be a String, not a symbol.", "2.13.0") def this(name: Symbol, since: String) = this(name.name, since) @deprecated("The parameter name should be a String, not a symbol.", "2.13.0") def this(name: Symbol) = this(name.name, "") } diff --git a/src/library/scala/deprecatedOverriding.scala b/src/library/scala/deprecatedOverriding.scala index d88f29e53a1c..b6c75819785a 100644 --- a/src/library/scala/deprecatedOverriding.scala +++ b/src/library/scala/deprecatedOverriding.scala @@ -49,4 +49,4 @@ import scala.annotation.meta._ */ @getter @setter @beanGetter @beanSetter @deprecatedInheritance("Scheduled for being final in the future", "2.13.0") -class deprecatedOverriding(message: String = "", since: String = "") extends scala.annotation.StaticAnnotation +class deprecatedOverriding(message: String = "", since: String = "") extends scala.annotation.ConstantAnnotation diff --git a/test/files/run/t5225_2.check b/test/files/run/t5225_2.check index 477ea4eb6d41..1333b31b2347 100644 --- a/test/files/run/t5225_2.check +++ b/test/files/run/t5225_2.check @@ -1,4 +1,4 @@ { - def foo(@new elidable(0) x: Int) = ""; + def foo(@new elidable(level = 0) x: Int) = ""; () } From ae7519ba430d54bab2f41a8894e9e36318ebdbeb Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Wed, 17 Mar 2021 22:47:17 -0700 Subject: [PATCH 047/769] Typo in test --- test/files/neg/t2509-3.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/files/neg/t2509-3.scala b/test/files/neg/t2509-3.scala index c141066a94ad..619be4e439b2 100644 --- a/test/files/neg/t2509-3.scala +++ b/test/files/neg/t2509-3.scala @@ -17,7 +17,7 @@ object ZA extends Z[A] { } object XB extends X[B] { - def y(b: B) = new Y { def value = s"S{b.getClass}: BValue" } + def y(b: B) = new Y { def value = s"${b.getClass}: BValue" } } object Test { From 6ea02061738830ee1b5630c87a1212bc5b0ffe56 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Mon, 22 Mar 2021 12:23:01 -0700 Subject: [PATCH 048/769] in user-facing contexts, call it REPL not interpreter --- doc/README | 2 +- src/manual/scala/man1/scalac.scala | 2 +- src/manual/scala/man1/scaladoc.scala | 2 +- src/partest/scala/tools/partest/nest/RunnerSpec.scala | 2 +- .../scala/tools/nsc/interpreter/shell/ILoop.scala | 8 ++++---- test/files/run/repl-no-imports-no-predef.check | 2 +- test/files/run/repl-reset.check | 2 +- test/files/run/t7747-repl.check | 2 +- 8 files changed, 11 insertions(+), 11 deletions(-) diff --git a/doc/README b/doc/README index 3361044f73d4..f7d3d44ab721 100644 --- a/doc/README +++ b/doc/README @@ -9,7 +9,7 @@ We welcome contributions at https://github.com/scala/scala! 
Scala Tools ----------- -- scala Scala interactive interpreter +- scala Scala REPL (interactive shell) - scalac Scala compiler - fsc Scala resident compiler - scaladoc Scala API documentation generator diff --git a/src/manual/scala/man1/scalac.scala b/src/manual/scala/man1/scalac.scala index 337d608f5fcf..b4a83e3cbf37 100644 --- a/src/manual/scala/man1/scalac.scala +++ b/src/manual/scala/man1/scalac.scala @@ -94,7 +94,7 @@ object scalac extends Command { "Specify character encoding used by source files.", "The default value is platform-specific (Linux: " & Mono("\"UTF8\"") & ", Windows: " & Mono("\"Cp1252\"") & "). Executing the following " & - "code in the Scala interpreter will return the default value " & + "code in the Scala REPL will return the default value " & "on your system:", MBold(" scala> ") & Mono("new java.io.InputStreamReader(System.in).getEncoding"))), diff --git a/src/manual/scala/man1/scaladoc.scala b/src/manual/scala/man1/scaladoc.scala index e742c64cbd8c..675bb4ec01c1 100644 --- a/src/manual/scala/man1/scaladoc.scala +++ b/src/manual/scala/man1/scaladoc.scala @@ -124,7 +124,7 @@ object scaladoc extends Command { "Specify character encoding used by source files.", "The default value is platform-specific (Linux: " & Mono("\"UTF8\"") & ", Windows: " & Mono("\"Cp1252\"") & "). Executing the following " & - "code in the Scala interpreter will return the default value " & + "code in the Scala REPL will return the default value " & "on your system:", MBold(" scala> ") & Mono("new java.io.InputStreamReader(System.in).getEncoding")))))) diff --git a/src/partest/scala/tools/partest/nest/RunnerSpec.scala b/src/partest/scala/tools/partest/nest/RunnerSpec.scala index a83eaa209999..80c1bae94c02 100644 --- a/src/partest/scala/tools/partest/nest/RunnerSpec.scala +++ b/src/partest/scala/tools/partest/nest/RunnerSpec.scala @@ -25,7 +25,7 @@ trait RunnerSpec extends Spec with Meta.StdOpts with Interpolation { heading("Test categories:") val optPos = "pos" / "run compilation tests (success)" --? val optNeg = "neg" / "run compilation tests (failure)" --? - val optRun = "run" / "run interpreter and backend tests" --? + val optRun = "run" / "run REPL and backend tests" --? val optJvm = "jvm" / "run JVM backend tests" --? val optRes = "res" / "run resident compiler tests" --? val optScalap = "scalap" / "run scalap tests" --? 
diff --git a/src/repl-frontend/scala/tools/nsc/interpreter/shell/ILoop.scala b/src/repl-frontend/scala/tools/nsc/interpreter/shell/ILoop.scala index 202e36b25450..aece63c03b50 100644 --- a/src/repl-frontend/scala/tools/nsc/interpreter/shell/ILoop.scala +++ b/src/repl-frontend/scala/tools/nsc/interpreter/shell/ILoop.scala @@ -200,10 +200,10 @@ class ILoop(config: ShellConfig, inOverride: BufferedReader = null, cmd("load", "", "interpret lines in a file", loadCommand, fileCompletion), cmd("paste", "[-raw] [path]", "enter paste mode or paste a file", pasteCommand, fileCompletion), nullary("power", "enable power user mode", () => powerCmd()), - nullary("quit", "exit the interpreter", () => Result(keepRunning = false, None)), - cmd("replay", "[options]", "reset the repl and replay all previous commands", replayCommand, settingsCompletion), + nullary("quit", "exit the REPL", () => Result(keepRunning = false, None)), + cmd("replay", "[options]", "reset the REPL and replay all previous commands", replayCommand, settingsCompletion), cmd("require", "", "add a jar to the classpath", require), - cmd("reset", "[options]", "reset the repl to its initial state, forgetting all session entries", resetCommand, settingsCompletion), + cmd("reset", "[options]", "reset the REPL to its initial state, forgetting all session entries", resetCommand, settingsCompletion), cmd("save", "", "save replayable session to a file", saveCommand, fileCompletion), shCommand, cmd("settings", "", "update compiler options, if possible; see reset", changeSettings, settingsCompletion), @@ -512,7 +512,7 @@ class ILoop(config: ShellConfig, inOverride: BufferedReader = null, */ def resetCommand(line: String): Unit = { def run(destructive: Boolean): Unit = { - echo("Resetting interpreter state.") + echo("Resetting REPL state.") if (replayCommandStack.nonEmpty) { echo("Forgetting this session history:\n") replayCommands foreach echo diff --git a/test/files/run/repl-no-imports-no-predef.check b/test/files/run/repl-no-imports-no-predef.check index 01751daa0f5f..380a21a41ff7 100644 --- a/test/files/run/repl-no-imports-no-predef.check +++ b/test/files/run/repl-no-imports-no-predef.check @@ -251,7 +251,7 @@ scala> x1 + x2 + x3 val res35: Int = 6 scala> :reset -Resetting interpreter state. +Resetting REPL state. Forgetting this session history: 1 diff --git a/test/files/run/repl-reset.check b/test/files/run/repl-reset.check index 21acb4a8ebf1..d9541c01cc01 100644 --- a/test/files/run/repl-reset.check +++ b/test/files/run/repl-reset.check @@ -15,7 +15,7 @@ scala> x1 + x2 + x3 val res0: Int = 6 scala> :reset -Resetting interpreter state. +Resetting REPL state. Forgetting this session history: val x1 = 1 diff --git a/test/files/run/t7747-repl.check b/test/files/run/t7747-repl.check index ba77d518fc7d..9691e4db43d4 100644 --- a/test/files/run/t7747-repl.check +++ b/test/files/run/t7747-repl.check @@ -153,7 +153,7 @@ scala> x1 + x2 + x3 val res23: Int = 6 scala> :reset -Resetting interpreter state. +Resetting REPL state. Forgetting this session history: var x = 10 From a7be7063e308fcdb06fb65e8b47098f055da401c Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Wed, 10 Feb 2021 22:06:03 +0100 Subject: [PATCH 049/769] Don't pickle `@nowarn` annotations... ...by special-casing them. 
In principle we have `extends Annotation` vs `extends StaticAnnotation` for that, but sadly `ClassfileAnnotation extends StaticAnnotation`, so we don't get to choose for those :-/

Backport of 522a5c6e08
---
 src/reflect/scala/reflect/internal/AnnotationInfos.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/reflect/scala/reflect/internal/AnnotationInfos.scala b/src/reflect/scala/reflect/internal/AnnotationInfos.scala
index 29b5e21e145a..79f09e67306d 100644
--- a/src/reflect/scala/reflect/internal/AnnotationInfos.scala
+++ b/src/reflect/scala/reflect/internal/AnnotationInfos.scala
@@ -331,7 +331,7 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable =>
     /** Check whether the type or any of the arguments are erroneous */
     def isErroneous = atp.isErroneous || args.exists(_.isErroneous)
-    def isStatic = symbol isNonBottomSubClass StaticAnnotationClass
+    def isStatic = symbol.isNonBottomSubClass(StaticAnnotationClass) && symbol != NowarnClass
     /** Check whether any of the arguments mention a symbol */
     def refsSymbol(sym: Symbol) = hasArgWhich(_.symbol == sym)

From f758d79e690b4d6096b80e7a333777419da625e3 Mon Sep 17 00:00:00 2001
From: Jason Zaugg
Date: Wed, 24 Mar 2021 10:31:29 +1000
Subject: [PATCH 050/769] Avoid classfile parsing of specialized variants just to unlink them

Since 8ae0fdab, the specialization phase eagerly info transforms all of FunctionN and TupleN. This was done to let us turn off needless specialization info transforms (which incurs classfile parsing up the base classes looking for @specialized annotations) of types after the specialization tree transform is done.

However, in combination with an old fix for scala/bug#5545, we end up parsing all of the class files of all the variants, just to unlink them in favour of the info-transformed types.

I note that the test for scala/bug#5545 no longer crashes if the fix is removed. I have not investigated the reason.

This commit reworks the scala/bug#5545 fix to just unlink the stale symbols directly, rather than calling `.info` to parse them and do the same after noticing the ScalaRaw attribute.
---
 .../tools/nsc/transform/SpecializeTypes.scala | 14 +++++++++++---
 1 file changed, 11 insertions(+), 3 deletions(-)

diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index 7e0b0af64406..a5c117af8486 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -603,14 +603,22 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
      * specialized subclass of "clazz" throughout this file.
      */
+    val clazzName = specializedName(clazz, env0).toTypeName
     // scala/bug#5545: Eliminate classes with the same name loaded from the bytecode already present - all we need to do is
     // to force .info on them, as their lazy type will be evaluated and the symbols will be eliminated. Unfortunately
     // evaluating the info after creating the specialized class will mess the specialized class signature, so we'd
-    // better evaluate it before creating the new class symbol
-    val clazzName = specializedName(clazz, env0).toTypeName
+    // better unlink the class-file backed symbol before creating the new class symbol
     val bytecodeClazz = clazz.owner.info.decl(clazzName)
     // debuglog("Specializing " + clazz + ", but found " + bytecodeClazz + " already there")
-    bytecodeClazz.info
+    def unlink(sym: Symbol): Unit = if (sym != NoSymbol) {
+      devWarningIf(sym.hasCompleteInfo)("Stale specialized symbol has been accessed: " + sym)
+      sym.setInfo(NoType)
+      sym.owner.info.decls.unlink(sym)
+    }
+    unlink(bytecodeClazz)
+    val companionModule = bytecodeClazz.companionModule
+    unlink(companionModule.moduleClass)
+    unlink(companionModule)
     val sClass = clazz.owner.newClass(clazzName, clazz.pos, (clazz.flags | SPECIALIZED) & ~CASE)
     sClass.setAnnotations(clazz.annotations) // scala/bug#8574 important that the subclass picks up @SerialVersionUID, @strictfp, etc.

From d78040a3944cef4cac96c4c7043b89ee4da99cae Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E6=A2=A6=E5=A2=83=E8=BF=B7=E7=A6=BB?=
Date: Fri, 19 Mar 2021 10:51:48 +0800
Subject: [PATCH 051/769] Fixes scala/bug#12201

---
 .../scala/tools/nsc/transform/CleanUp.scala  |  7 +++++
 .../scala/reflect/internal/Definitions.scala | 12 ++++++++
 test/files/instrumented/t12201.check         |  3 ++
 test/files/instrumented/t12201.scala         | 29 +++++++++++++++++++
 4 files changed, 51 insertions(+)
 create mode 100644 test/files/instrumented/t12201.check
 create mode 100644 test/files/instrumented/t12201.scala

diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
index cbb403ddc465..aaec0a0a314b 100644
--- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala
+++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
@@ -620,6 +620,13 @@ abstract class CleanUp extends Statics with Transform with ast.TreeDSL {
       case Apply(appMeth @ Select(appMethQual, _), elem0 :: Apply(wrapArrayMeth, (rest @ ArrayValue(elemtpt, _)) :: Nil) :: Nil)
       if wrapArrayMeth.symbol == wrapVarargsArrayMethod(elemtpt.tpe) && appMeth.symbol == ArrayModule_apply(elemtpt.tpe) && treeInfo.isQualifierSafeToElide(appMethQual) =>
         treeCopy.ArrayValue(rest, rest.elemtpt, elem0 :: rest.elems).transform(this)
+      // See scala/bug#12201, should be rewritten as a primitive Array.
+ // Match Array + case Apply(appMeth @ Select(appMethQual, _), Apply(wrapRefArrayMeth, StripCast(ArrayValue(elemtpt, elems)) :: Nil) :: _ :: Nil) + if appMeth.symbol == ArrayModule_genericApply && treeInfo.isQualifierSafeToElide(appMethQual) && currentRun.runDefinitions.primitiveWrapArrayMethod.contains(wrapRefArrayMeth.symbol) => + localTyper.typedPos(elemtpt.pos) { + ArrayValue(TypeTree(elemtpt.tpe), elems) + } transform this case Apply(appMeth @ Select(appMethQual, _), elem :: (nil: RefTree) :: Nil) if nil.symbol == NilModule && appMeth.symbol == ArrayModule_apply(elem.tpe.widen) && treeInfo.isExprSafeToInline(nil) && treeInfo.isQualifierSafeToElide(appMethQual) => localTyper.typedPos(elem.pos) { diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index 56e7445a0cad..1727c94fe8d3 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -1784,6 +1784,18 @@ trait Definitions extends api.StandardDefinitions { lazy val arrayClassMethod = getMemberMethod(ScalaRunTimeModule, nme.arrayClass) lazy val wrapVarargsRefArrayMethod = getMemberMethod(ScalaRunTimeModule, nme.wrapRefArray) lazy val genericWrapVarargsRefArrayMethod = getMemberMethod(ScalaRunTimeModule, nme.genericWrapArray) + lazy val primitiveWrapArrayMethod = Seq[Symbol]( + getMemberMethod(ScalaRunTimeModule, nme.wrapBooleanArray), + getMemberMethod(ScalaRunTimeModule, nme.wrapByteArray), + getMemberMethod(ScalaRunTimeModule, nme.wrapCharArray), + getMemberMethod(ScalaRunTimeModule, nme.wrapIntArray), + getMemberMethod(ScalaRunTimeModule, nme.wrapDoubleArray), + getMemberMethod(ScalaRunTimeModule, nme.wrapFloatArray), + getMemberMethod(ScalaRunTimeModule, nme.wrapLongArray), + getMemberMethod(ScalaRunTimeModule, nme.wrapShortArray), + getMemberMethod(ScalaRunTimeModule, nme.wrapUnitArray) + ) + lazy val RuntimeStatics_ioobe = getMemberMethod(RuntimeStaticsModule, nme.ioobe) diff --git a/test/files/instrumented/t12201.check b/test/files/instrumented/t12201.check new file mode 100644 index 000000000000..ba4c268ba7ac --- /dev/null +++ b/test/files/instrumented/t12201.check @@ -0,0 +1,3 @@ +Method call statistics: + 1 scala/runtime/BoxedUnit.()V + 1 scala/runtime/BoxedUnit.()V diff --git a/test/files/instrumented/t12201.scala b/test/files/instrumented/t12201.scala new file mode 100644 index 000000000000..a5a1d1860bdb --- /dev/null +++ b/test/files/instrumented/t12201.scala @@ -0,0 +1,29 @@ +import scala.tools.partest.instrumented.Instrumentation._ + +object Test { + def main(args: Array[String]): Unit = { + startProfiling() + + // to optimized + val x = Array[Double](1) + val y = Array[Double](1.0) + + // Currently correctly optimized + val i = Array(1.0) + val j: Array[Double] = Array(1) + + //others case + val a: Array[Double] = Array[Double](1.0) + val b: Array[Double] = Array[Double](1) + val c: Array[Double] = Array[Double](1: Double) + val d: Array[Double] = Array(1: Double) + val e = Array(1: Double) + val f = Array(1: Int) + val g = Array[Int](1) + val h = Array(1) + val k = Array[Unit](()) + + stopProfiling() + printStatistics() + } +} From 51c0eb1d8da327d2091b3663c06f7786b20e0c9e Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Wed, 24 Mar 2021 16:26:24 +0100 Subject: [PATCH 052/769] fix #12357: enter late all inline methods from TASTy The issue here was that in order to replace a Scala 3 macro with a matching Scala 2 macro we have to wait until we have seen all definitions in the scope 
- before this PR, only Scala 3 macros were considered for eviction, now all inline methods are. --- .../tools/nsc/tasty/bridge/ContextOps.scala | 2 +- .../scala/tools/nsc/tasty/bridge/SymbolOps.scala | 1 - .../scala/tools/nsc/tasty/bridge/TreeOps.scala | 1 + test/tasty/neg/src-2/TestHello.check | 5 ++++- test/tasty/neg/src-2/TestHello_2.check | 7 +++++++ test/tasty/neg/src-2/TestHello_2_fail.scala | 6 ++++++ test/tasty/neg/src-2/TestHello_fail.scala | 1 + test/tasty/neg/src-3/HelloWorld.scala | 3 ++- .../run/src-2/tastytest/TestInlineCompat.scala | 7 +++++++ .../run/src-2/tastytest/TestInlineCompat2.scala | 7 +++++++ .../tasty/run/src-3/tastytest/InlineCompat.scala | 16 ++++++++++++++++ .../run/src-3/tastytest/InlineCompat2.scala | 16 ++++++++++++++++ 12 files changed, 68 insertions(+), 4 deletions(-) create mode 100644 test/tasty/neg/src-2/TestHello_2.check create mode 100644 test/tasty/neg/src-2/TestHello_2_fail.scala create mode 100644 test/tasty/run/src-2/tastytest/TestInlineCompat.scala create mode 100644 test/tasty/run/src-2/tastytest/TestInlineCompat2.scala create mode 100644 test/tasty/run/src-3/tastytest/InlineCompat.scala create mode 100644 test/tasty/run/src-3/tastytest/InlineCompat2.scala diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala index 7ceb9c3a082e..de66f846786e 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala @@ -146,7 +146,7 @@ trait ContextOps { self: TastyUniverse => final def ignoreAnnotations: Boolean = u.settings.YtastyNoAnnotations final def verboseDebug: Boolean = u.settings.debug - def requiresLatentEntry(decl: Symbol): Boolean = decl.isScala3Macro + def requiresLatentEntry(decl: Symbol): Boolean = decl.isScala3Inline def neverEntered(decl: Symbol): Boolean = decl.isPureMixinCtor def canEnterOverload(decl: Symbol): Boolean = { diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/SymbolOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/SymbolOps.scala index 3127ede4df3f..2dccefa5a129 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/SymbolOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/SymbolOps.scala @@ -46,7 +46,6 @@ trait SymbolOps { self: TastyUniverse => implicit final class SymbolDecorator(val sym: Symbol) { - def isScala3Macro: Boolean = repr.originalFlagSet.is(Inline | Macro) def isScala3Inline: Boolean = repr.originalFlagSet.is(Inline) def isScala2Macro: Boolean = repr.originalFlagSet.is(Erased | Macro) diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/TreeOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/TreeOps.scala index be9ba985c945..6f6edd0de981 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/TreeOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/TreeOps.scala @@ -116,6 +116,7 @@ trait TreeOps { self: TastyUniverse => val sym = tree.tpe match { case u.SingleType(_, sym) => sym case u.TypeRef(_, sym, _) => sym + case u.ThisType(sym) => sym case x => throw new MatchError(x) } if (tree.tpe.prefix === u.NoPrefix && (sym.hasFlag(Flags.PACKAGE) && !sym.isPackageObjectOrClass || sym.isLocalToBlock)) { diff --git a/test/tasty/neg/src-2/TestHello.check b/test/tasty/neg/src-2/TestHello.check index 947231704f7c..7bc3fcecd052 100644 --- a/test/tasty/neg/src-2/TestHello.check +++ b/test/tasty/neg/src-2/TestHello.check @@ -32,4 +32,7 @@ List's type parameters do not match type F's expected parameters: type A is covariant, but type _ is declared 
contravariant HelloWorld.higherBounded6[List] ^ -8 errors +TestHello_fail.scala:12: error: Unsupported Scala 3 inline value msg1; found in object helloworld.HelloWorld. + HelloWorld.msg1 + ^ +9 errors diff --git a/test/tasty/neg/src-2/TestHello_2.check b/test/tasty/neg/src-2/TestHello_2.check new file mode 100644 index 000000000000..09ba893b845c --- /dev/null +++ b/test/tasty/neg/src-2/TestHello_2.check @@ -0,0 +1,7 @@ +TestHello_2_fail.scala:4: error: Unsupported Scala 3 inline value msg1; found in object helloworld.HelloWorld. + HelloWorld.acceptsOnlyMsg1(HelloWorld.msg1) + ^ +TestHello_2_fail.scala:5: error: Unsupported Scala 3 inline method inlineMethod; found in object helloworld.HelloWorld. + HelloWorld.inlineMethod(1) + ^ +2 errors diff --git a/test/tasty/neg/src-2/TestHello_2_fail.scala b/test/tasty/neg/src-2/TestHello_2_fail.scala new file mode 100644 index 000000000000..99caab29a3dd --- /dev/null +++ b/test/tasty/neg/src-2/TestHello_2_fail.scala @@ -0,0 +1,6 @@ +package helloworld + +object TestHello_2 { + HelloWorld.acceptsOnlyMsg1(HelloWorld.msg1) + HelloWorld.inlineMethod(1) +} diff --git a/test/tasty/neg/src-2/TestHello_fail.scala b/test/tasty/neg/src-2/TestHello_fail.scala index 62e686411202..5920eeaed244 100644 --- a/test/tasty/neg/src-2/TestHello_fail.scala +++ b/test/tasty/neg/src-2/TestHello_fail.scala @@ -9,4 +9,5 @@ object TestHello { HelloWorld.higherBounded5[Show] HelloWorld.higherBounded6[List] + HelloWorld.msg1 } diff --git a/test/tasty/neg/src-3/HelloWorld.scala b/test/tasty/neg/src-3/HelloWorld.scala index 3ea81a78f0a4..3f03c01e2925 100644 --- a/test/tasty/neg/src-3/HelloWorld.scala +++ b/test/tasty/neg/src-3/HelloWorld.scala @@ -1,11 +1,12 @@ package helloworld object HelloWorld { - final val msg1 = "Hello, World!" + inline val msg1 = "Hello, World!" def acceptsOnlyMsg1(m: msg1.type): String = m + m def higherBounded2[T <: List[_ <: Int]](f: T): T = f def higherBounded3[T <: List[List[_ <: Int]]](f: T): T = f def higherBounded4[T <: Either[_ <: Int, String]](f: T): T = f def higherBounded5[F[+_]] = ??? def higherBounded6[F[-_]] = ??? 
+ inline def inlineMethod(inline i: Int): Int = i } diff --git a/test/tasty/run/src-2/tastytest/TestInlineCompat.scala b/test/tasty/run/src-2/tastytest/TestInlineCompat.scala new file mode 100644 index 000000000000..4c3c9612c86e --- /dev/null +++ b/test/tasty/run/src-2/tastytest/TestInlineCompat.scala @@ -0,0 +1,7 @@ +package tastytest + +import InlineCompat._ + +object TestInlineCompat extends Suite("TestInlineCompat") { + test(assert(foo("Hello, World!") == "Hello, World!")) +} diff --git a/test/tasty/run/src-2/tastytest/TestInlineCompat2.scala b/test/tasty/run/src-2/tastytest/TestInlineCompat2.scala new file mode 100644 index 000000000000..54e31e954c64 --- /dev/null +++ b/test/tasty/run/src-2/tastytest/TestInlineCompat2.scala @@ -0,0 +1,7 @@ +package tastytest + +import InlineCompat2._ + +object TestInlineCompat2 extends Suite("TestInlineCompat2") { + test(assert(foo("Hello, World!") == "Hello, World!")) +} diff --git a/test/tasty/run/src-3/tastytest/InlineCompat.scala b/test/tasty/run/src-3/tastytest/InlineCompat.scala new file mode 100644 index 000000000000..286a30dd0f46 --- /dev/null +++ b/test/tasty/run/src-3/tastytest/InlineCompat.scala @@ -0,0 +1,16 @@ +package tastytest + +import scala.language.experimental.macros + +import scala.reflect.macros.blackbox.Context + +object InlineCompat { + + def foo(code: String): String = macro InlineCompatScala2Macro.foo + inline def foo(inline code: String): String = code // inline method, not macro + +} + +object InlineCompatScala2Macro { + def foo(c: Context)(code: c.Tree): c.Tree = code +} diff --git a/test/tasty/run/src-3/tastytest/InlineCompat2.scala b/test/tasty/run/src-3/tastytest/InlineCompat2.scala new file mode 100644 index 000000000000..c6fcbd6090fa --- /dev/null +++ b/test/tasty/run/src-3/tastytest/InlineCompat2.scala @@ -0,0 +1,16 @@ +package tastytest + +import scala.language.experimental.macros + +import scala.reflect.macros.blackbox.Context + +object InlineCompat2 { + + def foo(code: String): String = macro InnerScala2MacroImpl.fooImpl + inline def foo(inline code: String): String = code // inline method, not macro + + object InnerScala2MacroImpl { + def fooImpl(c: Context)(code: c.Tree): c.Tree = code + } + +} From 9bb0abf6bc2ffc69f79bf0dc87f1a568b065a86f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E6=A2=A6=E5=A2=83=E8=BF=B7=E7=A6=BB?= Date: Thu, 25 Mar 2021 17:47:25 +0800 Subject: [PATCH 053/769] test case for scala/bug#7994 --- test/files/jvm/t7994s.check | 4 ++++ test/files/jvm/t7994s.scala | 12 ++++++++++++ 2 files changed, 16 insertions(+) create mode 100644 test/files/jvm/t7994s.check create mode 100644 test/files/jvm/t7994s.scala diff --git a/test/files/jvm/t7994s.check b/test/files/jvm/t7994s.check new file mode 100644 index 000000000000..5f68d930550c --- /dev/null +++ b/test/files/jvm/t7994s.check @@ -0,0 +1,4 @@ +Test$$anon$1 +null +Test$$anon$1$$anon$2 +null diff --git a/test/files/jvm/t7994s.scala b/test/files/jvm/t7994s.scala new file mode 100644 index 000000000000..36b8068018a5 --- /dev/null +++ b/test/files/jvm/t7994s.scala @@ -0,0 +1,12 @@ +object Test { + def main(args: Array[String]): Unit = { + val o = new MyTest() { + val i: MyTest = new MyTest() {} + } + } +} + +class MyTest { + println(this.getClass.getName) + println(this.getClass.getDeclaringClass) +} \ No newline at end of file From e6be22c68e75f72db8df649d1d72b5391beb2402 Mon Sep 17 00:00:00 2001 From: Martijn Hoekstra Date: Tue, 23 Mar 2021 20:14:17 +0100 Subject: [PATCH 054/769] clarify what it means to 'usually' evaluate in that order --- 
spec/06-expressions.md | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/spec/06-expressions.md b/spec/06-expressions.md index 6acff3bd3e6d..6dc8ba20c6ed 100644 --- a/spec/06-expressions.md +++ b/spec/06-expressions.md @@ -261,13 +261,13 @@ If ´f´ has some value type, the application is taken to be equivalent to `´f i.e. the application of an `apply` method defined by ´f´. The value `´f´` is applicable to the given arguments if `´f´.apply` is applicable. -Evaluation of `´f´(´e_1 , \ldots , e_n´)` usually entails evaluation of -´f´ and ´e_1 , \ldots , e_n´ in that order. Each argument expression -is converted to the type of its corresponding formal parameter. After -that, the application is rewritten to the function's right hand side, -with actual arguments substituted for formal parameters. The result -of evaluating the rewritten right-hand side is finally converted to -the function's declared result type, if one is given. +The application `´f´(´e_1 , \ldots , e_n´)` evaluates ´f´ and then each argument +´e_1 , \ldots , e_n´ from left to right, except for arguments that correspond to +a by-name parameter (see below). Each argument expression is converted to the +type of its corresponding formal parameter. After that, the application is +rewritten to the function's right hand side, with actual arguments substituted +for formal parameters. The result of evaluating the rewritten right-hand side +is finally converted to the function's declared result type, if one is given. The case of a formal parameter with a parameterless method type `=> ´T´` is treated specially. In this case, the From decec2ea35c3762189378e989091b26425ed0b59 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E6=A2=A6=E5=A2=83=E8=BF=B7=E7=A6=BB?= Date: Sat, 27 Mar 2021 10:30:29 +0800 Subject: [PATCH 055/769] Only when all methods are Deprecated should they be displayed `Deprecated` to users --- .../interpreter/shell/ReplCompletion.scala | 19 ++++++++++++-- .../nsc/interpreter/CompletionTest.scala | 26 +++++++++++++++++-- 2 files changed, 41 insertions(+), 4 deletions(-) diff --git a/src/repl-frontend/scala/tools/nsc/interpreter/shell/ReplCompletion.scala b/src/repl-frontend/scala/tools/nsc/interpreter/shell/ReplCompletion.scala index 3baa8d1a66e0..afbc38103e4d 100644 --- a/src/repl-frontend/scala/tools/nsc/interpreter/shell/ReplCompletion.scala +++ b/src/repl-frontend/scala/tools/nsc/interpreter/shell/ReplCompletion.scala @@ -57,8 +57,23 @@ class ReplCompletion(intp: Repl, val accumulator: Accumulator = new Accumulator) case _ => // under JLine 3, we no longer use the tabCount concept, so tabCount is always 1 // which always gives us all completions - val (c, r) = result.completionCandidates(tabCount = 1) - CompletionResult(buf, c, r) + val (c, r) = result.completionCandidates(tabCount = 1) + // scala/bug#12238 + // Currently, only when all methods are Deprecated should they be displayed `Deprecated` to users. Only handle result of PresentationCompilation#toCandidates. + // We don't handle result of PresentationCompilation#defStringCandidates, because we need to show the deprecated here. 
+ if (r.nonEmpty && r.forall(!_.defString.startsWith("def"))) { + val groupByDef = r.groupBy(_.defString) + val allOverrideIsUniversal = groupByDef.filter(f => f._2.forall(_.isUniversal)).keySet + val allOverrideIsDeprecated = groupByDef.filter(f => f._2.forall(_.isDeprecated)).keySet + def isOverrideMethod(candidate: CompletionCandidate): Boolean = groupByDef(candidate.defString).size > 1 + val rewriteDecr = r.map(candidate => { + // If not all overloaded methods are deprecated, but they are overloaded methods, they (all) should be set to false. + val isUniv = if (!allOverrideIsUniversal.contains(candidate.defString) && isOverrideMethod(candidate)) false else candidate.isUniversal + val isDepr = if (!allOverrideIsDeprecated.contains(candidate.defString) && isOverrideMethod(candidate)) false else candidate.isDeprecated + candidate.copy(isUniversal = isUniv, isDeprecated = isDepr) + }) + CompletionResult(buf, c, rewriteDecr) + } else CompletionResult(buf, c, r) } } finally result.cleanup() } diff --git a/test/junit/scala/tools/nsc/interpreter/CompletionTest.scala b/test/junit/scala/tools/nsc/interpreter/CompletionTest.scala index 0ce5a40ab4f8..870b9e987bb1 100644 --- a/test/junit/scala/tools/nsc/interpreter/CompletionTest.scala +++ b/test/junit/scala/tools/nsc/interpreter/CompletionTest.scala @@ -238,6 +238,28 @@ class CompletionTest { assertTrue(candidates2.forall(_.isDeprecated)) } + @Test + def isDeprecatedOverrideMethod(): Unit = { + val (completer, _, _) = interpretLines( + """object Stale { def oldie(i: Int) = ???; @deprecated("","") def oldie = ??? }""" + ) + val candidates1 = completer.complete("Stale.ol").candidates + assertEquals(2, candidates1.size) + assertEquals(candidates1.head.isDeprecated, false) + assertEquals(candidates1.last.isDeprecated, false) + } + + @Test + def isDeprecatedOverrideMethodDefString(): Unit = { + val (completer, _, _) = interpretLines( + """object Stale { def oldie(i: Int) = ???; @deprecated("","") def oldie = ??? 
}""" + ) + val candidates1 = completer.complete("Stale.oldie").candidates + assertEquals(3, candidates1.size) + assertEquals(candidates1.filter(_.isDeprecated).map(_.defString.contains("deprecated")).head, true) + assertEquals(candidates1.last.isDeprecated, false) + } + @Test def isDeprecatedInMethodDesc(): Unit = { val (completer, _, _) = interpretLines( @@ -246,10 +268,10 @@ class CompletionTest { ) val candidates1 = completer.complete("Stale.oldie").candidates assertEquals(2, candidates1.size) // When exactly matched, there is an empty character - assertTrue(candidates1.last.defString.contains("deprecated")) + assertTrue(candidates1.filter(_.defString.contains("oldie")).head.defString.contains("deprecated")) val candidates2 = completer.complete("Stuff.that").candidates assertEquals(2, candidates2.size) - assertTrue(candidates2.last.defString.contains("deprecated")) + assertTrue(candidates2.filter(_.defString.contains("that")).head.defString.contains("deprecated")) } @Test From a6a9cbe26894de7fe2ad8f5f0847169170d2d96a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E6=A2=A6=E5=A2=83=E8=BF=B7=E7=A6=BB?= Date: Mon, 29 Mar 2021 10:17:47 +0800 Subject: [PATCH 056/769] handle .inputtrc scala/bug#12269 --- build.sbt | 1 + .../tools/nsc/interpreter/jline/Reader.scala | 36 ++++++++++++++++++- 2 files changed, 36 insertions(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index f1d5434c5130..648e3f002253 100644 --- a/build.sbt +++ b/build.sbt @@ -487,6 +487,7 @@ lazy val compiler = configureAsSubproject(project) |org.jline.terminal.impl.jna.*;resolution:=optional |org.jline.terminal.spi;resolution:=optional |org.jline.utils;resolution:=optional + |org.jline.builtins;resolution:=optional |scala.*;version="$${range;[==,=+);$${ver}}" |*""".stripMargin.linesIterator.mkString(","), "Class-Path" -> "scala-reflect.jar scala-library.jar" diff --git a/src/repl-frontend/scala/tools/nsc/interpreter/jline/Reader.scala b/src/repl-frontend/scala/tools/nsc/interpreter/jline/Reader.scala index d03cb7c83de8..7302966ac16d 100644 --- a/src/repl-frontend/scala/tools/nsc/interpreter/jline/Reader.scala +++ b/src/repl-frontend/scala/tools/nsc/interpreter/jline/Reader.scala @@ -13,13 +13,19 @@ package scala.tools.nsc.interpreter package jline +import org.jline.builtins.InputRC import org.jline.reader.Parser.ParseContext -import org.jline.reader.impl.{DefaultParser, LineReaderImpl} import org.jline.reader._ +import org.jline.reader.impl.{DefaultParser, LineReaderImpl} import org.jline.terminal.Terminal +import java.io.{ByteArrayInputStream, File} +import java.net.{MalformedURLException, URL} import java.util.{List => JList} +import scala.io.Source import scala.tools.nsc.interpreter.shell.{Accumulator, ShellConfig} +import scala.util.Using +import scala.util.control.NonFatal /** A Reader that delegates to JLine3. 
*/ @@ -68,6 +74,31 @@ object Reader { System.setProperty(LineReader.PROP_SUPPORT_PARSEDLINE, java.lang.Boolean.TRUE.toString()) + def inputrcFileUrl(): Option[URL] = { + sys.props + .get("jline.inputrc") + .flatMap { path => + try Some(new URL(path)) + catch { + case _: MalformedURLException => + Some(new File(path).toURI.toURL) + } + }.orElse { + sys.props.get("user.home").map { home => + val f = new File(home).toPath.resolve(".inputrc").toFile + (if (f.isFile) f else new File("/etc/inputrc")).toURI.toURL + } + } + } + + def urlByteArray(url: URL): Array[Byte] = { + Using.resource(Source.fromURL(url).bufferedReader()) { + bufferedReader => + LazyList.continually(bufferedReader.read).takeWhile(_ != -1).map(_.toByte).toArray + } + } + + lazy val inputrcFileContents: Option[Array[Byte]] = inputrcFileUrl().map(in => urlByteArray(in)) val jlineTerminal = TerminalBuilder.builder().jna(true).build() val completer = new Completion(completion) val parser = new ReplParser(repl) @@ -94,6 +125,9 @@ object Reader { } val reader = builder.build() + try inputrcFileContents.foreach(f => InputRC.configure(reader, new ByteArrayInputStream(f))) catch { + case NonFatal(_) => + } //ignore locally { import LineReader._ // VIINS, VICMD, EMACS From 09b5ded130587e5336a0686e1b24a6536580c7e3 Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Tue, 30 Mar 2021 14:15:08 +0200 Subject: [PATCH 057/769] Support `case` in pattern bindings under -Xsource:3 Just like in Scala 3.0, adding this keyword doesn't change anything, but it will be required in future versions of Scala 3 for non-exhaustive patterns in a for comprehension. We would like to start issuing warnings by default in Scala 3 for code which does not use `case` in those situations, but to not hamper cross-compilation we need Scala 2 to also support that keyword. 
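As a small illustration, mirroring the new test file for-comprehension-case-future.scala added
below, the keyword is accepted without changing the meaning of the generator; elements that do
not match the pattern are filtered out exactly as before:

    // compiles on Scala 2 with -Xsource:3
    val a =
      for {
        case Some(x) <- List(Some(1), None)
        y = x + 1
      } yield x + y   // the None element is filtered out, so this is List(3)
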
For details, see: https://dotty.epfl.ch/docs/reference/changed-features/pattern-bindings.html --- .../scala/tools/nsc/ast/parser/Parsers.scala | 8 ++++++- .../scala/tools/nsc/ast/parser/Scanners.scala | 10 ++++++++ .../neg/for-comprehension-case-future.check | 7 ++++++ .../neg/for-comprehension-case-future.scala | 24 +++++++++++++++++++ test/files/neg/for-comprehension-case.check | 13 ++++++++++ test/files/neg/for-comprehension-case.scala | 14 +++++++++++ 6 files changed, 75 insertions(+), 1 deletion(-) create mode 100644 test/files/neg/for-comprehension-case-future.check create mode 100644 test/files/neg/for-comprehension-case-future.scala create mode 100644 test/files/neg/for-comprehension-case.check create mode 100644 test/files/neg/for-comprehension-case.scala diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 055ed8c8fb39..7866f6d40b5d 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -1926,6 +1926,12 @@ self => */ def generator(eqOK: Boolean, allowNestedIf: Boolean = true): List[Tree] = { val start = in.offset + val hasCase = in.token == CASE + if (hasCase) { + if (!currentRun.isScala3) syntaxError(in.offset, s"`case` keyword in for comprehension requires the -Xsource:3 flag.") + in.skipCASE() + } + val hasVal = in.token == VAL if (hasVal) in.nextToken() @@ -1944,7 +1950,7 @@ self => else syntaxError(in.offset, msg("unsupported", "just remove `val`")) } - if (hasEq && eqOK) in.nextToken() + if (hasEq && eqOK && !hasCase) in.nextToken() else accept(LARROW) val rhs = expr() diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala index a4f8efc43eea..0f41d3903c16 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala @@ -393,6 +393,16 @@ trait Scanners extends ScannersCommon { case _ => } + /** Advance beyond a case token without marking the CASE in sepRegions. + * This method should be called to skip beyond CASE tokens that are + * not part of matches, i.e. no ARROW is expected after them. + */ + def skipCASE(): Unit = { + assert(token == CASE, s"Internal error: skipCASE() called on non-case token $token") + nextToken() + sepRegions = sepRegions.tail + } + /** Produce next token, filling TokenData fields of Scanner. */ def nextToken(): Unit = { diff --git a/test/files/neg/for-comprehension-case-future.check b/test/files/neg/for-comprehension-case-future.check new file mode 100644 index 000000000000..9ce9a9456882 --- /dev/null +++ b/test/files/neg/for-comprehension-case-future.check @@ -0,0 +1,7 @@ +for-comprehension-case-future.scala:22: error: '<-' expected but '=' found. 
+ case y = x + 1 + ^ +for-comprehension-case-future.scala:23: error: illegal start of simple expression + } yield x + y + ^ +2 errors diff --git a/test/files/neg/for-comprehension-case-future.scala b/test/files/neg/for-comprehension-case-future.scala new file mode 100644 index 000000000000..05602e537759 --- /dev/null +++ b/test/files/neg/for-comprehension-case-future.scala @@ -0,0 +1,24 @@ +// scalac: -Xsource:3 +// +class A { + // ok + val a = + for { + case Some(x) <- List(Some(1), None) + y = x + 1 + } yield x + y + + // ok + val b = + for { + Some(x) <- List(Some(1), None) + Some(y) <- List(None, Some(2)) + } yield x+y + + // fail + val c = + for { + case Some(x) <- List(Some(1), None) + case y = x + 1 + } yield x + y +} diff --git a/test/files/neg/for-comprehension-case.check b/test/files/neg/for-comprehension-case.check new file mode 100644 index 000000000000..2e86e5d367b0 --- /dev/null +++ b/test/files/neg/for-comprehension-case.check @@ -0,0 +1,13 @@ +for-comprehension-case.scala:5: error: `case` keyword in for comprehension requires the -Xsource:3 flag. + case Some(x) <- List(Some(1), None) + ^ +for-comprehension-case.scala:12: error: `case` keyword in for comprehension requires the -Xsource:3 flag. + case y = x + 1 + ^ +for-comprehension-case.scala:12: error: '<-' expected but '=' found. + case y = x + 1 + ^ +for-comprehension-case.scala:13: error: illegal start of simple expression + } yield x+y + ^ +4 errors diff --git a/test/files/neg/for-comprehension-case.scala b/test/files/neg/for-comprehension-case.scala new file mode 100644 index 000000000000..55e8d44a40e3 --- /dev/null +++ b/test/files/neg/for-comprehension-case.scala @@ -0,0 +1,14 @@ +class A { + // fail + val a = + for { + case Some(x) <- List(Some(1), None) + } yield x + + // fail + val b = + for { + Some(x) <- List(Some(1), None) + case y = x + 1 + } yield x+y +} From ab1818b7c121918e7fcf5a4f328926cf7c49d1d3 Mon Sep 17 00:00:00 2001 From: Anatolii Kmetiuk Date: Tue, 30 Mar 2021 21:10:45 +0200 Subject: [PATCH 058/769] Upgrade Dotty to 3.0.0-RC2 --- project/DottySupport.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/DottySupport.scala b/project/DottySupport.scala index d234470addbc..369fa420d31a 100644 --- a/project/DottySupport.scala +++ b/project/DottySupport.scala @@ -22,7 +22,7 @@ object TastySupport { * Dotty in .travis.yml. */ object DottySupport { - val dottyVersion = "3.0.0-RC1" + val dottyVersion = "3.0.0-RC2" val compileWithDotty: Boolean = Option(System.getProperty("scala.build.compileWithDotty")).map(_.toBoolean).getOrElse(false) lazy val commonSettings = Seq( From 3c5f6aa621e102c6925da135f30c2dd60a760794 Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Wed, 31 Mar 2021 15:16:54 +0200 Subject: [PATCH 059/769] Support `?` as wildcard marker under -Xsource:3 Like in Scala 3.0, this allows `?` to be used as a type argument in all situations where `_` could be used as a wildcard previously. This should allow us to deprecate the use of `_` as a wildcard in Scala 3 to be able to eventually repurpose it as explained in http://dotty.epfl.ch/docs/reference/changed-features/wildcards.html This is a source-breaking change since a type named `?` is legal in Scala 2 (but not in Scala 3 unless -source 3.0-migration is used). `?` also has a special meaning when the kind-projector plugin is used, but that usage has been deprecated in favor of `*` for a while now. 
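For illustration, the lines below are borrowed from the new wildcards-future.scala test; compiled
with -Xsource:3 on Scala 2 they mean exactly the same as their `_` counterparts:

    val xs: List[?] = List(1, 2, 3)               // previously written List[_]
    val ys: Map[? <: AnyRef, ? >: Null] = Map()   // bounded wildcards also work
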
--- .../scala/tools/nsc/ast/parser/Parsers.scala | 32 +++++++++++-------- .../scala/reflect/internal/StdNames.scala | 1 + test/files/pos/wildcards-future.scala | 21 ++++++++++++ 3 files changed, 40 insertions(+), 14 deletions(-) create mode 100644 test/files/pos/wildcards-future.scala diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 055ed8c8fb39..22e4dc86e691 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -705,6 +705,10 @@ self => def isRawBar = isRawIdent && in.name == raw.BAR def isRawIdent = in.token == IDENTIFIER + def isWildcardType = + in.token == USCORE || + settings.isScala3 && isRawIdent && in.name == raw.QMARK + def isIdent = in.token == IDENTIFIER || in.token == BACKQUOTED_IDENT def isMacro = in.token == IDENTIFIER && in.name == nme.MACROkw @@ -1105,12 +1109,14 @@ self => } else atPos(start)(makeSafeTupleType(inParens(types()))) - case USCORE => wildcardType(in.skipToken()) case _ => - path(thisOK = false, typeOK = true) match { - case r @ SingletonTypeTree(_) => r - case r => convertToTypeId(r) - } + if (isWildcardType) + wildcardType(in.skipToken()) + else + path(thisOK = false, typeOK = true) match { + case r @ SingletonTypeTree(_) => r + case r => convertToTypeId(r) + } }) } } @@ -1976,18 +1982,16 @@ self => final def functionArgType(): Tree = argType() final def argType(): Tree = { val start = in.offset - in.token match { - case USCORE => + if (isWildcardType) { in.nextToken() if (in.token == SUBTYPE || in.token == SUPERTYPE) wildcardType(start) else atPos(start) { Bind(tpnme.WILDCARD, EmptyTree) } - case _ => - typ() match { - case Ident(name: TypeName) if nme.isVariableName(name) => - atPos(start) { Bind(name, EmptyTree) } - case t => t - } - } + } else + typ() match { + case Ident(name: TypeName) if nme.isVariableName(name) => + atPos(start) { Bind(name, EmptyTree) } + case t => t + } } /** {{{ diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala index 0c550505f360..1906a2f3028f 100644 --- a/src/reflect/scala/reflect/internal/StdNames.scala +++ b/src/reflect/scala/reflect/internal/StdNames.scala @@ -960,6 +960,7 @@ trait StdNames { final val PLUS : NameType = nameType("+") final val STAR : NameType = nameType("*") final val TILDE: NameType = nameType("~") + final val QMARK: NameType = nameType("?") final val isUnary: Set[Name] = Set(MINUS, PLUS, TILDE, BANG) } diff --git a/test/files/pos/wildcards-future.scala b/test/files/pos/wildcards-future.scala new file mode 100644 index 000000000000..928cab3648b0 --- /dev/null +++ b/test/files/pos/wildcards-future.scala @@ -0,0 +1,21 @@ +// scalac: -Xsource:3 +// +object Test { + val xs: List[?] = List(1, 2, 3) + val ys: Map[? <: AnyRef, ? >: Null] = Map() + + def foo(x: Any) = x match { + case x: List[?] => x + case _ => x + } + + // Only allowed in Scala 3 under -source 3.0-migration + type ? 
= Int
+
+  val xs2: List[`?`] = List(1)
+  val xs3: List[Int] = xs2
+
+  def foo2(x: List[`?`]): List[Int] = x match {
+    case x: List[`?`] => x
+  }
+}

From 76ae53adf30ecfa25cd0a3b48a86a95db3ff8159 Mon Sep 17 00:00:00 2001
From: Martijn Hoekstra
Date: Thu, 25 Mar 2021 16:41:23 +0100
Subject: [PATCH 060/769] Spec: add block expression to function application

---
 spec/06-expressions.md | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/spec/06-expressions.md b/spec/06-expressions.md
index 6acff3bd3e6d..13dfe48b6c54 100644
--- a/spec/06-expressions.md
+++ b/spec/06-expressions.md
@@ -295,6 +295,11 @@ must be the same). Furthermore, the type of ´e´ must conform to
 sequence ´e´ with its elements. When the application uses named
 arguments, the vararg parameter has to be specified exactly once.

+If only a single argument is supplied, it may be supplied as a block expression
+and parentheses can be omitted, in the form `´f´ { block }`. This is valid when
+`f` has a single formal parameter or when all other formal parameters have
+default values.
+
 A function application usually allocates a new frame on the program's
 run-time stack. However, if a local method or a final method calls
 itself as its last action, the call is executed using the stack-frame

From 6afd43100d5988c04ca584e9506024178bac2524 Mon Sep 17 00:00:00 2001
From: Martijn Hoekstra
Date: Thu, 1 Apr 2021 12:23:17 +0200
Subject: [PATCH 061/769] spec: include pattern matching function in block
 expression

---
 spec/06-expressions.md | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/spec/06-expressions.md b/spec/06-expressions.md
index 6dc8ba20c6ed..c13c9b13a200 100644
--- a/spec/06-expressions.md
+++ b/spec/06-expressions.md
@@ -590,6 +590,9 @@ Evaluation of the block entails evaluation of its
 statement sequence, followed by an evaluation of the final expression
 ´e´, which defines the result of the block.

+A block expression `{´c_1´; ´\ldots´; ´c_n´}` where ´c_1 , \ldots , c_n´ are
+case clauses forms a [pattern matching anonymous function](08-pattern-matching.html#pattern-matching-anonymous-functions).
+
 ###### Example
 Assuming a class `Ref[T](x: T)`, the block

From d998b1956c92ae7a28a0970bf3ee9d83555d91f6 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E6=A2=A6=E5=A2=83=E8=BF=B7=E7=A6=BB?=
Date: Fri, 2 Apr 2021 15:47:23 +0800
Subject: [PATCH 062/769] scala/bug#11896

---
 src/reflect/scala/reflect/api/Types.scala | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/reflect/scala/reflect/api/Types.scala b/src/reflect/scala/reflect/api/Types.scala
index 2fc29f0bb382..d59241927674 100644
--- a/src/reflect/scala/reflect/api/Types.scala
+++ b/src/reflect/scala/reflect/api/Types.scala
@@ -137,7 +137,7 @@ trait Types {
    * Unlike `members` this method doesn't returns inherited members.
    *
    * Members in the returned scope might appear in arbitrary order.
-   * Use `declarations.sorted` to get an ordered list of members.
+   * Use `decls.sorted` to get an ordered list of members.
    */
  def decls: MemberScope

@@ -150,7 +150,7 @@
    * Unlike `declarations` this method also returns inherited members.
    *
    * Members in the returned scope might appear in arbitrary order.
-   * Use `declarations.sorted` to get an ordered list of members.
+   * Use `members.sorted` to get an ordered list of members.
*/ def members: MemberScope From b58568a470386f7aaceb223d6ea1a550ad39f02d Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Mon, 15 Mar 2021 08:38:06 -0700 Subject: [PATCH 063/769] sbt 1.5.0 (was 1.4.9) --- build.sbt | 2 +- project/JitWatch.scala | 10 +++---- project/ScriptCommands.scala | 36 ++++++++++++------------ project/build.properties | 2 +- project/plugins.sbt | 6 ---- scripts/common | 2 +- src/intellij/scala.ipr.SAMPLE | 36 ++++++++++++------------ test/benchmarks/project/build.properties | 2 +- test/jcstress/project/build.properties | 2 +- 9 files changed, 46 insertions(+), 52 deletions(-) diff --git a/build.sbt b/build.sbt index f1d5434c5130..4bffeca7cf91 100644 --- a/build.sbt +++ b/build.sbt @@ -112,7 +112,7 @@ lazy val instanceSettings = Seq[Setting[_]]( // We create a managed copy to prevent sbt from putting it on the classpath where we don't want it if(s.isManagedVersion) s else { import sbt.internal.inc.ScalaInstance - val s2 = new ScalaInstance(s.version, s.loader, s.loaderLibraryOnly, s.libraryJars, s.compilerJar, s.allJars, Some(s.actualVersion)) + val s2 = new ScalaInstance(s.version, s.loader, s.loaderCompilerOnly, s.loaderLibraryOnly, s.libraryJars, s.compilerJars, s.allJars, Some(s.actualVersion)) assert(s2.isManagedVersion) s2 } diff --git a/project/JitWatch.scala b/project/JitWatch.scala index 8bd483cc618f..d14c43765102 100644 --- a/project/JitWatch.scala +++ b/project/JitWatch.scala @@ -34,14 +34,14 @@ object JitWatchFilePlugin extends AutoPlugin { // Transitive sources from the projects that contribute to this classpath. val projects: Seq[ProjectRef] = buildDependencies.value.classpathTransitiveRefs(thisProjectRef.value) :+ thisProjectRef.value - val projectArtifacts: Map[ProjectRef, Seq[Artifact]] = projects.map(project => (project -> (Keys.artifacts in project get settingsData.value).getOrElse(Nil))).toMap - val artifactNameToProject: Map[String, Seq[ProjectRef]] = projects.groupBy(project => (Keys.name in project get settingsData.value).getOrElse("")) + val projectArtifacts: Map[ProjectRef, Seq[Artifact]] = projects.map(project => (project -> (project / Keys.artifacts get settingsData.value).getOrElse(Nil))).toMap + val artifactNameToProject: Map[String, Seq[ProjectRef]] = projects.groupBy(project => (project / Keys.name get settingsData.value).getOrElse("")) val transitiveSourceDirectories = projects.flatMap { project => - val projectArtifacts: Seq[Artifact] = (Keys.artifacts in project get settingsData.value).getOrElse(Nil) + val projectArtifacts: Seq[Artifact] = (project / Keys.artifacts get settingsData.value).getOrElse(Nil) val matching = projectArtifacts.filter(artifacts.contains(_)) val configs = matching.flatMap(artifact => artifact.configurations).distinct val sourceDirectories: Seq[File] = configs.flatMap { configRef => - (Keys.sourceDirectories in project in sbt.Configuration.of(configRef.name.capitalize, configRef.name)).get(settingsData.value).toList.flatten + (project / sbt.Configuration.of(configRef.name.capitalize, configRef.name) / Keys.sourceDirectories).get(settingsData.value).toList.flatten } sourceDirectories }.distinct @@ -50,7 +50,7 @@ object JitWatchFilePlugin extends AutoPlugin { projects.flatMap { project: ProjectRef => val configs = artifact.configurations val sourceDirectories: Seq[File] = configs.toList.flatMap { configRef => - (Keys.sourceDirectories in project in sbt.Configuration.of(configRef.name.capitalize, configRef.name)).get(settingsData.value).toList.flatten + (project / sbt.Configuration.of(configRef.name.capitalize, 
configRef.name) / Keys.sourceDirectories).get(settingsData.value).toList.flatten } sourceDirectories } diff --git a/project/ScriptCommands.scala b/project/ScriptCommands.scala index 9ee4beafe60c..82cc51f38561 100644 --- a/project/ScriptCommands.scala +++ b/project/ScriptCommands.scala @@ -27,7 +27,7 @@ object ScriptCommands { * The optional argument is the Artifactory snapshot repository URL. */ def setupPublishCoreNonOpt = setup("setupPublishCoreNonOpt") { args => Seq( - baseVersionSuffix in Global := "SHA-SNAPSHOT" + Global / baseVersionSuffix := "SHA-SNAPSHOT" ) ++ (args match { case Seq(url) => publishTarget(url) case Nil => Nil @@ -38,7 +38,7 @@ object ScriptCommands { * The optional argument is the Artifactory snapshot repository URL. */ def setupPublishCore = setup("setupPublishCore") { args => Seq( - baseVersionSuffix in Global := "SHA-SNAPSHOT" + Global / baseVersionSuffix := "SHA-SNAPSHOT" ) ++ (args match { case Seq(url) => publishTarget(url) case Nil => Nil @@ -49,9 +49,9 @@ object ScriptCommands { * The optional argument is the Artifactory snapshot repository URL. */ def setupValidateTest = setup("setupValidateTest") { args => Seq( - testOptions in IntegrationTest in LocalProject("test") ++= Seq(Tests.Argument("--show-log"), Tests.Argument("--show-diff")) + LocalProject("test") / IntegrationTest / testOptions ++= Seq(Tests.Argument("--show-log"), Tests.Argument("--show-diff")) ) ++ (args match { - case Seq(url) => Seq(resolvers in Global += "scala-pr" at url) + case Seq(url) => Seq(Global / resolvers += "scala-pr" at url) case Nil => Nil }) ++ enableOptimizer } @@ -62,8 +62,8 @@ object ScriptCommands { def setupBootstrapStarr = setup("setupBootstrapStarr") { case Seq(fileOrUrl, ver) => val url = fileToUrl(fileOrUrl) Seq( - baseVersion in Global := ver, - baseVersionSuffix in Global := "SPLIT" + Global / baseVersion := ver, + Global / baseVersionSuffix := "SPLIT" ) ++ publishTarget(url) ++ noDocs ++ enableOptimizer } @@ -73,9 +73,9 @@ object ScriptCommands { def setupBootstrapLocker = setup("setupBootstrapLocker") { case Seq(fileOrUrl, ver) => val url = fileToUrl(fileOrUrl) Seq( - baseVersion in Global := ver, - baseVersionSuffix in Global := "SPLIT", - resolvers in Global += "scala-pr" at url + Global / baseVersion := ver, + Global / baseVersionSuffix := "SPLIT", + Global / resolvers += "scala-pr" at url ) ++ publishTarget(url) ++ noDocs ++ enableOptimizer } @@ -89,10 +89,10 @@ object ScriptCommands { val targetUrl = fileToUrl(targetFileOrUrl) val resolverUrl = fileToUrl(resolverFileOrUrl) Seq( - baseVersion in Global := ver, - baseVersionSuffix in Global := "SPLIT", - resolvers in Global += "scala-pr" at resolverUrl, - testOptions in IntegrationTest in LocalProject("test") ++= Seq(Tests.Argument("--show-log"), Tests.Argument("--show-diff")) + Global / baseVersion := ver, + Global / baseVersionSuffix := "SPLIT", + Global / resolvers += "scala-pr" at resolverUrl, + LocalProject("test") / IntegrationTest / testOptions ++= Seq(Tests.Argument("--show-log"), Tests.Argument("--show-diff")) ) ++ publishTarget(targetUrl) ++ enableOptimizer } @@ -103,11 +103,11 @@ object ScriptCommands { def setupBootstrapPublish = setup("setupBootstrapPublish") { case Seq(fileOrUrl, ver) => val url = fileToUrl(fileOrUrl) Seq( - baseVersion in Global := ver, - baseVersionSuffix in Global := "SPLIT", - resolvers in Global += "scala-pr" at url, - publishTo in Global := Some("sonatype-releases" at "https://oss.sonatype.org/service/local/staging/deploy/maven2"), - credentials in Global += 
Credentials("Sonatype Nexus Repository Manager", "oss.sonatype.org", env("SONA_USER"), env("SONA_PASS")) + Global / baseVersion := ver, + Global / baseVersionSuffix := "SPLIT", + Global / resolvers += "scala-pr" at url, + Global / publishTo := Some("sonatype-releases" at "https://oss.sonatype.org/service/local/staging/deploy/maven2"), + Global / credentials += Credentials("Sonatype Nexus Repository Manager", "oss.sonatype.org", env("SONA_USER"), env("SONA_PASS")) // pgpSigningKey and pgpPassphrase are set externally by travis / the bootstrap script, as the sbt-pgp plugin is not enabled by default ) ++ enableOptimizer } diff --git a/project/build.properties b/project/build.properties index dbae93bcfd51..e67343ae796c 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.4.9 +sbt.version=1.5.0 diff --git a/project/plugins.sbt b/project/plugins.sbt index 9294ca79ba70..73ce8dc22df5 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -33,10 +33,4 @@ addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.3.4") addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.0.0") -// See DottySupport.scala -if (Option(System.getProperty("scala.build.compileWithDotty")).map(_.toBoolean).getOrElse(false)) - Seq(addSbtPlugin("ch.epfl.lamp" % "sbt-dotty" % "0.5.2")) -else - Seq() - addSbtPlugin("com.lightbend" % "sbt-whitesource" % "0.1.18") diff --git a/scripts/common b/scripts/common index d8645a48af42..2584d10574ef 100644 --- a/scripts/common +++ b/scripts/common @@ -11,7 +11,7 @@ else fi SBT_CMD=${SBT_CMD-sbt} -SBT_CMD="$SBT_CMD -sbt-version 1.4.9" +SBT_CMD="$SBT_CMD -sbt-version 1.5.0" # repo to publish builds integrationRepoUrl=${integrationRepoUrl-"https://scala-ci.typesafe.com/artifactory/scala-integration/"} diff --git a/src/intellij/scala.ipr.SAMPLE b/src/intellij/scala.ipr.SAMPLE index a688c8d8e945..c6b626692a57 100644 --- a/src/intellij/scala.ipr.SAMPLE +++ b/src/intellij/scala.ipr.SAMPLE @@ -325,13 +325,13 @@ - + - + - - + + @@ -355,7 +355,7 @@ - + @@ -369,8 +369,8 @@ - - + + @@ -382,13 +382,13 @@ - + - + @@ -402,16 +402,16 @@ - + - + - + - + @@ -435,18 +435,18 @@ - - + + - + - + - + diff --git a/test/benchmarks/project/build.properties b/test/benchmarks/project/build.properties index dbae93bcfd51..e67343ae796c 100644 --- a/test/benchmarks/project/build.properties +++ b/test/benchmarks/project/build.properties @@ -1 +1 @@ -sbt.version=1.4.9 +sbt.version=1.5.0 diff --git a/test/jcstress/project/build.properties b/test/jcstress/project/build.properties index dbae93bcfd51..e67343ae796c 100644 --- a/test/jcstress/project/build.properties +++ b/test/jcstress/project/build.properties @@ -1 +1 @@ -sbt.version=1.4.9 +sbt.version=1.5.0 From 67b5edcfa3cf28f755b25c39c655adeb08a3dc08 Mon Sep 17 00:00:00 2001 From: Mario Galic Date: Thu, 8 Apr 2021 18:12:48 +0100 Subject: [PATCH 064/769] Replace 0.asInstanceOf[B] with null.asInstanceOf[B] - This seems to be the only place `0.asInstanceOf[T]` is used, usually it is expressed as `null.asInstanceOf[T]`. 
- Emulates [Expression for all zero bits #8767](https://github.com/scala/scala/pull/8767) --- src/library/scala/collection/TraversableOnce.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/library/scala/collection/TraversableOnce.scala b/src/library/scala/collection/TraversableOnce.scala index aa6441684536..187ab7ac407d 100644 --- a/src/library/scala/collection/TraversableOnce.scala +++ b/src/library/scala/collection/TraversableOnce.scala @@ -224,7 +224,7 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] { //avoid the LazyRef as we don't have an @eager object class reducer extends AbstractFunction1[A, Unit] { var first = true - var acc: B = 0.asInstanceOf[B] + var acc: B = null.asInstanceOf[B] override def apply(x: A): Unit = if (first) { From b0931a142c8291a87c0ccc1d4dfb413b3cc604aa Mon Sep 17 00:00:00 2001 From: mkeskells Date: Tue, 19 Jan 2021 10:39:00 +0000 Subject: [PATCH 065/769] Fix a 2.12-only regression in ListSet's ordering * special case of isEmpty * size the array appropriately and efficiently * add some tests and comments * partial reformat --- .../scala/collection/immutable/ListSet.scala | 59 +++++++++++++--- .../collection/immutable/ListSetTest.scala | 67 ++++++++++++++++++- 2 files changed, 114 insertions(+), 12 deletions(-) diff --git a/src/library/scala/collection/immutable/ListSet.scala b/src/library/scala/collection/immutable/ListSet.scala index ef0fc78fa1ef..338fca6c8d72 100644 --- a/src/library/scala/collection/immutable/ListSet.scala +++ b/src/library/scala/collection/immutable/ListSet.scala @@ -81,14 +81,16 @@ sealed class ListSet[A] extends AbstractSet[A] def -(elem: A): ListSet[A] = this override def ++(xs: GenTraversableOnce[A]): ListSet[A] = - xs match { - // we want to avoid to use of iterator as it causes allocations - // during reverseList + xs match { + // we want to avoid to use of iterator as it causes allocations + // during reverseList case ls: ListSet[A] => if (ls eq this) this else { val lsSize = ls.size - if (lsSize == 0) this else { + if (lsSize == 0) this + else if (isEmpty) ls + else { @tailrec def skip(ls: ListSet[A], count: Int): ListSet[A] = { if (count == 0) ls else skip(ls.next, count - 1) } @@ -96,32 +98,67 @@ sealed class ListSet[A] extends AbstractSet[A] @tailrec def containsLimited(n: ListSet[A], e: A, end: ListSet[A]): Boolean = (n ne end) && (e == n.elem || containsLimited(n.next, e, end)) + @tailrec def distanceTo(n: ListSet[A], end: ListSet[A], soFar: Int): Int = + if (n eq end) soFar else distanceTo(n.next, end, soFar + 1) + // We hope to get some structural sharing so find the tail of the // ListSet that are `eq` (or if there are not any then the ends of the lists), // and we optimise the add to only iterate until we reach the common end - val thisSize = this.size + val thisSize = this.size val remaining = Math.min(thisSize, lsSize) - var thisTail = skip(this, thisSize - remaining) - var lsTail = skip(ls, lsSize - remaining) + var thisTail = skip(this, thisSize - remaining) + var lsTail = skip(ls, lsSize - remaining) + //find out what part of the the ListSet is sharable + //as we can ignore the shared elements while ((thisTail ne lsTail) && !lsTail.isEmpty) { thisTail = thisTail.next lsTail = lsTail.next } - var toAdd = ls + var toAdd = ls var result: ListSet[A] = this + // Its quite a common case that we are just adding a few elements, so it there are less than 5 elements we + // hold them in pending0..3 + // if there are more than these 4 we hold the rest in pending + var pending : 
Array[A] = null + var pending0, pending1, pending2, pending3: A = null.asInstanceOf[A] + var pendingCount = 0 while (toAdd ne lsTail) { val elem = toAdd.elem if (!containsLimited(result, elem, lsTail)) { - val r = result - result = new r.Node(elem) + pendingCount match { + case 0 => pending0 = elem + case 1 => pending1 = elem + case 2 => pending2 = elem + case 3 => pending3 = elem + case _ => + if (pending eq null) + pending = new Array[AnyRef](distanceTo(toAdd, lsTail, 0)).asInstanceOf[Array[A]] + pending(pendingCount - 4) = elem + } + pendingCount += 1 } toAdd = toAdd.next } + // add the extra values. They are added in reverse order so as to ensure that the iteration order is correct + // remembering that the content is in the reverse order to the iteration order + // i.e. this.next is really the previous value + while (pendingCount > 0) { + val elem: A = pendingCount match { + case 1 => pending0 + case 2 => pending1 + case 3 => pending2 + case 4 => pending3 + case _ => pending(pendingCount - 5) + } + val r = result + result = new r.Node(elem) + pendingCount -= 1 + } result } } - case _ => + case _ => if (xs.isEmpty) this else (repr /: xs) (_ + _) } diff --git a/test/junit/scala/collection/immutable/ListSetTest.scala b/test/junit/scala/collection/immutable/ListSetTest.scala index 395da88c75b6..4ce4fc5a6206 100644 --- a/test/junit/scala/collection/immutable/ListSetTest.scala +++ b/test/junit/scala/collection/immutable/ListSetTest.scala @@ -23,7 +23,9 @@ class ListSetTest { @Test def hasTailRecursiveDelete(): Unit = { val s = ListSet(1 to 50000: _*) - try s - 25000 catch { case e: StackOverflowError => fail("A stack overflow occurred") } + try s - 25000 catch { + case e: StackOverflowError => fail("A stack overflow occurred") + } } @Test @@ -50,4 +52,67 @@ class ListSetTest { val s = ListSet(1, 2, 3, 5, 4) assertEquals(List(1, 2, 3, 5, 4), s.iterator.toList) } + + @Test + def hasCorrectOrderAfterPlusPlus(): Unit = { + val foo = ListSet(1) + var bar = foo ++ ListSet() + assertEquals(List(1), bar.iterator.toList) + + bar = foo ++ ListSet(1) + assertEquals(List(1), bar.iterator.toList) + + bar = foo ++ ListSet(2) + assertEquals(List(1, 2), bar.iterator.toList) + + bar = foo ++ ListSet(1, 2) + assertEquals(List(1, 2), bar.iterator.toList) + + bar = foo ++ ListSet(1, 2, 3) + assertEquals(List(1, 2, 3), bar.iterator.toList) + + bar = foo ++ ListSet(1, 2, 3, 4) + assertEquals(List(1, 2, 3, 4), bar.iterator.toList) + + bar = foo ++ ListSet(1, 2, 3, 4, 5) + assertEquals(List(1, 2, 3, 4, 5), bar.iterator.toList) + + bar = foo ++ ListSet(1, 2, 3, 4, 5, 6) + assertEquals(List(1, 2, 3, 4, 5, 6), bar.iterator.toList) + } + + @Test + def smallPlusPlus1(): Unit = { + def check(l1: ListSet[Int], l2: ListSet[Int]) = { + val expected = l1.iterator.toList ++ l2.iterator.filterNot(l1).toList + val actual = (l1 ++ l2).iterator.toList + assertEquals(expected, actual) + } + + for (start0 <- 0 until 6; + end0 <- start0 until 6; + start1 <- 0 until 6; + end1 <- start1 until 6) { + val ls0 = ListSet((start0 until end0): _*) + val ls1 = ListSet((start1 until end1): _*) + check(ls0, ls1) + } + } + @Test + def smallPlusPlusAfter(): Unit = { + def check(l1: ListSet[Int], l2: ListSet[Int]) = { + val expected = l1.iterator.toList ++ l2.iterator.filterNot(l1).toList + val actual = (l1 ++ l2).iterator.toList + assertEquals(expected, actual) + } + + for (start0 <- 0 until 9; + end0 <- start0 until 9; + start1 <- 10 until 19; + end1 <- start1 until 19) { + val ls0 = ListSet((start0 until end0): _*) + val ls1 = ListSet((start1 
until end1): _*) + check(ls0, ls1) + } + } } From 48f5a791599279da70975659327ac8ed034bfc66 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 12 Apr 2021 10:03:46 +1000 Subject: [PATCH 066/769] Support class constants as invokedynamic boostrap args --- .../nsc/backend/jvm/BTypesFromSymbols.scala | 8 ++++- .../indy-via-macro-class-constant-bsa.check | 1 + .../Bootstrap.java | 14 +++++++++ .../Test_2.scala | 6 ++++ .../macro_1.scala | 29 +++++++++++++++++++ 5 files changed, 57 insertions(+), 1 deletion(-) create mode 100644 test/files/run/indy-via-macro-class-constant-bsa.check create mode 100644 test/files/run/indy-via-macro-class-constant-bsa/Bootstrap.java create mode 100644 test/files/run/indy-via-macro-class-constant-bsa/Test_2.scala create mode 100644 test/files/run/indy-via-macro-class-constant-bsa/macro_1.scala diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala index 8554304cb7c3..9ebec8891631 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala @@ -130,7 +130,13 @@ abstract class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { } def bootstrapMethodArg(t: Constant, pos: Position): AnyRef = t match { - case Constant(mt: Type) => methodBTypeFromMethodType(transformedType(mt), isConstructor = false).toASMType + case Constant(mt: Type) => + transformedType(mt) match { + case mt1: MethodType => + methodBTypeFromMethodType(mt1, isConstructor = false).toASMType + case t => + typeToBType(t).toASMType + } case c @ Constant(sym: Symbol) if sym.owner.isJavaDefined && sym.isStaticMember => staticHandleFromSymbol(sym) case c @ Constant(sym: Symbol) => handleFromMethodSymbol(sym) case c @ Constant(value: String) => value diff --git a/test/files/run/indy-via-macro-class-constant-bsa.check b/test/files/run/indy-via-macro-class-constant-bsa.check new file mode 100644 index 000000000000..ecb48be612a5 --- /dev/null +++ b/test/files/run/indy-via-macro-class-constant-bsa.check @@ -0,0 +1 @@ +Test$C diff --git a/test/files/run/indy-via-macro-class-constant-bsa/Bootstrap.java b/test/files/run/indy-via-macro-class-constant-bsa/Bootstrap.java new file mode 100644 index 000000000000..3457910e8b71 --- /dev/null +++ b/test/files/run/indy-via-macro-class-constant-bsa/Bootstrap.java @@ -0,0 +1,14 @@ +package test; + +import java.lang.invoke.*; + +public final class Bootstrap { + private Bootstrap() { + } + + public static CallSite bootstrap(MethodHandles.Lookup lookup, String invokedName, + MethodType invokedType, + Class cls) throws Throwable { + return new java.lang.invoke.ConstantCallSite(java.lang.invoke.MethodHandles.constant(String.class, cls.getName())); + } +} diff --git a/test/files/run/indy-via-macro-class-constant-bsa/Test_2.scala b/test/files/run/indy-via-macro-class-constant-bsa/Test_2.scala new file mode 100644 index 000000000000..49610031412e --- /dev/null +++ b/test/files/run/indy-via-macro-class-constant-bsa/Test_2.scala @@ -0,0 +1,6 @@ +object Test { + def main(args: Array[String]) { + println(Macro.classNameOf(classOf[C])) + } + class C(val x: Int) extends AnyVal +} diff --git a/test/files/run/indy-via-macro-class-constant-bsa/macro_1.scala b/test/files/run/indy-via-macro-class-constant-bsa/macro_1.scala new file mode 100644 index 000000000000..366dd7ff03f9 --- /dev/null +++ b/test/files/run/indy-via-macro-class-constant-bsa/macro_1.scala @@ -0,0 +1,29 @@ +import java.util.regex._ +import 
scala.reflect.internal.SymbolTable +import scala.reflect.macros.blackbox._ +import language.experimental.macros +import java.lang.invoke._ + +object Macro { + def classNameOf(expr: Class[_]): String = macro Impl.classNameOf +} + + +class Impl(val c: Context) { + def classNameOf(expr: c.Tree): c.Tree = { + { + val symtab = c.universe.asInstanceOf[SymbolTable] + import symtab._ + val bootstrapMethod = typeOf[test.Bootstrap].companion.member(TermName("bootstrap")) + val paramSym = NoSymbol.newTermSymbol(TermName("x")).setInfo(typeOf[String]) + val dummySymbol = NoSymbol.newTermSymbol(TermName("classNameOf")).setInfo(internal.nullaryMethodType(typeOf[String])) + val bootstrapArgTrees: List[Tree] = List( + Literal(Constant(bootstrapMethod)).setType(NoType), + expr.asInstanceOf[Tree], + ) + val result = ApplyDynamic(Ident(dummySymbol).setType(dummySymbol.info), bootstrapArgTrees) + result.setType(dummySymbol.info.resultType) + result.asInstanceOf[c.Tree] + } + } +} From c494fc1818d80d7fef3d507b2537e7d8277e3c9c Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 22 Mar 2021 16:56:11 +1000 Subject: [PATCH 067/769] Another test for semantics of local objects --- test/async/jvm/lazyval.scala | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/test/async/jvm/lazyval.scala b/test/async/jvm/lazyval.scala index d3ed6fb0ad24..0f308ab6161b 100644 --- a/test/async/jvm/lazyval.scala +++ b/test/async/jvm/lazyval.scala @@ -5,9 +5,11 @@ package scala.async.run.lazyval { import org.junit.Test import org.junit.Assert._ + import scala.concurrent._ import scala.concurrent.duration._ import ExecutionContext.Implicits.global + import scala.collection.mutable.ListBuffer import scala.tools.partest.async.Async.{async, await} object TestUtil { import language.implicitConversions @@ -33,6 +35,34 @@ package scala.async.run.lazyval { assertEquals(43, result) } + + @Test + def localObject(): Unit = { + val result = block(async { + val log = ListBuffer[String]() + object O { + log += "O" + } + await(1) + O + await(1) + O + var i = 0 + while (i <= 2) { + object W { + log += "W(" + i + ")" + } + await(1) + W + await(1) + W + i += 1 + } + log.mkString(",") + }) + + assertEquals("O,W(0),W(1),W(2)", result) + } } } From f921912389a664b4bc767ada9204f040b48b657a Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 22 Mar 2021 16:58:17 +1000 Subject: [PATCH 068/769] Support async trees fsm() as a sibling of class statemachine This aligns closely with the tree shape of lambdas and sets the stage for a post-async compiler transform to turn the anonymous inner class into a invokedynamic metafactory call. 
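Schematically (a simplified sketch based on the test plugin in AnnotationDrivenAsync.scala; the
names fsm$1 and stateMachine$async are illustrative), the generated shape moves from transforming
the async body inside the state machine's apply method:

    class stateMachine$async extends CustomFutureStateMachine {
      def apply(tr: Either[Throwable, AnyRef]): Unit = { /* transformed async body */ }
    }
    new stateMachine$async().start()

to a sibling method that takes the state machine as an explicit self parameter and that apply
merely delegates to:

    def fsm$1(self: stateMachine$async, tr: Either[Throwable, AnyRef]): Unit = { /* transformed async body */ }
    class stateMachine$async extends CustomFutureStateMachine {
      def apply(tr: Either[Throwable, AnyRef]): Unit = fsm$1(this, tr)
    }
    new stateMachine$async().start()
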
--- .../nsc/transform/async/AsyncPhase.scala | 34 ++++++++++++------- .../async/AsyncTransformStates.scala | 13 +++++-- .../nsc/transform/async/ExprBuilder.scala | 6 ++-- .../tools/nsc/transform/async/Lifter.scala | 14 +++++--- .../nsc/async/AnnotationDrivenAsync.scala | 31 ++++++++++++----- 5 files changed, 66 insertions(+), 32 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/async/AsyncPhase.scala b/src/compiler/scala/tools/nsc/transform/async/AsyncPhase.scala index abd065d86ba1..67dde5d0d560 100644 --- a/src/compiler/scala/tools/nsc/transform/async/AsyncPhase.scala +++ b/src/compiler/scala/tools/nsc/transform/async/AsyncPhase.scala @@ -113,7 +113,7 @@ abstract class AsyncPhase extends Transform with TypingTransformers with AnfTran }) } assert(localTyper.context.owner == cd.symbol.owner) - val withFields = new UseFields(localTyper, cd.symbol, applySym, liftedSyms).transform(cd1) + val withFields = new UseFields(localTyper, cd.symbol, applySym, liftedSyms, NoSymbol).transform(cd1) withFields case dd: DefDef if dd.hasAttachment[AsyncAttachment] => @@ -123,14 +123,17 @@ abstract class AsyncPhase extends Transform with TypingTransformers with AnfTran } atOwner(dd, dd.symbol) { - val trSym = dd.vparamss.head.head.symbol + val trSym = dd.vparamss.head.last.symbol + val selfSym = if (dd.symbol.owner.isTerm) dd.vparamss.head.head.symbol else NoSymbol val saved = currentTransformState currentTransformState = new AsyncTransformState(asyncAttachment.awaitSymbol, - asyncAttachment.postAnfTransform, asyncAttachment.stateDiagram, this, trSym, asyncBody.tpe, asyncNames) + asyncAttachment.postAnfTransform, asyncAttachment.stateDiagram, this, selfSym, trSym, asyncBody.tpe, asyncNames) try { - val (newRhs, liftableFields) = asyncTransform(asyncBody) - liftableMap(dd.symbol.owner) = (dd.symbol, liftableFields) - deriveDefDef(dd)(_ => newRhs) + val (newRhs, liftedTrees) = asyncTransform(asyncBody) + liftableMap(currentTransformState.stateMachineClass) = (dd.symbol, liftedTrees) + val liftedSyms = liftedTrees.iterator.map(_.symbol).toSet + val withFields = new UseFields(localTyper, currentTransformState.stateMachineClass, dd.symbol, liftedSyms, selfSym).transform(newRhs) + deriveDefDef(dd)(_ => withFields) } finally { currentTransformState = saved } @@ -192,15 +195,20 @@ abstract class AsyncPhase extends Transform with TypingTransformers with AnfTran // - references to them are rewritten as referencs to the fields. // - the rhs of ValDefs that initialize such fields is turned into an assignment to the field private class UseFields(initLocalTyper: analyzer.Typer, stateMachineClass: Symbol, - applySym: Symbol, liftedSyms: Set[Symbol]) extends explicitOuter.OuterPathTransformer(initLocalTyper) { + applySym: Symbol, liftedSyms: Set[Symbol], selfSym: Symbol) extends explicitOuter.OuterPathTransformer(initLocalTyper) { private def fieldSel(tree: Tree) = { assert(currentOwner != NoSymbol) - val outerOrThis = if (stateMachineClass == currentClass) gen.mkAttributedThis(stateMachineClass) else { - // These references need to be selected from an outer reference, because explicitouter - // has already run we must perform this transform explicitly here. 
- tree.symbol.makeNotPrivate(tree.symbol.owner) - outerPath(outerValue, currentClass.outerClass, stateMachineClass) - } + val outerOrThis = + if (selfSym != NoSymbol) + gen.mkAttributedIdent(selfSym) + else if (stateMachineClass == currentClass) + gen.mkAttributedThis(stateMachineClass) + else { + // These references need to be selected from an outer reference, because explicitouter + // has already run we must perform this transform explicitly here. + tree.symbol.makeNotPrivate(tree.symbol.owner) + outerPath(outerValue, currentClass.outerClass, stateMachineClass) + } atPos(tree.pos)(Select(outerOrThis.setType(stateMachineClass.tpe), tree.symbol).setType(tree.symbol.tpe)) } override def transform(tree: Tree): Tree = tree match { diff --git a/src/compiler/scala/tools/nsc/transform/async/AsyncTransformStates.scala b/src/compiler/scala/tools/nsc/transform/async/AsyncTransformStates.scala index 4e2d3b8161d9..de93da7f26f8 100644 --- a/src/compiler/scala/tools/nsc/transform/async/AsyncTransformStates.scala +++ b/src/compiler/scala/tools/nsc/transform/async/AsyncTransformStates.scala @@ -25,6 +25,7 @@ trait AsyncTransformStates extends TypingTransformers { val postAnfTransform: Block => Block, val dotDiagram: (Symbol, Tree) => Option[String => Unit], val typingTransformer: TypingTransformer, + val exteralFsmSelfParam: Symbol, val applyTrParam: Symbol, val asyncType: Type, val asyncNames: AsyncNames[global.type]) { @@ -39,7 +40,7 @@ trait AsyncTransformStates extends TypingTransformers { val applySym: Symbol = applyTr.owner var currentPos: Position = applySym.pos - lazy val stateMachineClass: Symbol = applySym.owner + lazy val stateMachineClass: Symbol = if (exteralFsmSelfParam != NoSymbol) exteralFsmSelfParam.info.typeSymbol else applySym.owner lazy val stateGetter: Symbol = stateMachineMember(nme.state) lazy val stateSetter: Symbol = stateMachineMember(nme.state.setterName) lazy val stateOnComplete: Symbol = stateMachineMember(TermName("onComplete")) @@ -52,7 +53,15 @@ trait AsyncTransformStates extends TypingTransformers { def stateMachineMember(name: TermName): Symbol = stateMachineClass.info.member(name) def memberRef(sym: Symbol): Tree = - gen.mkAttributedRef(stateMachineClass.typeConstructor, sym) + if (exteralFsmSelfParam == NoSymbol) + gen.mkAttributedRef(stateMachineClass.typeConstructor, sym) + else + gen.mkAttributedSelect(gen.mkAttributedIdent(exteralFsmSelfParam), sym) + def stateMachineRef(): Tree = + if (exteralFsmSelfParam == NoSymbol) + gen.mkAttributedThis(stateMachineClass) + else + gen.mkAttributedIdent(exteralFsmSelfParam) } } \ No newline at end of file diff --git a/src/compiler/scala/tools/nsc/transform/async/ExprBuilder.scala b/src/compiler/scala/tools/nsc/transform/async/ExprBuilder.scala index 59949526eb56..597396b0eec1 100644 --- a/src/compiler/scala/tools/nsc/transform/async/ExprBuilder.scala +++ b/src/compiler/scala/tools/nsc/transform/async/ExprBuilder.scala @@ -51,7 +51,7 @@ trait ExprBuilder extends TransformUtils with AsyncAnalysis { val stats1 = mutable.ListBuffer[Tree]() def addNullAssigments(syms: Iterator[Symbol]): Unit = { for (fieldSym <- syms) { - stats1 += typed(Assign(gen.mkAttributedStableRef(fieldSym.owner.thisPrefix, fieldSym), gen.mkZero(fieldSym.info))) + stats1 += typed(Assign(currentTransformState.memberRef(fieldSym), gen.mkZero(fieldSym.info))) } } // Add pre-state null assigments at the beginning. 
@@ -539,7 +539,7 @@ trait ExprBuilder extends TransformUtils with AsyncAnalysis { val tempVd = ValDef(temp, gen.mkMethodCall(currentTransformState.memberRef(currentTransformState.stateTryGet), tryyReference :: Nil)) typed(Block( tempVd :: Nil, - If(Apply(gen.mkAttributedSelect(gen.mkAttributedThis(currentTransformState.stateMachineClass), definitions.Any_==), gen.mkAttributedIdent(temp) :: Nil), + If(Apply(gen.mkAttributedSelect(currentTransformState.stateMachineRef(), definitions.Any_==), gen.mkAttributedIdent(temp) :: Nil), Return(literalUnit), gen.mkCast(gen.mkAttributedIdent(temp), tempVd.symbol.info) ) @@ -598,7 +598,7 @@ trait ExprBuilder extends TransformUtils with AsyncAnalysis { // (_without_ consuming an extra stack frome!) def callOnComplete(fut: Tree): Tree = - Apply(Select(This(currentTransformState.stateMachineClass), transformState.stateOnComplete), fut :: Nil) + Apply(currentTransformState.memberRef(transformState.stateOnComplete), fut :: Nil) val runCompletedOnSameThread = transformState.stateGetCompleted != NoSymbol if (runCompletedOnSameThread) { diff --git a/src/compiler/scala/tools/nsc/transform/async/Lifter.scala b/src/compiler/scala/tools/nsc/transform/async/Lifter.scala index 1f88d586dcec..c9183527b745 100644 --- a/src/compiler/scala/tools/nsc/transform/async/Lifter.scala +++ b/src/compiler/scala/tools/nsc/transform/async/Lifter.scala @@ -150,15 +150,19 @@ trait Lifter extends ExprBuilder { val treeLifted = t match { case vd@ValDef(_, _, tpt, rhs) => val isLazy = sym.isLazy - sym.setFlag(STABLE | PRIVATE | LOCAL) - if (isLazy) sym.resetFlag(LAZY) else sym.setFlag(MUTABLE) + sym.setFlag(STABLE) + if (currentTransformState.exteralFsmSelfParam == NoSymbol) + sym.setFlag(PRIVATE | LOCAL) + + if (isLazy) sym.resetFlag(LAZY) + sym.setFlag(MUTABLE) sym.setName(currentTransformState.name.freshenIfNeeded(sym.name.toTermName)) sym.setInfo(sym.info.deconst) - val rhs1 = if (isLazy) rhs else EmptyTree - treeCopy.ValDef(vd, Modifiers(sym.flags), sym.name, TypeTree(sym.info).setPos(t.pos), rhs1) + treeCopy.ValDef(vd, Modifiers(sym.flags), sym.name, TypeTree(sym.info).setPos(t.pos), EmptyTree) case dd@DefDef(_, _, tparams, vparamss, tpt, rhs) => sym.setName(currentTransformState.name.freshen(sym.name.toTermName)) - sym.setFlag(PRIVATE | LOCAL) + if (currentTransformState.exteralFsmSelfParam == NoSymbol) + sym.setFlag(PRIVATE | LOCAL) // Was `DefDef(sym, rhs)`, but this ran afoul of `ToughTypeSpec.nestedMethodWithInconsistencyTreeAndInfoParamSymbols` // due to the handling of type parameter skolems in `thisMethodType` in `Namers` treeCopy.DefDef(dd, Modifiers(sym.flags), sym.name, tparams, vparamss, tpt, rhs) diff --git a/test/junit/scala/tools/nsc/async/AnnotationDrivenAsync.scala b/test/junit/scala/tools/nsc/async/AnnotationDrivenAsync.scala index 0afd8d555cb6..e418b1899224 100644 --- a/test/junit/scala/tools/nsc/async/AnnotationDrivenAsync.scala +++ b/test/junit/scala/tools/nsc/async/AnnotationDrivenAsync.scala @@ -448,11 +448,9 @@ class AnnotationDrivenAsync { } } catch { case ve: VerifyError => - val asm = out.listFiles().filter(_.getName.contains("stateMachine")).flatMap { file => - import scala.sys.process._ - val javap = List("/usr/local/bin/javap", "-v", file.getAbsolutePath).!! 
+ val asm = out.listFiles().flatMap { file => val asmp = AsmUtils.textify(AsmUtils.readClass(file.getAbsolutePath)) - javap :: asmp :: Nil + asmp :: Nil }.mkString("\n\n") throw new AssertionError(asm, ve) } finally { @@ -490,17 +488,32 @@ abstract class AnnotationDrivenAsyncPlugin extends Plugin { case dd: DefDef if dd.symbol.hasAnnotation(customAsyncSym) => deriveDefDef(dd) { rhs => - val applyMethod = - q"""def apply(tr: _root_.scala.util.Either[_root_.scala.Throwable, _root_.scala.AnyRef]): _root_.scala.Unit = $rhs""" - val applyMethodMarked = global.async.markForAsyncTransform(dd.symbol, applyMethod, awaitSym, Map.empty) + val fsmImplName = currentUnit.freshTermName("fsm$") + val externalFsmMethod = true val name = TypeName("stateMachine$async") - val wrapped = + val wrapped = if (!externalFsmMethod) { + val applyMethod = + q"""def apply(tr: _root_.scala.util.Either[_root_.scala.Throwable, _root_.scala.AnyRef]): _root_.scala.Unit = $rhs""" + val applyMethodMarked = global.async.markForAsyncTransform(dd.symbol, applyMethod, awaitSym, Map.empty) q""" class $name extends _root_.scala.tools.nsc.async.CustomFutureStateMachine { - $applyMethodMarked + $applyMethodMarked } new $name().start() """ + } else { + val applyMethod = + q"""def $fsmImplName(self: $name, tr: _root_.scala.util.Either[_root_.scala.Throwable, _root_.scala.AnyRef]): _root_.scala.Unit = $rhs""" + val applyMethodMarked = global.async.markForAsyncTransform(dd.symbol, applyMethod, awaitSym, Map.empty) + q""" + $applyMethodMarked + class $name extends _root_.scala.tools.nsc.async.CustomFutureStateMachine { + def apply(tr: _root_.scala.util.Either[_root_.scala.Throwable, _root_.scala.AnyRef]): _root_.scala.Unit = + $fsmImplName(this, tr) + } + new $name().start() + """ + } val tree = q""" From d12f66c7fbdf24dcfc8934ec72f6b91469a8ad70 Mon Sep 17 00:00:00 2001 From: Ikko Ashimine Date: Mon, 12 Apr 2021 21:07:02 +0900 Subject: [PATCH 069/769] Fix typo in TastyFormat.scala preceeding -> preceding --- src/compiler/scala/tools/tasty/TastyFormat.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/compiler/scala/tools/tasty/TastyFormat.scala b/src/compiler/scala/tools/tasty/TastyFormat.scala index 7aae96aebc15..73415a13199f 100644 --- a/src/compiler/scala/tools/tasty/TastyFormat.scala +++ b/src/compiler/scala/tools/tasty/TastyFormat.scala @@ -25,7 +25,7 @@ object TastyFormat { /**Natural number. Each increment of the `MajorVersion` begins a * new series of backward compatible TASTy versions. * - * A TASTy file in either the preceeding or succeeding series is + * A TASTy file in either the preceding or succeeding series is * incompatible with the current value. */ final val MajorVersion: Int = 28 @@ -33,7 +33,7 @@ object TastyFormat { /**Natural number. Each increment of the `MinorVersion`, within * a series declared by the `MajorVersion`, breaks forward * compatibility, but remains backwards compatible, with all - * preceeding `MinorVersion`. + * preceding `MinorVersion`. 
*/ final val MinorVersion: Int = 0 From 06cb813fc60d4950f098ff21139275d63c510a22 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E6=A2=A6=E5=A2=83=E8=BF=B7=E7=A6=BB?= Date: Thu, 25 Mar 2021 15:42:17 +0800 Subject: [PATCH 070/769] fix p12366 --- .../nsc/typechecker/MacroAnnotationNamers.scala | 12 +++++++++++- test/files/pos/macro-annot/t12366.check | 1 + test/files/pos/macro-annot/t12366.scala | 15 +++++++++++++++ 3 files changed, 27 insertions(+), 1 deletion(-) create mode 100644 test/files/pos/macro-annot/t12366.check create mode 100644 test/files/pos/macro-annot/t12366.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/MacroAnnotationNamers.scala b/src/compiler/scala/tools/nsc/typechecker/MacroAnnotationNamers.scala index 31eeedf2853e..267501f23175 100644 --- a/src/compiler/scala/tools/nsc/typechecker/MacroAnnotationNamers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/MacroAnnotationNamers.scala @@ -166,7 +166,17 @@ trait MacroAnnotationNamers { self: Analyzer => protected def weakEnsureCompanionObject(cdef: ClassDef, creator: ClassDef => Tree = companionModuleDef(_)): Symbol = { val m = patchedCompanionSymbolOf(cdef.symbol, context) if (m != NoSymbol && currentRun.compiles(m)) m - else { val mdef = atPos(cdef.pos.focus)(creator(cdef)); enterSym(mdef); markWeak(mdef.symbol) } + else { + val existsVal = context.tree.children.find { + case ValDef(_, term, _, _) if cdef.getterName == term => true + case _ => false + } + if (existsVal.isDefined) NoSymbol else { + val mdef = atPos(cdef.pos.focus)(creator(cdef)) + enterSym(mdef) + markWeak(mdef.symbol) + } + } } protected def finishSymbol(tree: Tree): Unit = { diff --git a/test/files/pos/macro-annot/t12366.check b/test/files/pos/macro-annot/t12366.check new file mode 100644 index 000000000000..de47a31a6b4e --- /dev/null +++ b/test/files/pos/macro-annot/t12366.check @@ -0,0 +1 @@ +warning: 2 deprecations; re-run with -deprecation for details diff --git a/test/files/pos/macro-annot/t12366.scala b/test/files/pos/macro-annot/t12366.scala new file mode 100644 index 000000000000..9b75bb3c6d1f --- /dev/null +++ b/test/files/pos/macro-annot/t12366.scala @@ -0,0 +1,15 @@ +// scalac: -Ymacro-annotations +object Test extends App { + + @deprecated + class Inner() { + } + + lazy val Inner = new Inner() + + @deprecated + class Inner2() { + } + + val Inner2 = new Inner2() +} From 8e330aa43ee86ef3f360d6357d324bdbb5191949 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 12 Apr 2021 10:38:53 +1000 Subject: [PATCH 071/769] Use eq rather than == for sentinel check in async generated code --- src/compiler/scala/tools/nsc/transform/async/ExprBuilder.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/transform/async/ExprBuilder.scala b/src/compiler/scala/tools/nsc/transform/async/ExprBuilder.scala index 597396b0eec1..d0919cf5831e 100644 --- a/src/compiler/scala/tools/nsc/transform/async/ExprBuilder.scala +++ b/src/compiler/scala/tools/nsc/transform/async/ExprBuilder.scala @@ -539,7 +539,7 @@ trait ExprBuilder extends TransformUtils with AsyncAnalysis { val tempVd = ValDef(temp, gen.mkMethodCall(currentTransformState.memberRef(currentTransformState.stateTryGet), tryyReference :: Nil)) typed(Block( tempVd :: Nil, - If(Apply(gen.mkAttributedSelect(currentTransformState.stateMachineRef(), definitions.Any_==), gen.mkAttributedIdent(temp) :: Nil), + If(Apply(gen.mkAttributedSelect(currentTransformState.stateMachineRef(), definitions.Object_eq), gen.mkAttributedIdent(temp) :: Nil), Return(literalUnit), 
gen.mkCast(gen.mkAttributedIdent(temp), tempVd.symbol.info) ) From f14bc5976bb2ee017fab76f4e2d8c740f71fce76 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 12 Apr 2021 10:42:29 +1000 Subject: [PATCH 072/769] Minor cleanups in async phase --- .../tools/nsc/transform/async/ExprBuilder.scala | 13 ++----------- src/reflect/scala/reflect/internal/StdNames.scala | 1 + 2 files changed, 3 insertions(+), 11 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/async/ExprBuilder.scala b/src/compiler/scala/tools/nsc/transform/async/ExprBuilder.scala index d0919cf5831e..95e83e1a95d1 100644 --- a/src/compiler/scala/tools/nsc/transform/async/ExprBuilder.scala +++ b/src/compiler/scala/tools/nsc/transform/async/ExprBuilder.scala @@ -535,7 +535,7 @@ trait ExprBuilder extends TransformUtils with AsyncAnalysis { private def resumeTree(awaitableResult: ValDef): Tree = { def tryyReference = gen.mkAttributedIdent(currentTransformState.applyTrParam) deriveValDef(awaitableResult) { _ => - val temp = awaitableResult.symbol.newTermSymbol(TermName("tryGetResult$async")).setInfo(definitions.ObjectTpe) + val temp = awaitableResult.symbol.newTermSymbol(nme.trGetResult).setInfo(definitions.ObjectTpe) val tempVd = ValDef(temp, gen.mkMethodCall(currentTransformState.memberRef(currentTransformState.stateTryGet), tryyReference :: Nil)) typed(Block( tempVd :: Nil, @@ -560,16 +560,7 @@ trait ExprBuilder extends TransformUtils with AsyncAnalysis { protected def mkStateTree(nextState: Int): Tree = { val transformState = currentTransformState val callSetter = Apply(transformState.memberRef(transformState.stateSetter), Literal(Constant(nextState)) :: Nil) - val printStateUpdates = false - val tree = if (printStateUpdates) { - Block( - callSetter :: Nil, - gen.mkMethodCall(definitions.PredefModule.info.member(TermName("println")), - currentTransformState.localTyper.typed(gen.mkApplyIfNeeded(transformState.memberRef(transformState.stateGetter)), definitions.ObjectTpe) :: Nil) - ) - } - else callSetter - typed(tree.updateAttachment(StateTransitionTree)) + typed(callSetter.updateAttachment(StateTransitionTree)) } } diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala index 3b75e95e7dd4..ab988783bd4e 100644 --- a/src/reflect/scala/reflect/internal/StdNames.scala +++ b/src/reflect/scala/reflect/internal/StdNames.scala @@ -864,6 +864,7 @@ trait StdNames { val state : NameType = "state" val tr : NameType = "tr$async" val t : NameType = "throwable$async" + val trGetResult : NameType = "tryGetResult$async" // quasiquote interpolators: val q: NameType = "q" From d998b1956c92ae7a28a0970bf3ee9d83555d91f6 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 12 Apr 2021 10:51:35 +1000 Subject: [PATCH 073/769] Allow custom async implementations to elide try/catch in generated code This makes sense if the scheduler maintains back-links to the futures that await the currently running future, and can propagate the failure. 
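A rough sketch of how a custom async front end would opt in, assuming the `markForAsyncTransform` call shape used by the test plugin earlier in this series; the async phase only checks for the presence of the config key (see the `config.contains` change below), so the mapped value is irrelevant:

    // Hypothetical opt-in: identical to the existing call except that the config map
    // carries the "allowExceptionsToPropagate" key instead of being Map.empty.
    // Only the key's presence is inspected; the value here is a placeholder.
    val applyMethodMarked = global.async.markForAsyncTransform(
      dd.symbol,                                  // owner of the transformed method
      applyMethod,                                // the DefDef holding the async body
      awaitSym,                                   // the custom await marker symbol
      Map("allowExceptionsToPropagate" -> "")     // elide the generated try/catch
    )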
--- .../nsc/transform/async/AsyncPhase.scala | 19 ++++++++-- .../async/AsyncTransformStates.scala | 1 + .../nsc/transform/async/ExprBuilder.scala | 37 ++++++++++--------- 3 files changed, 36 insertions(+), 21 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/async/AsyncPhase.scala b/src/compiler/scala/tools/nsc/transform/async/AsyncPhase.scala index 67dde5d0d560..4fe45306bd93 100644 --- a/src/compiler/scala/tools/nsc/transform/async/AsyncPhase.scala +++ b/src/compiler/scala/tools/nsc/transform/async/AsyncPhase.scala @@ -27,7 +27,9 @@ abstract class AsyncPhase extends Transform with TypingTransformers with AnfTran val phaseName: String = "async" override def enabled: Boolean = settings.async - private final case class AsyncAttachment(awaitSymbol: Symbol, postAnfTransform: Block => Block, stateDiagram: ((Symbol, Tree) => Option[String => Unit])) extends PlainAttachment + private final case class AsyncAttachment(awaitSymbol: Symbol, postAnfTransform: Block => Block, + stateDiagram: ((Symbol, Tree) => Option[String => Unit]), + allowExceptionsToPropagate: Boolean) extends PlainAttachment // Optimization: avoid the transform altogether if there are no async blocks in a unit. private val sourceFilesToTransform = perRunCaches.newSet[SourceFile]() @@ -45,7 +47,8 @@ abstract class AsyncPhase extends Transform with TypingTransformers with AnfTran sourceFilesToTransform += pos.source val postAnfTransform = config.getOrElse("postAnfTransform", (x: Block) => x).asInstanceOf[Block => Block] val stateDiagram = config.getOrElse("stateDiagram", (sym: Symbol, tree: Tree) => None).asInstanceOf[(Symbol, Tree) => Option[String => Unit]] - method.updateAttachment(new AsyncAttachment(awaitMethod, postAnfTransform, stateDiagram)) + val allowExceptionsToPropagate = config.contains("allowExceptionsToPropagate") + method.updateAttachment(new AsyncAttachment(awaitMethod, postAnfTransform, stateDiagram, allowExceptionsToPropagate)) // Wrap in `{ expr: Any }` to force value class boxing before calling `completeSuccess`, see test/async/run/value-class.scala deriveDefDef(method) { rhs => Block(Apply(gen.mkAttributedRef(definitions.Predef_locally), rhs :: Nil), Literal(Constant(()))) @@ -126,8 +129,16 @@ abstract class AsyncPhase extends Transform with TypingTransformers with AnfTran val trSym = dd.vparamss.head.last.symbol val selfSym = if (dd.symbol.owner.isTerm) dd.vparamss.head.head.symbol else NoSymbol val saved = currentTransformState - currentTransformState = new AsyncTransformState(asyncAttachment.awaitSymbol, - asyncAttachment.postAnfTransform, asyncAttachment.stateDiagram, this, selfSym, trSym, asyncBody.tpe, asyncNames) + currentTransformState = new AsyncTransformState( + asyncAttachment.awaitSymbol, + asyncAttachment.postAnfTransform, + asyncAttachment.stateDiagram, + asyncAttachment.allowExceptionsToPropagate, + this, + selfSym, + trSym, + asyncBody.tpe, + asyncNames) try { val (newRhs, liftedTrees) = asyncTransform(asyncBody) liftableMap(currentTransformState.stateMachineClass) = (dd.symbol, liftedTrees) diff --git a/src/compiler/scala/tools/nsc/transform/async/AsyncTransformStates.scala b/src/compiler/scala/tools/nsc/transform/async/AsyncTransformStates.scala index de93da7f26f8..004867ba0420 100644 --- a/src/compiler/scala/tools/nsc/transform/async/AsyncTransformStates.scala +++ b/src/compiler/scala/tools/nsc/transform/async/AsyncTransformStates.scala @@ -24,6 +24,7 @@ trait AsyncTransformStates extends TypingTransformers { class AsyncTransformState(val awaitSymbol: Symbol, val 
postAnfTransform: Block => Block, val dotDiagram: (Symbol, Tree) => Option[String => Unit], + val allowExceptionsToPropagate: Boolean, val typingTransformer: TypingTransformer, val exteralFsmSelfParam: Symbol, val applyTrParam: Symbol, diff --git a/src/compiler/scala/tools/nsc/transform/async/ExprBuilder.scala b/src/compiler/scala/tools/nsc/transform/async/ExprBuilder.scala index 95e83e1a95d1..052485f85b99 100644 --- a/src/compiler/scala/tools/nsc/transform/async/ExprBuilder.scala +++ b/src/compiler/scala/tools/nsc/transform/async/ExprBuilder.scala @@ -444,25 +444,28 @@ trait ExprBuilder extends TransformUtils with AsyncAnalysis { def onCompleteHandler: Tree = { val transformState = currentTransformState def stateMemberRef = gen.mkApplyIfNeeded(transformState.memberRef(transformState.stateGetter)) - val throww = Throw(Apply(Select(New(Ident(IllegalStateExceptionClass)), IllegalStateExceptionClass_NEW_String), List(gen.mkMethodCall(currentRun.runDefinitions.String_valueOf_Int, stateMemberRef :: Nil)))) + val asyncStatesInit = asyncStates.init // drop the terminal state which has no code. - val body = + val throww = Throw(Apply(Select(New(Ident(IllegalStateExceptionClass)), IllegalStateExceptionClass_NEW_String), List(gen.mkMethodCall(currentRun.runDefinitions.String_valueOf_Int, stateMemberRef :: Nil)))) + val body = typed(Match(stateMemberRef, - asyncStatesInit.map(_.mkHandlerCaseForState) ++ - List(CaseDef(Ident(nme.WILDCARD), EmptyTree, - throww)))) - - val body1 = compactStates(body.asInstanceOf[Match]) - - val stateMatch = Try( - body1, - List( - CaseDef( - Bind(nme.t, Typed(Ident(nme.WILDCARD), Ident(definitions.ThrowableClass))), - EmptyTree, - Block(Apply(currentTransformState.memberRef(currentTransformState.stateCompleteFailure), Ident(nme.t) :: Nil) :: Nil, Return(literalUnit)) - ) - ), EmptyTree) + asyncStatesInit.map(_.mkHandlerCaseForState) ++ + List(CaseDef(Ident(nme.WILDCARD), EmptyTree, + throww)))) + val body1 = compactStates(body.asInstanceOf[Match]) + val stateMatch = if (transformState.allowExceptionsToPropagate) { + body1 + } else { + Try( + body1, + List( + CaseDef( + Bind(nme.t, Typed(Ident(nme.WILDCARD), Ident(definitions.ThrowableClass))), + EmptyTree, + Block(Apply(currentTransformState.memberRef(currentTransformState.stateCompleteFailure), Ident(nme.t) :: Nil) :: Nil, Return(literalUnit)) + ) + ), EmptyTree) + } typed(LabelDef(transformState.whileLabel, Nil, Block(stateMatch :: Nil, Apply(Ident(transformState.whileLabel), Nil)))) } From 67bc98f249e11ce86f4ec474df38420d7c921982 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 12 Apr 2021 16:00:11 +1000 Subject: [PATCH 074/769] async: Elide tryGet call if not needed by future system --- .../async/AsyncTransformStates.scala | 3 +++ .../nsc/transform/async/ExprBuilder.scala | 20 +++++++++++-------- 2 files changed, 15 insertions(+), 8 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/async/AsyncTransformStates.scala b/src/compiler/scala/tools/nsc/transform/async/AsyncTransformStates.scala index 004867ba0420..d6c54d6c1315 100644 --- a/src/compiler/scala/tools/nsc/transform/async/AsyncTransformStates.scala +++ b/src/compiler/scala/tools/nsc/transform/async/AsyncTransformStates.scala @@ -51,6 +51,9 @@ trait AsyncTransformStates extends TypingTransformers { lazy val stateTryGet: Symbol = stateMachineMember(TermName("tryGet")) lazy val whileLabel: Symbol = applySym.newLabel(nme.WHILE_PREFIX).setInfo(MethodType(Nil, definitions.UnitTpe)) + lazy val tryGetIsIdentity: Boolean = exitingTyper { + 
stateTryGet.info.finalResultType.termSymbol == stateTryGet.firstParam + } def stateMachineMember(name: TermName): Symbol = stateMachineClass.info.member(name) def memberRef(sym: Symbol): Tree = diff --git a/src/compiler/scala/tools/nsc/transform/async/ExprBuilder.scala b/src/compiler/scala/tools/nsc/transform/async/ExprBuilder.scala index 052485f85b99..7efe0721e182 100644 --- a/src/compiler/scala/tools/nsc/transform/async/ExprBuilder.scala +++ b/src/compiler/scala/tools/nsc/transform/async/ExprBuilder.scala @@ -538,15 +538,19 @@ trait ExprBuilder extends TransformUtils with AsyncAnalysis { private def resumeTree(awaitableResult: ValDef): Tree = { def tryyReference = gen.mkAttributedIdent(currentTransformState.applyTrParam) deriveValDef(awaitableResult) { _ => - val temp = awaitableResult.symbol.newTermSymbol(nme.trGetResult).setInfo(definitions.ObjectTpe) - val tempVd = ValDef(temp, gen.mkMethodCall(currentTransformState.memberRef(currentTransformState.stateTryGet), tryyReference :: Nil)) - typed(Block( - tempVd :: Nil, + if (currentTransformState.tryGetIsIdentity) { + tryyReference + } else { + val temp = awaitableResult.symbol.newTermSymbol(nme.trGetResult).setInfo(definitions.ObjectTpe) + val tempVd = ValDef(temp, gen.mkMethodCall(currentTransformState.memberRef(currentTransformState.stateTryGet), tryyReference :: Nil)) + typed(Block( + tempVd :: Nil, If(Apply(gen.mkAttributedSelect(currentTransformState.stateMachineRef(), definitions.Object_eq), gen.mkAttributedIdent(temp) :: Nil), - Return(literalUnit), - gen.mkCast(gen.mkAttributedIdent(temp), tempVd.symbol.info) - ) - )) + Return(literalUnit), + gen.mkCast(gen.mkAttributedIdent(temp), tempVd.symbol.info) + ) + )) + } } } From baefaea94709cd997705fa8a23cbab032939dfed Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 12 Apr 2021 16:22:03 +1000 Subject: [PATCH 075/769] Don't introduce dead code null assignments --- .../scala/tools/nsc/transform/async/AsyncPhase.scala | 2 +- .../scala/tools/nsc/transform/async/ExprBuilder.scala | 9 +++++++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/transform/async/AsyncPhase.scala b/src/compiler/scala/tools/nsc/transform/async/AsyncPhase.scala index 4fe45306bd93..ae4c81727ca3 100644 --- a/src/compiler/scala/tools/nsc/transform/async/AsyncPhase.scala +++ b/src/compiler/scala/tools/nsc/transform/async/AsyncPhase.scala @@ -182,7 +182,7 @@ abstract class AsyncPhase extends Transform with TypingTransformers with AnfTran if (nullOut) { for ((state, (preNulls, postNulls)) <- fieldsToNullOut(asyncBlock.asyncStates, asyncBlock.asyncStates.last, liftedFields)) { val asyncState = asyncBlock.asyncStates.find(_.state == state).get - if (asyncState.nextStates.nonEmpty) + if (asyncState.hasNonTerminalNextState) asyncState.insertNullAssignments(preNulls.iterator, postNulls.iterator) } } diff --git a/src/compiler/scala/tools/nsc/transform/async/ExprBuilder.scala b/src/compiler/scala/tools/nsc/transform/async/ExprBuilder.scala index 7efe0721e182..844c0994bc38 100644 --- a/src/compiler/scala/tools/nsc/transform/async/ExprBuilder.scala +++ b/src/compiler/scala/tools/nsc/transform/async/ExprBuilder.scala @@ -40,6 +40,15 @@ trait ExprBuilder extends TransformUtils with AsyncAnalysis { } } final class AsyncState(var stats: List[Tree], val state: Int, var nextStates: Array[Int], val isEmpty: Boolean) { + def hasNonTerminalNextState: Boolean = { + var i = 0 + val ns = nextStates + while (i < ns.length) { + if (ns(i) != StateAssigner.Terminal) return true + i += 1 + } + false 
+ } def mkHandlerCaseForState: CaseDef = { replaceResidualJumpsWithStateTransitions.transform(CaseDef(Literal(Constant(state)), EmptyTree, adaptToUnit(stats))).asInstanceOf[CaseDef] } From d1392f330a2bc48fc03c3c3cde9068dec87767a2 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Thu, 15 Apr 2021 15:03:00 +0100 Subject: [PATCH 076/769] Give run/string-switch-pos an alternative pattern case ... for the next commit. --- test/files/run/string-switch-pos.check | 32 ++++++++++++++++++-------- test/files/run/string-switch-pos.scala | 3 ++- 2 files changed, 24 insertions(+), 11 deletions(-) diff --git a/test/files/run/string-switch-pos.check b/test/files/run/string-switch-pos.check index 6b292d0a3808..b0f7b64d08bd 100644 --- a/test/files/run/string-switch-pos.check +++ b/test/files/run/string-switch-pos.check @@ -1,11 +1,11 @@ [[syntax trees at end of patmat]] // newSource1.scala -[0:187]package [0:0] { - [0:187]class Switch extends [13:187][187]scala.AnyRef { - [187]def (): [13]Switch = [187]{ - [187][187][187]Switch.super.(); +[0:216]package [0:0] { + [0:216]class Switch extends [13:216][216]scala.AnyRef { + [216]def (): [13]Switch = [216]{ + [216][216][216]Switch.super.(); [13]() }; - [17:185]def switch([28:37]s: [31:37], [39:52]cond: [45:52]): [21]Int = [56:57]{ + [17:214]def switch([28:37]s: [31:37], [39:52]cond: [45:52]): [21]Int = [56:57]{ [56:57]case val x1: [56]String = [56:57]s; [56:57][56:57]x1 match { [56:57]case [75:81]"AaAa" => [93:94]1 @@ -14,6 +14,7 @@ [151:152]3 else [180:181]4 + [56:57]case [56:57]([191:197]"CcCc"| [200:205]"Cc2") => [209:210]5 [56:57]case [56:57]_ => [56:57]throw [56:57][56:57][56:57]new [56:57]MatchError([56:57]x1) } } @@ -21,15 +22,19 @@ } [[syntax trees at end of cleanup]] // newSource1.scala -[0:187]package [0:0] { - [0:187]class Switch extends [13:187][13:187]Object { - [17:185]def switch([28:37]s: [31:37], [39:52]cond: [45:52]): [21]Int = [56:57]{ +[0:216]package [0:0] { + [0:216]class Switch extends [13:216][13:216]Object { + [17:214]def switch([28:37]s: [31:37], [39:52]cond: [45:52]): [21]Int = [56:57]{ [56:57]case val x1: [56]String = [56:57]s; [56:57]{ [56:139][56:57]if ([56][56]x1.eq([56]null)) [56]0 else [56][56]x1.hashCode() match { + [56:57]case [56]67506 => [56:57]if ([56][56][56]"Cc2".equals([56]x1)) + [56][56]case4() + else + [56][56]matchEnd2() [75:81]case [56]2031744 => [75:81]if ([75][75][75]"AaAa".equals([75]x1)) [75][75]case1() else @@ -38,6 +43,10 @@ [133][133]case3() else [56][56]matchEnd2() + [56:57]case [56]2093312 => [56:57]if ([56][56][56]"CcCc".equals([56]x1)) + [56][56]case4() + else + [56][56]matchEnd2() [104:110]case [56]3003444 => [104:110]if ([104][104][104]"asdf".equals([104]x1)) [104][104]case2() else @@ -56,6 +65,9 @@ else [180:181]4) }; + [56]case4(){ + [56][56]matchEnd1([209:210]5) + }; [56]matchEnd2(){ [56][56]matchEnd1([56:57]throw [56:57][56:57][56:57]new [56:57]MatchError([56:57]x1)) }; @@ -64,8 +76,8 @@ } } }; - [187]def (): [13]Switch = [187]{ - [187][187][187]Switch.super.(); + [216]def (): [13]Switch = [216]{ + [216][216][216]Switch.super.(); [13]() } } diff --git a/test/files/run/string-switch-pos.scala b/test/files/run/string-switch-pos.scala index a75208046391..db093bc93a55 100644 --- a/test/files/run/string-switch-pos.scala +++ b/test/files/run/string-switch-pos.scala @@ -10,9 +10,10 @@ object Test extends DirectTest { | case "asdf" => 2 | case "BbBb" if cond => 3 | case "BbBb" => 4 + | case "CcCc" | "Cc2" => 5 | } |} """.stripMargin.trim override def show(): Unit = Console.withErr(Console.out) { super.compile() 
} -} \ No newline at end of file +} From 755a1243d2dca0e495c3a2b22408336f39fd6693 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Thu, 15 Apr 2021 15:03:47 +0100 Subject: [PATCH 077/769] Rework string switching to use less labels/gotos Labels are necessary when the same body is shared by alternative strings. However, I believe that to be much rarer than the simple string cases. So avoid creating labels & gotos for those simple cases. --- .../scala/tools/nsc/ast/TreeDSL.scala | 12 +- .../scala/tools/nsc/transform/CleanUp.scala | 179 ++++++++---------- test/files/run/string-switch-pos.check | 35 ++-- 3 files changed, 102 insertions(+), 124 deletions(-) diff --git a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala index 7281d66aa807..1dfb5d72ac59 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala @@ -65,16 +65,14 @@ trait TreeDSL { * a member called nme.EQ. Not sure if that should happen, but we can be * robust by dragging in Any regardless. */ - def MEMBER_== (other: Tree) = { - val opSym = if (target.tpe == null) NoSymbol else target.tpe member nme.EQ - if (opSym == NoSymbol) ANY_==(other) - else fn(target, opSym, other) - } + def MEMBER_== (other: Tree) = fn(target, (if (target.tpe == null) NoSymbol else target.tpe member nme.EQ).orElse(Any_==), other) def ANY_EQ (other: Tree) = OBJ_EQ(other AS ObjectTpe) def ANY_== (other: Tree) = fn(target, Any_==, other) def ANY_!= (other: Tree) = fn(target, Any_!=, other) - def OBJ_EQ (other: Tree) = fn(target, Object_eq, other) - def OBJ_NE (other: Tree) = fn(target, Object_ne, other) + def OBJ_EQ (other: Tree) = fn(target, Object_eq, other) + def OBJ_NE (other: Tree) = fn(target, Object_ne, other) + def OBJ_== (other: Tree) = fn(target, Object_equals, other) + def OBJ_## = fn(target, Object_hashCode) def INT_>= (other: Tree) = fn(target, getMember(IntClass, nme.GE), other) def INT_== (other: Tree) = fn(target, getMember(IntClass, nme.EQ), other) diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala index aaec0a0a314b..b2242116a7c3 100644 --- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala +++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala @@ -17,14 +17,14 @@ import symtab._ import Flags._ import scala.collection._ import scala.tools.nsc.Reporting.WarningCategory +import scala.util.chaining._ abstract class CleanUp extends Statics with Transform with ast.TreeDSL { import global._ import definitions._ import CODE._ - import treeInfo.StripCast + import treeInfo.{ SYNTH_CASE_FLAGS, isDefaultCase, StripCast } - /** the following two members override abstract members in Transform */ val phaseName: String = "cleanup" /* used in GenBCode: collects ClassDef symbols owning a main(Array[String]) method */ @@ -398,105 +398,94 @@ abstract class CleanUp extends Statics with Transform with ast.TreeDSL { } } - // transform scrutinee of all matches to ints - def transformSwitch(sw: Match): Tree = { import CODE._ - sw.selector.tpe.widen match { - case IntTpe => sw // can switch directly on ints - case StringTpe => - // these assumptions about the shape of the tree are justified by the codegen in MatchOptimization - val Match(Typed(selTree, _), cases) = sw: @unchecked - def selArg = selTree match { - case x: Ident => REF(x.symbol) - case x: Literal => x - case x => throw new MatchError(x) - } - val restpe = sw.tpe - val swPos = sw.pos.focus - - /* From this: - * string match { 
case "AaAa" => 1 case "BBBB" | "c" => 2 case _ => 3} - * Generate this: - * string.## match { - * case 2031744 => - * if ("AaAa" equals string) goto match1 - * else if ("BBBB" equals string) goto match2 - * else goto matchFailure - * case 99 => - * if ("c" equals string) goto match2 - * else goto matchFailure - * case _ => goto matchFailure - * } - * match1: goto matchSuccess (1) - * match2: goto matchSuccess (2) - * matchFailure: goto matchSuccess (3) // would be throw new MatchError(string) if no default was given - * matchSuccess(res: Int): res - * This proliferation of labels is needed to handle alternative patterns, since multiple branches in the - * resulting switch may need to correspond to a single case body. - */ - - val stats = mutable.ListBuffer.empty[Tree] - var failureBody = Throw(New(definitions.MatchErrorClass.tpe_*, selArg)) : Tree - - // genbcode isn't thrilled about seeing labels with Unit arguments, so `success`'s type is one of - // `${sw.tpe} => ${sw.tpe}` or `() => Unit` depending. - val success = { - val lab = currentOwner.newLabel(unit.freshTermName("matchEnd"), swPos) - if (restpe =:= UnitTpe) { - lab.setInfo(MethodType(Nil, restpe)) - } else { - lab.setInfo(MethodType(lab.newValueParameter(nme.x_1).setInfo(restpe) :: Nil, restpe)) - } - } - def succeed(res: Tree): Tree = - if (restpe =:= UnitTpe) BLOCK(res, REF(success) APPLY Nil) else REF(success) APPLY res - - val failure = currentOwner.newLabel(unit.freshTermName("matchEnd"), swPos).setInfo(MethodType(Nil, restpe)) - def fail(): Tree = atPos(swPos) { Apply(REF(failure), Nil) } - - val ifNull = LIT(0) - val noNull = Apply(selArg DOT Object_hashCode, Nil) - - val newSel = selTree match { - case _: Ident => atPos(selTree.symbol.pos) { IF(selTree.symbol OBJ_EQ NULL) THEN ifNull ELSE noNull } - case x: Literal => atPos(selTree.pos) { if (x.value.value == null) ifNull else noNull } - case x => throw new MatchError(x) + private def transformStringSwitch(sw: Match): Tree = { import CODE._ + // these assumptions about the shape of the tree are justified by the codegen in MatchOptimization + val Match(Typed(selTree, _), cases) = sw: @unchecked + def selArg = selTree match { + case x: Ident => REF(x.symbol) + case x: Literal => x + case x => throw new MatchError(x) + } + val newSel = selTree match { + case x: Ident => atPos(x.symbol.pos)(IF (x.symbol OBJ_EQ NULL) THEN ZERO ELSE selArg.OBJ_##) + case x: Literal => atPos(x.pos) (if (x.value.value == null) ZERO else selArg.OBJ_##) + case x => throw new MatchError(x) + } + val restpe = sw.tpe + val resUnit = restpe =:= UnitTpe + val swPos = sw.pos.focus + + /* From this: + * string match { case "AaAa" => 1 case "BBBB" | "c" => 2 case _ => 3 } + * Generate this: + * string.## match { + * case 2031744 => + * if ("AaAa" equals string) goto matchEnd (1) + * else if ("BBBB" equals string) goto case2 + * else goto defaultCase + * case 99 => + * if ("c" equals string) goto case2 + * else goto defaultCase + * case _ => goto defaultCase + * } + * case2: goto matchEnd (2) + * defaultCase: goto matchEnd (3) // or `goto matchEnd (throw new MatchError(string))` if no default was given + * matchEnd(res: Int): res + * Extra labels are added for alternative patterns branches, since multiple branches in the + * resulting switch may need to correspond to a single case body. 
+ */ + + val labels = mutable.ListBuffer.empty[LabelDef] + var defaultCaseBody = Throw(New(MatchErrorClass.tpe_*, selArg)): Tree + + def LABEL(name: String) = currentOwner.newLabel(unit.freshTermName(name), swPos).setFlag(SYNTH_CASE_FLAGS) + def newCase() = LABEL( "case").setInfo(MethodType(Nil, restpe)) + val defaultCase = LABEL("defaultCase").setInfo(MethodType(Nil, restpe)) + val matchEnd = LABEL("matchEnd").tap { lab => + // genbcode isn't thrilled about seeing labels with Unit arguments, so `success`'s type is one of + // `${sw.tpe} => ${sw.tpe}` or `() => Unit` depending. + lab.setInfo(MethodType(if (resUnit) Nil else List(lab.newSyntheticValueParam(restpe)), restpe)) + } + def goto(sym: Symbol, params: Tree*) = REF(sym) APPLY (params: _*) + def gotoEnd(body: Tree) = if (resUnit) BLOCK(body, goto(matchEnd)) else goto(matchEnd, body) + + val casesByHash = cases.flatMap { + case cd@CaseDef(StringsPattern(strs), _, body) => + val jump = newCase() // always create a label so when its used it matches the source case (e.g. `case4()`) + strs match { + case str :: Nil => List((str, gotoEnd(body), cd.pat.pos)) + case _ => + labels += LabelDef(jump, Nil, gotoEnd(body)) + strs.map((_, goto(jump), cd.pat.pos)) } - val casesByHash = - cases.flatMap { - case cd@CaseDef(StringsPattern(strs), _, body) => - val jump = currentOwner.newLabel(unit.freshTermName("case"), swPos).setInfo(MethodType(Nil, restpe)) - stats += LabelDef(jump, Nil, succeed(body)) - strs.map((_, jump, cd.pat.pos)) - case cd@CaseDef(Ident(nme.WILDCARD), _, body) => - failureBody = succeed(body) - None - case cd => globalError(s"unhandled in switch: $cd"); None - }.groupBy(_._1.##) - val newCases = casesByHash.toList.sortBy(_._1).map { - case (hash, cases) => - val newBody = cases.foldLeft(fail()) { - case (next, (pat, jump, pos)) => - val comparison = if (pat == null) Object_eq else Object_equals - atPos(pos) { - IF(LIT(pat) DOT comparison APPLY selArg) THEN (REF(jump) APPLY Nil) ELSE next - } - } - CaseDef(LIT(hash), EmptyTree, newBody) + case cd if isDefaultCase(cd) => defaultCaseBody = gotoEnd(cd.body); None + case cd => globalError(s"unhandled in switch: $cd"); None + }.groupBy(_._1.##) + + val newCases = casesByHash.toList.sortBy(_._1).map { + case (hash, cases) => + val newBody = cases.foldRight(atPos(swPos)(goto(defaultCase): Tree)) { + case ((null, rhs, pos), next) => atPos(pos)(IF (NULL OBJ_EQ selArg) THEN rhs ELSE next) + case ((str, rhs, pos), next) => atPos(pos)(IF (LIT(str) OBJ_== selArg) THEN rhs ELSE next) } + CASE(LIT(hash)) ==> newBody + } - stats += LabelDef(failure, Nil, failureBody) + labels += LabelDef(defaultCase, Nil, defaultCaseBody) + labels += LabelDef(matchEnd, matchEnd.info.params, matchEnd.info.params.headOption.fold(UNIT: Tree)(REF)) - stats += (if (restpe =:= UnitTpe) { - LabelDef(success, Nil, gen.mkLiteralUnit) - } else { - LabelDef(success, success.info.params.head :: Nil, REF(success.info.params.head)) - }) + val stats = Match(newSel, newCases :+ (DEFAULT ==> goto(defaultCase))) :: labels.toList - stats prepend Match(newSel, newCases :+ CaseDef(Ident(nme.WILDCARD), EmptyTree, fail())) + val res = Block(stats: _*) + localTyper.typedPos(sw.pos)(res) + } - val res = Block(stats.result() : _*) - localTyper.typedPos(sw.pos)(res) - case _ => globalError(s"unhandled switch scrutinee type ${sw.selector.tpe}: $sw"); sw + // transform scrutinee of all matches to switchable types (ints, strings) + def transformSwitch(sw: Match): Tree = { + sw.selector.tpe.widen match { + case IntTpe => sw // can switch 
directly on ints + case StringTpe => transformStringSwitch(sw) + case _ => globalError(s"unhandled switch scrutinee type ${sw.selector.tpe}: $sw"); sw } } diff --git a/test/files/run/string-switch-pos.check b/test/files/run/string-switch-pos.check index b0f7b64d08bd..805f5a3143bd 100644 --- a/test/files/run/string-switch-pos.check +++ b/test/files/run/string-switch-pos.check @@ -34,41 +34,32 @@ [56:57]case [56]67506 => [56:57]if ([56][56][56]"Cc2".equals([56]x1)) [56][56]case4() else - [56][56]matchEnd2() + [56][56]defaultCase1() [75:81]case [56]2031744 => [75:81]if ([75][75][75]"AaAa".equals([75]x1)) - [75][75]case1() + [93:94][75]matchEnd1([93:94]1) else - [56][56]matchEnd2() + [56][56]defaultCase1() [133:139]case [56]2062528 => [133:139]if ([133][133][133]"BbBb".equals([133]x1)) - [133][133]case3() + [143:181][133]matchEnd1([143:181]if ([143:147]cond) + [151:152]3 + else + [180:181]4) else - [56][56]matchEnd2() + [56][56]defaultCase1() [56:57]case [56]2093312 => [56:57]if ([56][56][56]"CcCc".equals([56]x1)) [56][56]case4() else - [56][56]matchEnd2() + [56][56]defaultCase1() [104:110]case [56]3003444 => [104:110]if ([104][104][104]"asdf".equals([104]x1)) - [104][104]case2() + [122:123][104]matchEnd1([122:123]2) else - [56][56]matchEnd2() - [56]case [56]_ => [56][56]matchEnd2() - }; - [56]case1(){ - [56][56]matchEnd1([93:94]1) - }; - [56]case2(){ - [56][56]matchEnd1([122:123]2) - }; - [56]case3(){ - [56][56]matchEnd1([143:181]if ([143:147]cond) - [151:152]3 - else - [180:181]4) + [56][56]defaultCase1() + [56]case [56]_ => [56][56]defaultCase1() }; [56]case4(){ [56][56]matchEnd1([209:210]5) }; - [56]matchEnd2(){ + [56]defaultCase1(){ [56][56]matchEnd1([56:57]throw [56:57][56:57][56:57]new [56:57]MatchError([56:57]x1)) }; [56]matchEnd1(x$1: [NoPosition]Int){ From 2c1ca22ad72abae1c10c99a47e8cced6f3341d28 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E6=A2=A6=E5=A2=83=E8=BF=B7=E7=A6=BB?= Date: Thu, 1 Apr 2021 15:17:30 +0800 Subject: [PATCH 078/769] Emit LocalVariableTable for mirror methods --- .../tools/nsc/backend/jvm/BCodeHelpers.scala | 23 +++++++++----- .../tools/nsc/backend/jvm/BytecodeTest.scala | 30 +++++++++++++++++++ 2 files changed, 46 insertions(+), 7 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala index da8f110d5be6..c12a631da054 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala @@ -14,14 +14,15 @@ package scala package tools.nsc package backend.jvm +import scala.PartialFunction.cond +import scala.annotation.tailrec import scala.tools.asm -import BackendReporting._ -import scala.tools.asm.ClassWriter +import scala.tools.asm.{ClassWriter, Label} +import scala.tools.nsc.Reporting.WarningCategory import scala.tools.nsc.backend.jvm.BCodeHelpers.ScalaSigBytes +import scala.tools.nsc.backend.jvm.BackendReporting._ import scala.tools.nsc.reporters.NoReporter -import PartialFunction.cond -import scala.annotation.tailrec -import scala.tools.nsc.Reporting.WarningCategory +import scala.util.chaining.scalaUtilChainingOps /* * Traits encapsulating functionality to convert Scala AST Trees into ASM ClassNodes. 
@@ -31,9 +32,9 @@ import scala.tools.nsc.Reporting.WarningCategory */ abstract class BCodeHelpers extends BCodeIdiomatic { import global._ - import definitions._ import bTypes._ import coreBTypes._ + import definitions._ import genBCode.postProcessor.backendUtils /** @@ -365,7 +366,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic { */ trait BCPickles { - import scala.reflect.internal.pickling.{ PickleFormat, PickleBuffer } + import scala.reflect.internal.pickling.{PickleBuffer, PickleFormat} val versionPickle = { val vp = new PickleBuffer(new Array[Byte](16), -1, 0) @@ -794,6 +795,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic { mirrorMethod.visitCode() + val codeStart: Label = new Label().tap(mirrorMethod.visitLabel) mirrorMethod.visitFieldInsn(asm.Opcodes.GETSTATIC, moduleName, strMODULE_INSTANCE_FIELD, classBTypeFromSymbol(moduleClass).descriptor) var index = 0 @@ -805,6 +807,13 @@ abstract class BCodeHelpers extends BCodeIdiomatic { mirrorMethod.visitMethodInsn(asm.Opcodes.INVOKEVIRTUAL, moduleName, mirrorMethodName, methodBTypeFromSymbol(m).descriptor, false) mirrorMethod.visitInsn(jReturnType.typedOpcode(asm.Opcodes.IRETURN)) + val codeEnd = new Label().tap(mirrorMethod.visitLabel) + + methodInfo.params.lazyZip(paramJavaTypes).foldLeft(0) { + case (idx, (p, tp)) => + mirrorMethod.visitLocalVariable(p.name.encoded, tp.descriptor, null, codeStart, codeEnd, idx) + idx + tp.size + } mirrorMethod.visitMaxs(0, 0) // just to follow protocol, dummy arguments mirrorMethod.visitEnd() diff --git a/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala b/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala index b67ee23b13e3..4bc7e2035e2e 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala @@ -10,6 +10,7 @@ import scala.tools.testkit.ASMConverters._ import scala.tools.testkit.BytecodeTesting import scala.tools.testkit.BytecodeTesting._ import scala.tools.asm.Opcodes +import scala.tools.asm.tree.MethodNode class BytecodeTest extends BytecodeTesting { import compiler._ @@ -343,4 +344,33 @@ class BytecodeTest extends BytecodeTesting { val a = A.fields.asScala.find(_.name == "a").get assertEquals(0, a.access & Opcodes.ACC_FINAL) } + + @Test + def t12362(): Unit = { + val code = + """object Test { + | def foo(value: String) = { + | println(value) + | } + | + | def abcde(value1: String, value2: Long, value3: Double, value4: Int, value5: Double): Double = { + | println(value1) + | value5 + | } + |}""".stripMargin + + val List(mirror, _) = compileClasses(code) + assertEquals(mirror.name, "Test") + + val foo = getAsmMethod(mirror, "foo") + val abcde = getAsmMethod(mirror, "abcde") + + def t(m: MethodNode, r: List[(String, String, Int)]) = { + assertTrue((m.access & Opcodes.ACC_STATIC) != 0) + assertEquals(r, m.localVariables.asScala.toList.map(l => (l.desc, l.name, l.index))) + } + + t(foo, List(("Ljava/lang/String;", "value", 0))) + t(abcde, List(("Ljava/lang/String;", "value1", 0), ("J", "value2", 1), ("D", "value3", 3), ("I", "value4", 5), ("D", "value5", 6))) + } } From e5e5cc36ff3e5d9dc1444a8b070b5173bed11776 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Thu, 15 Apr 2021 09:20:24 -0700 Subject: [PATCH 079/769] Check for bad param names on overload When issuing the error, notice if a param name does not apply to any alternative. 
--- .../tools/nsc/typechecker/ContextErrors.scala | 9 ++++++++- test/files/neg/annots-constant-neg.check | 2 +- test/files/neg/t12347.check | 10 ++++++++++ test/files/neg/t12347.scala | 16 ++++++++++++++++ test/files/neg/t2488.check | 6 +++--- 5 files changed, 38 insertions(+), 5 deletions(-) create mode 100644 test/files/neg/t12347.check create mode 100644 test/files/neg/t12347.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index 04c5258561d6..a2948f8deccc 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -16,6 +16,7 @@ package typechecker import scala.reflect.internal.util.StringOps.{countAsString, countElementsAsString} import java.lang.System.{lineSeparator => EOL} +import scala.PartialFunction.cond import scala.annotation.tailrec import scala.reflect.runtime.ReflectionUtils import scala.reflect.macros.runtime.AbortMacroException @@ -1166,9 +1167,15 @@ trait ContextErrors { val proscription = if (tree.symbol.isConstructor) " cannot be invoked with " else " cannot be applied to " + val junkNames = { + val bads = argtpes.collect { + case NamedType(name, _) if !alts.exists(cond(_) { case MethodType(params, _) => params.exists(_.name == name) }) => name.decoded + } + if (bads.isEmpty) "" else bads.mkString(" [which have no such parameter ", ",", "]") + } issueNormalTypeError(tree, - applyErrorMsg(tree, proscription, widenedArgtpes, pt)) + applyErrorMsg(tree, junkNames + proscription, widenedArgtpes, pt)) // since inferMethodAlternative modifies the state of the tree // we have to set the type of tree to ErrorType only in the very last // fallback action that is done in the inference. 
diff --git a/test/files/neg/annots-constant-neg.check b/test/files/neg/annots-constant-neg.check index 800e06c70489..f531b2a98540 100644 --- a/test/files/neg/annots-constant-neg.check +++ b/test/files/neg/annots-constant-neg.check @@ -79,7 +79,7 @@ Test.scala:71: error: annotation argument needs to be a constant; found: new sca Test.scala:76: error: multiple constructors for Ann1 with alternatives: (s: String)Ann1 (value: Int)Ann1 - cannot be invoked with (x: String) + [which have no such parameter x] cannot be invoked with (x: String) @Ann1(x = "") def v4 = 0 // err ^ Test.scala:78: error: Ann1 does not take parameters diff --git a/test/files/neg/t12347.check b/test/files/neg/t12347.check new file mode 100644 index 000000000000..0476089c1c4e --- /dev/null +++ b/test/files/neg/t12347.check @@ -0,0 +1,10 @@ +t12347.scala:14: error: unknown parameter name: x + X.f(n = count, x = text) + ^ +t12347.scala:15: error: overloaded method f with alternatives: + (s: String)String + (n: Int,s: String)String + [which have no such parameter x] cannot be applied to (n: Int, x: String) + Y.f(n = count, x = text) + ^ +2 errors diff --git a/test/files/neg/t12347.scala b/test/files/neg/t12347.scala new file mode 100644 index 000000000000..1795ecfc8320 --- /dev/null +++ b/test/files/neg/t12347.scala @@ -0,0 +1,16 @@ + +object X { + def f(n: Int, s: String) = s * n +} + +object Y { + def f(n: Int, s: String) = s * n + def f(s: String) = s * 3 +} + +object Test extends App { + def count = 2 + def text = "hi" + X.f(n = count, x = text) + Y.f(n = count, x = text) +} diff --git a/test/files/neg/t2488.check b/test/files/neg/t2488.check index f69ca0a939dc..03b6838519d1 100644 --- a/test/files/neg/t2488.check +++ b/test/files/neg/t2488.check @@ -7,19 +7,19 @@ t2488.scala:7: error: overloaded method f with alternatives: t2488.scala:8: error: overloaded method f with alternatives: ()Int (a: Int,b: Int)Int - cannot be applied to (a: Int, c: Int) + [which have no such parameter c] cannot be applied to (a: Int, c: Int) println(c.f(a = 2, c = 2)) ^ t2488.scala:9: error: overloaded method f with alternatives: ()Int (a: Int,b: Int)Int - cannot be applied to (Int, c: Int) + [which have no such parameter c] cannot be applied to (Int, c: Int) println(c.f(2, c = 2)) ^ t2488.scala:10: error: overloaded method f with alternatives: ()Int (a: Int,b: Int)Int - cannot be applied to (c: Int, Int) + [which have no such parameter c] cannot be applied to (c: Int, Int) println(c.f(c = 2, 2)) ^ t2488.scala:11: error: overloaded method f with alternatives: From a91eea6ce9c2ce270003346f726f13777fe77128 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Thu, 15 Apr 2021 10:05:08 +0100 Subject: [PATCH 080/769] Drop old ScalaVersions With if (s.value < version213) errorFn.apply(s"-Xsource must be at least the current major version (${version213.versionString})") I think -Xsource can never be < 2.13.0. So all those isScala212 and isScala213 are trues in disguise. Let's get rid of them. Also we can cache the isScala3 comparison in a boolean setting that is managed by Xsource's postSetHook. 
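Condensed from the ScalaSettings.scala diff below (setting descriptions and the error message are abbreviated here), the caching pattern is roughly:

    // The derived boolean setting is kept in sync by -Xsource's postSetHook, so the
    // rest of the compiler reads a cached flag instead of re-comparing ScalaVersions.
    val isScala3 = BooleanSetting("isScala3", "Is -Xsource Scala 3?").internalOnly()
    val source = ScalaVersionSetting("-Xsource", "version", "...", initial = ScalaVersion("2.13"))
      .withPostSetHook { s =>
        if (s.value >= ScalaVersion("3")) isScala3.value = true
        else if (s.value >= ScalaVersion("2.14")) s.withDeprecationMessage("instead of -Xsource:2.14, use -Xsource:3").value = ScalaVersion("3")
        else if (s.value < ScalaVersion("2.13")) errorFn.apply("-Xsource must be at least the current major version")
      }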
--- src/compiler/scala/tools/nsc/Global.scala | 4 +--- src/compiler/scala/tools/nsc/Parsing.scala | 1 - .../scala/tools/nsc/ast/parser/Parsers.scala | 9 +------ .../scala/tools/nsc/ast/parser/Scanners.scala | 4 +--- .../tools/nsc/settings/ScalaSettings.scala | 24 +++++++------------ .../scala/tools/nsc/transform/UnCurry.scala | 2 +- .../tools/nsc/typechecker/ContextErrors.scala | 2 +- .../tools/nsc/typechecker/Contexts.scala | 2 +- .../tools/nsc/typechecker/Implicits.scala | 24 ++++--------------- .../scala/tools/nsc/typechecker/Namers.scala | 2 +- .../tools/nsc/typechecker/RefChecks.scala | 2 +- .../tools/nsc/typechecker/Unapplies.scala | 2 +- .../scala/reflect/internal/Definitions.scala | 3 +-- .../scala/reflect/internal/Types.scala | 2 +- .../internal/settings/MutableSettings.scala | 3 --- .../reflect/internal/tpe/TypeComparers.scala | 2 +- .../scala/reflect/runtime/Settings.scala | 6 ++--- 17 files changed, 28 insertions(+), 66 deletions(-) diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index a80c5dbf4d51..07389b5c2d9f 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -1154,9 +1154,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) keepPhaseStack = settings.log.isSetByUser // We hit these checks regularly. They shouldn't change inside the same run, so cache the comparisons here. - val isScala212: Boolean = settings.isScala212 - val isScala213: Boolean = settings.isScala213 - val isScala3: Boolean = settings.isScala3 + val isScala3 = settings.isScala3 // used in sbt def uncheckedWarnings: List[(Position, String)] = reporting.uncheckedWarnings diff --git a/src/compiler/scala/tools/nsc/Parsing.scala b/src/compiler/scala/tools/nsc/Parsing.scala index 673d30bf237c..7d48e27678d8 100644 --- a/src/compiler/scala/tools/nsc/Parsing.scala +++ b/src/compiler/scala/tools/nsc/Parsing.scala @@ -22,7 +22,6 @@ trait Parsing { self : Positions with Reporting => trait RunParsing { val parsing: PerRunParsing = new PerRunParsing - def isScala213: Boolean } class PerRunParsing { diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 22e4dc86e691..e84248e4663b 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -2366,14 +2366,7 @@ self => if (vds.isEmpty) syntaxError(start, s"case classes must have a parameter list; try 'case class $name()' or 'case object $name'") else if (vds.head.nonEmpty && vds.head.head.mods.isImplicit) { - if (currentRun.isScala213) - syntaxError(start, s"case classes must have a non-implicit parameter list; try 'case class $name()$elliptical'") - else { - deprecationWarning(start, s"case classes should have a non-implicit parameter list; adapting to 'case class $name()$elliptical'", "2.12.2") - vds.insert(0, List.empty[ValDef]) - vds(1) = vds(1).map(vd => copyValDef(vd)(mods = vd.mods & ~Flags.CASEACCESSOR)) - if (implicitSection != -1) implicitSection += 1 - } + syntaxError(start, s"case classes must have a non-implicit parameter list; try 'case class $name()$elliptical'") } } if (implicitSection != -1 && implicitSection != vds.length - 1) diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala index 5c165a6dfed0..30916cc29d6b 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala +++ 
b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala @@ -670,11 +670,9 @@ trait Scanners extends ScannersCommon { val isEmptyCharLit = (ch == '\'') getLitChar() if (ch == '\'') { - if (isEmptyCharLit && currentRun.isScala213) + if (isEmptyCharLit) syntaxError("empty character literal (use '\\'' for single quote)") else { - if (isEmptyCharLit) - deprecationWarning("deprecated syntax for character literal (use '\\'' for single quote)", "2.12.2") nextChar() token = CHARLIT setStrVal() diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index d070a7870652..5d5518ee59d8 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -92,18 +92,6 @@ trait ScalaSettings extends StandardScalaSettings with Warnings { _: MutableSett } withAbbreviation "--release" def releaseValue: Option[String] = Option(release.value).filter(_ != "") - /* - * The previous "-Xsource" option is intended to be used mainly - * though this helper. - */ - private[this] val version212 = ScalaVersion("2.12.0") - def isScala212: Boolean = source.value >= version212 - private[this] val version213 = ScalaVersion("2.13.0") - def isScala213: Boolean = source.value >= version213 - private[this] val version214 = ScalaVersion("2.14.0") - private[this] val version3 = ScalaVersion("3.0.0") - def isScala3: Boolean = source.value >= version3 - /** * -X "Advanced" settings */ @@ -145,10 +133,16 @@ trait ScalaSettings extends StandardScalaSettings with Warnings { _: MutableSett val mainClass = StringSetting ("-Xmain-class", "path", "Class for manifest's Main-Class entry (only useful with -d )", "") val sourceReader = StringSetting ("-Xsource-reader", "classname", "Specify a custom method for reading source files.", "") val reporter = StringSetting ("-Xreporter", "classname", "Specify a custom subclass of FilteringReporter for compiler messages.", "scala.tools.nsc.reporters.ConsoleReporter") - val source = ScalaVersionSetting ("-Xsource", "version", "Enable features that will be available in a future version of Scala, for purposes of early migration and alpha testing.", initial = version213).withPostSetHook { s => - if (s.value < version213) errorFn.apply(s"-Xsource must be at least the current major version (${version213.versionString})") - if (s.value >= version214 && s.value < version3) s.withDeprecationMessage("instead of -Xsource:2.14, use -Xsource:3").value = version3 + val source = ScalaVersionSetting ("-Xsource", "version", "Enable features that will be available in a future version of Scala, for purposes of early migration and alpha testing.", initial = ScalaVersion("2.13")).withPostSetHook { s => + if (s.value >= ScalaVersion("3")) + isScala3.value = true + else if (s.value >= ScalaVersion("2.14")) + s.withDeprecationMessage("instead of -Xsource:2.14, use -Xsource:3").value = ScalaVersion("3") + else if (s.value < ScalaVersion("2.13")) + errorFn.apply(s"-Xsource must be at least the current major version (${ScalaVersion("2.13").versionString})") } + val isScala3 = BooleanSetting ("isScala3", "Is -Xsource Scala 3?").internalOnly() + // The previous "-Xsource" option is intended to be used mainly though ^ helper val XnoPatmatAnalysis = BooleanSetting ("-Xno-patmat-analysis", "Don't perform exhaustivity/unreachability analysis. 
Also, ignore @switch annotation.") diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala index f3428ca3c677..0ee1246b3357 100644 --- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala +++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala @@ -445,7 +445,7 @@ abstract class UnCurry extends InfoTransform if (sym.isMethod) level < settings.elidebelow.value else { // TODO: report error? It's already done in RefChecks. https://github.com/scala/scala/pull/5539#issuecomment-331376887 - if (currentRun.isScala213) reporter.error(sym.pos, s"${sym.name}: Only methods can be marked @elidable.") + reporter.error(sym.pos, s"${sym.name}: Only methods can be marked @elidable.") false } } diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index 04c5258561d6..6f549d6fc243 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -107,7 +107,7 @@ trait ContextErrors { def issueTypeError(err: AbsTypeError)(implicit context: Context): Unit = { context.issue(err) } def typeErrorMsg(context: Context, found: Type, req: Type) = - if (context.openImplicits.nonEmpty && !settings.XlogImplicits.value && currentRun.isScala213) + if (context.openImplicits.nonEmpty && !settings.XlogImplicits.value) // OPT: avoid error string creation for errors that won't see the light of day, but predicate // this on -Xsource:2.13 for bug compatibility with https://github.com/scala/scala/pull/7147#issuecomment-418233611 "type mismatch" diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index bb4e1fb1ccd8..5c7e3128b8ed 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -1058,7 +1058,7 @@ trait Contexts { self: Analyzer => ) && !(imported && { val e = scope.lookupEntry(name) - (e ne null) && (e.owner == scope) && (!currentRun.isScala212 || e.sym.exists) + (e ne null) && (e.owner == scope) && e.sym.exists }) /** Do something with the symbols with name `name` imported via the import in `imp`, diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index 1ed5bfd55f52..2e69740d85ed 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -1087,19 +1087,12 @@ trait Implicits { /** Sorted list of eligible implicits. */ - private def eligibleOld = Shadower.using(isLocalToCallsite){ shadower => - val matches = iss flatMap { is => + private def eligibleOld = Shadower.using(isLocalToCallsite) { shadower => + iss flatMap { is => val result = is filter (info => checkValid(info.sym) && survives(info, shadower)) shadower addInfos is result } - - if (currentRun.isScala213) matches - else { - // most frequent one first under Scala 2.12 mode. We've turned this optimization off to avoid - // compilation order variation in whether a search succeeds or diverges. - matches sortBy (x => if (isView) -x.useCountView else -x.useCountArg) - } } /** Sorted list of eligible implicits. 
@@ -1173,16 +1166,7 @@ trait Implicits { } } - val eligible: List[ImplicitInfo] = { - val matches = if (shadowerUseOldImplementation) eligibleOld else eligibleNew - if (currentRun.isScala213) matches - else { - // most frequent one first under Scala 2.12 mode. We've turned this optimization off to avoid - // compilation order variation in whether a search succeeds or diverges. - matches sortBy (x => if (isView) -x.useCountView else -x.useCountArg) - } - } - + val eligible: List[ImplicitInfo] = if (shadowerUseOldImplementation) eligibleOld else eligibleNew if (eligible.nonEmpty) printTyping(tree, "" + eligible.size + s" eligible for pt=$pt at ${fullSiteString(context)}") @@ -1222,7 +1206,7 @@ trait Implicits { foreach2(undetParams, savedInfos){ (up, si) => up.setInfo(si) } } } - if (typedFirstPending.isFailure && currentRun.isScala213) + if (typedFirstPending.isFailure) undoLog.undoTo(mark) // Don't accumulate constraints from typechecking or type error message creation for failed candidates // Pass the errors to `DivergentImplicitRecovery` so that it can note diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 35dbe24f0650..825bcd50b04f 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -1724,7 +1724,7 @@ trait Namers extends MethodSynthesis { val valOwner = owner.owner // there's no overriding outside of classes, and we didn't use to do this in 2.11, so provide opt-out - if (!currentRun.isScala212 || !valOwner.isClass) WildcardType + if (!valOwner.isClass) WildcardType else { // normalize to getter so that we correctly consider a val overriding a def // (a val's name ends in a " ", so can't compare to def) diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index 3b0ad5ad7084..47d6610f6e57 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -1486,7 +1486,7 @@ abstract class RefChecks extends Transform { reporter.error(sym.pos, s"${sym.name}: Only concrete methods can be marked @elidable.$rest") } } - if (currentRun.isScala213) checkIsElidable(tree.symbol) + checkIsElidable(tree.symbol) def checkMember(sym: Symbol): Unit = { sym.setAnnotations(applyChecks(sym.annotations)) diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala index b63f8c0e7b55..cb6356103af9 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala @@ -168,7 +168,7 @@ trait Unapplies extends ast.TreeDSL { case _ => nme.unapply } val cparams = List(ValDef(Modifiers(PARAM | SYNTHETIC), unapplyParamName, classType(cdef, tparams), EmptyTree)) - val resultType = if (!currentRun.isScala212) TypeTree() else { // fix for scala/bug#6541 under -Xsource:2.12 + val resultType = { // fix for scala/bug#6541 under -Xsource:2.12 def repeatedToSeq(tp: Tree) = tp match { case AppliedTypeTree(Select(_, tpnme.REPEATED_PARAM_CLASS_NAME), tps) => AppliedTypeTree(gen.rootScalaDot(tpnme.Seq), tps) case _ => tp diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index 1727c94fe8d3..3c4296fe9a1c 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ 
-995,7 +995,6 @@ trait Definitions extends api.StandardDefinitions { (sym eq PartialFunctionClass) || (sym eq AbstractPartialFunctionClass) } - private[this] val doSam = settings.isScala212 private[this] val samCache = perRunCaches.newAnyRefMap[Symbol, Symbol]() /** The single abstract method declared by type `tp` (or `NoSymbol` if it cannot be found). * @@ -1008,7 +1007,7 @@ trait Definitions extends api.StandardDefinitions { * It's kind of strange that erasure sees deferredMembers that typer does not (see commented out assert below) */ def samOf(tp: Type): Symbol = - if (doSam && isNonRefinementClassType(unwrapToClass(tp))) { // TODO: is this really faster than computing tpSym below? how about just `tp.typeSymbol.isClass` (and !tpSym.isRefinementClass)? + if (isNonRefinementClassType(unwrapToClass(tp))) { // TODO: is this really faster than computing tpSym below? how about just `tp.typeSymbol.isClass` (and !tpSym.isRefinementClass)? // look at erased type because we (only) care about what ends up in bytecode // (e.g., an alias type is fine as long as is compiles to a single-abstract-method) val tpSym: Symbol = erasure.javaErasure(tp).typeSymbol diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 29a5177e9675..deec5ade2c71 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -3620,7 +3620,7 @@ trait Types // This is a higher-kinded type var with same arity as tp. // If so (see scala/bug#7517), side effect: adds the type constructor itself as a bound. isSubArgs(lhs, rhs, params, AnyDepth) && {addBound(tp.typeConstructor); true} - } else if (settings.isScala213 && numCaptured > 0) { + } else if (numCaptured > 0) { // Simple algorithm as suggested by Paul Chiusano in the comments on scala/bug#2712 // // https://github.com/scala/bug/issues/2712#issuecomment-292374655 diff --git a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala index ca8c24d6e8d3..df656fd53c69 100644 --- a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala +++ b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala @@ -63,9 +63,6 @@ abstract class MutableSettings extends AbsSettings { def YstatisticsEnabled: BooleanSetting def Yrecursion: IntSetting - - def isScala212: Boolean - private[scala] def isScala213: Boolean } object MutableSettings { diff --git a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala index baaa6d4561c7..441b25bb9d74 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala @@ -404,7 +404,7 @@ trait TypeComparers { } def isSub(tp1: Type, tp2: Type) = - settings.isScala213 && isSubHKTypeVar(tp1, tp2) || + isSubHKTypeVar(tp1, tp2) || isSub2(tp1.normalize, tp2.normalize) // @M! 
normalize reduces higher-kinded typeref to PolyType def isSub2(ntp1: Type, ntp2: Type) = (ntp1, ntp2) match { diff --git a/src/reflect/scala/reflect/runtime/Settings.scala b/src/reflect/scala/reflect/runtime/Settings.scala index 4b8b771f52c5..5ab2be417955 100644 --- a/src/reflect/scala/reflect/runtime/Settings.scala +++ b/src/reflect/scala/reflect/runtime/Settings.scala @@ -60,7 +60,7 @@ private[reflect] class Settings extends MutableSettings { val YhotStatisticsEnabled = new BooleanSetting(false) val YstatisticsEnabled = new BooleanSetting(false) - val Yrecursion = new IntSetting(0) - def isScala212 = true - private[scala] def isScala213 = true + val Yrecursion = new IntSetting(0) + def isScala212 = true + def isScala213 = true } From e0c831baadffd5130d4fd6a79bbff016b06f6639 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Fri, 16 Apr 2021 03:50:02 -0700 Subject: [PATCH 081/769] Test status quo for Scala main --- test/files/run/t7448.scala | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) create mode 100644 test/files/run/t7448.scala diff --git a/test/files/run/t7448.scala b/test/files/run/t7448.scala new file mode 100644 index 000000000000..5bf74ee85a77 --- /dev/null +++ b/test/files/run/t7448.scala @@ -0,0 +1,18 @@ +// scalac: -nowarn +import util.chaining._ + +object Test { + def main(args: Array[String]) = 42.tap(res => assert(res == 42)) +} + +// test that partest is using scala runner to execute this test. +// With warnings enabled: +/* +t7448.scala:7: warning: not a valid main method for Test, + because main methods must have the exact signature `(Array[String]): Unit`, though Scala runners will forgive a non-Unit result. + To define an entry point, please define the main method as: + def main(args: Array[String]): Unit + + def main(args: Array[String]) = 42.tap(res => assert(res == 42)) + ^ + */ From 0be1fef0e4062705ede67ee48f9fb5f91f27a8c2 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Fri, 16 Apr 2021 03:57:21 -0700 Subject: [PATCH 082/769] Improve text alignment --- .../tools/nsc/backend/jvm/BCodeHelpers.scala | 13 +++++++------ .../scala/reflect/internal/Definitions.scala | 17 ++++++++--------- 2 files changed, 15 insertions(+), 15 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala index c12a631da054..6c2b4056f1ef 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala @@ -283,7 +283,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic { |""".stripMargin, WarningCategory.Other, sym) - val possibles = (sym.tpe nonPrivateMember nme.main).alternatives + val possibles = sym.tpe.nonPrivateMember(nme.main).alternatives val hasApproximate = possibles.exists(m => cond(m.info) { case MethodType(p :: Nil, _) => p.tpe.typeSymbol == definitions.ArrayClass }) // Before erasure so we can identify generic mains. 
@@ -308,16 +308,17 @@ abstract class BCodeHelpers extends BCodeIdiomatic { val mainAdvice = if (hasExact) Nil else possibles.map { m => - m.info match { + val msg = m.info match { case PolyType(_, _) => - ("main methods cannot be generic", m) + "main methods cannot be generic" case MethodType(params, res) if res.typeSymbol :: params exists (_.isAbstractType) => - ("main methods cannot refer to type parameters or abstract types", m) + "main methods cannot refer to type parameters or abstract types" case MethodType(_, _) => - ("main methods must have the exact signature (Array[String])Unit", m) + "main methods must have the exact signature (Array[String])Unit" case tp => - (s"don't know what this is: $tp", m) + s"don't know what this is: $tp" } + (msg, m) } companionAdvice.foreach(msg => warnNoForwarder(msg, hasExact, exactly.fold(alternate)(_.info))) diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index 3c4296fe9a1c..f6a8615e44d2 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -456,13 +456,13 @@ trait Definitions extends api.StandardDefinitions { else if (isScalaRepeatedParamType(tp)) elementExtract(RepeatedParamClass, tp) orElse tp else tp ) - def repeatedToSingle(tp: Type): Type = elementExtract(RepeatedParamClass, tp) orElse elementExtract(JavaRepeatedParamClass, tp) orElse tp + def repeatedToSingle(tp: Type): Type = elementExtract(RepeatedParamClass, tp) orElse elementExtract(JavaRepeatedParamClass, tp) orElse tp // We don't need to deal with JavaRepeatedParamClass here, as `repeatedToSeq` is only called in the patmat translation for Scala sources. - def repeatedToSeq(tp: Type): Type = elementTransform(RepeatedParamClass, tp)(seqType) orElse tp - def seqToRepeated(tp: Type): Type = elementTransform(SeqClass, tp)(scalaRepeatedType) orElse tp - def isReferenceArray(tp: Type) = elementTest(ArrayClass, tp)(elemtp => elemtp <:< AnyRefTpe || (elemtp eq ObjectTpeJava)) - def isArrayOfSymbol(tp: Type, elem: Symbol) = elementTest(ArrayClass, tp)(_.typeSymbol == elem) - def elementType(container: Symbol, tp: Type): Type = elementExtract(container, tp) + def repeatedToSeq(tp: Type): Type = elementTransform(RepeatedParamClass, tp)(seqType) orElse tp + def seqToRepeated(tp: Type): Type = elementTransform(SeqClass, tp)(scalaRepeatedType) orElse tp + def isReferenceArray(tp: Type) = elementTest(ArrayClass, tp)(elemtp => elemtp <:< AnyRefTpe || (elemtp eq ObjectTpeJava)) + def isArrayOfSymbol(tp: Type, elem: Symbol) = elementTest(ArrayClass, tp)(_.typeSymbol == elem) + def elementType(container: Symbol, tp: Type): Type = elementExtract(container, tp) // Classes treated specially with respect to -Ywarn-unused lazy val SubTypeClass = requiredClass[scala.<:<[_,_]] @@ -474,7 +474,7 @@ trait Definitions extends api.StandardDefinitions { lazy val IteratorClass = requiredClass[scala.collection.Iterator[_]] lazy val IterableClass = requiredClass[scala.collection.Iterable[_]] lazy val ListClass = requiredClass[scala.collection.immutable.List[_]] - def List_cons = getMemberMethod(ListClass, nme.CONS) + def List_cons = getMemberMethod(ListClass, nme.CONS) @migration("SeqClass now refers to scala.collection.immutable.Seq", "2.13.0") lazy val SeqClass = requiredClass[scala.collection.immutable.Seq[_]] lazy val SeqFactoryClass = requiredModule[scala.collection.SeqFactory.type] @@ -640,8 +640,7 @@ trait Definitions extends api.StandardDefinitions { case _ => false }) 
// The given class has a main method. - def hasJavaMainMethod(sym: Symbol): Boolean = - (sym.tpe member nme.main).alternatives exists isJavaMainMethod + def hasJavaMainMethod(sym: Symbol): Boolean = sym.tpe.member(nme.main).alternatives.exists(isJavaMainMethod) class VarArityClass(name: String, maxArity: Int, countFrom: Int = 0, init: Option[ClassSymbol] = None) extends VarArityClassApi { private[this] val offset = countFrom - init.size From 31acbf03ddee1259832a32379fcaf6eefe97752d Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Fri, 16 Apr 2021 04:49:21 -0700 Subject: [PATCH 083/769] Tweak message for Scala main --- .../tools/nsc/backend/jvm/BCodeHelpers.scala | 4 +++- test/files/neg/main1.check | 17 ++++++++++++----- test/files/neg/main1.scala | 6 ++++++ test/files/neg/t4749.check | 4 ++-- 4 files changed, 23 insertions(+), 8 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala index 6c2b4056f1ef..c1eb637d8ae4 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala @@ -313,8 +313,10 @@ abstract class BCodeHelpers extends BCodeIdiomatic { "main methods cannot be generic" case MethodType(params, res) if res.typeSymbol :: params exists (_.isAbstractType) => "main methods cannot refer to type parameters or abstract types" + case MethodType(param :: Nil, _) if definitions.isArrayOfSymbol(param.tpe, StringClass) => + "main methods must have the exact signature `(Array[String]): Unit`, though Scala runners will forgive a non-Unit result" case MethodType(_, _) => - "main methods must have the exact signature (Array[String])Unit" + "main methods must have the exact signature `(Array[String]): Unit`" case tp => s"don't know what this is: $tp" } diff --git a/test/files/neg/main1.check b/test/files/neg/main1.check index e1ba37b9ac90..4d9ef2fba906 100644 --- a/test/files/neg/main1.check +++ b/test/files/neg/main1.check @@ -29,7 +29,7 @@ main1.scala:41: warning: Foo has a valid main method (args: Array[String]): Unit object Foo extends Foo { // Overrides main from the class ^ main1.scala:53: warning: not a valid main method for p6.Main, - because main methods must have the exact signature (Array[String])Unit. + because main methods must have the exact signature `(Array[String]): Unit`. To define an entry point, please define the main method as: def main(args: Array[String]): Unit @@ -42,7 +42,7 @@ main1.scala:59: warning: Main has a main method (args: Array[Int]): Unit, object Main { ^ main1.scala:60: warning: not a valid main method for p7.Main, - because main methods must have the exact signature (Array[String])Unit. + because main methods must have the exact signature `(Array[String]): Unit`. To define an entry point, please define the main method as: def main(args: Array[String]): Unit @@ -55,19 +55,26 @@ main1.scala:66: warning: Main has a main method, object Main { ^ main1.scala:68: warning: not a valid main method for p8.Main, - because main methods must have the exact signature (Array[String])Unit. + because main methods must have the exact signature `(Array[String]): Unit`. To define an entry point, please define the main method as: def main(args: Array[String]): Unit def main(args: Array[Double]) = () ^ main1.scala:67: warning: not a valid main method for p8.Main, - because main methods must have the exact signature (Array[String])Unit. + because main methods must have the exact signature `(Array[String]): Unit`. 
To define an entry point, please define the main method as: def main(args: Array[String]): Unit def main(args: Array[Int]) = () ^ +main1.scala:74: warning: not a valid main method for t7448.Main, + because main methods must have the exact signature `(Array[String]): Unit`, though Scala runners will forgive a non-Unit result. + To define an entry point, please define the main method as: + def main(args: Array[String]): Unit + + def main(args: Array[String]) = ??? + ^ error: No warnings can be incurred under -Werror. -11 warnings +12 warnings 1 error diff --git a/test/files/neg/main1.scala b/test/files/neg/main1.scala index 88a94d85bbb2..295920808350 100644 --- a/test/files/neg/main1.scala +++ b/test/files/neg/main1.scala @@ -68,3 +68,9 @@ package p8 { def main(args: Array[Double]) = () } } + +package t7448 { + object Main { + def main(args: Array[String]) = ??? + } +} diff --git a/test/files/neg/t4749.check b/test/files/neg/t4749.check index 2799d8ddc0b7..ee5967c2cd0c 100644 --- a/test/files/neg/t4749.check +++ b/test/files/neg/t4749.check @@ -1,5 +1,5 @@ t4749.scala:5: warning: not a valid main method for bippy.Fail1, - because main methods must have the exact signature (Array[String])Unit. + because main methods must have the exact signature `(Array[String]): Unit`, though Scala runners will forgive a non-Unit result. To define an entry point, please define the main method as: def main(args: Array[String]): Unit @@ -38,7 +38,7 @@ t4749.scala:28: warning: Fail6 has a valid main method (args: Array[String]): Un object Fail6 { ^ t4749.scala:44: warning: not a valid main method for bippy.Win3, - because main methods must have the exact signature (Array[String])Unit. + because main methods must have the exact signature `(Array[String]): Unit`, though Scala runners will forgive a non-Unit result. To define an entry point, please define the main method as: def main(args: Array[String]): Unit From dcc97b54c37ee28d0c3444e553e4efb6038c7ecd Mon Sep 17 00:00:00 2001 From: Torsten Schmits Date: Fri, 22 Feb 2019 19:42:04 +0100 Subject: [PATCH 084/769] =?UTF-8?q?Integrate=20splain=20=E2=80=93=20implic?= =?UTF-8?q?it=20resolution=20chains=20and=20type=20diffs=20in=20error=20me?= =?UTF-8?q?ssages?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This error formatting framework displays a tree of implicit parameters that correspond to the resolution chain between an error's call site and the offending implicit. Several additional improvements for error formatting are provided as well, like colored diffs of found/required error types, which are based on a set of pure data types extracted from the compiler internals, available to plugin developers through the AnalyzerPlugin API. 
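For illustration only (this sketch is not part of the patch, and every name in
it is made up): the kind of nested implicit failure whose report this change
improves. The root search for Outer[Int] fails because the only candidate,
outerFromInner, in turn needs an Inner[Int] that is not in scope; compiling
with -Vimplicits is expected to show that candidate chain rather than only the
outermost missing parameter.

    object SplainDemo {
      trait Inner[A]
      trait Outer[A]
      object Outer {
        // candidate for Outer[A] that depends on a further implicit
        implicit def outerFromInner[A](implicit inner: Inner[A]): Outer[A] =
          new Outer[A] {}
      }

      def need[A](implicit outer: Outer[A]): Unit = ()

      need[Int] // fails to compile: Inner[Int] is missing, so outerFromInner is rejected
    }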
--- project/ScalaOptionParser.scala | 4 +- .../scala/reflect/reify/Taggers.scala | 2 - .../tools/nsc/settings/ScalaSettings.scala | 43 +- .../nsc/typechecker/AnalyzerPlugins.scala | 19 +- .../tools/nsc/typechecker/ContextErrors.scala | 45 +- .../tools/nsc/typechecker/Implicits.scala | 16 +- .../nsc/typechecker/TypeDiagnostics.scala | 9 +- .../tools/nsc/typechecker/splain/Colors.scala | 47 + .../nsc/typechecker/splain/SplainData.scala | 111 +++ .../splain/SplainDiagnostics.scala | 27 + .../nsc/typechecker/splain/SplainErrors.scala | 64 ++ .../typechecker/splain/SplainFormatData.scala | 170 ++++ .../typechecker/splain/SplainFormatting.scala | 826 ++++++++++++++++++ .../scala/tools/reflect/ToolBox.scala | 2 - src/manual/scala/man1/scalac.scala | 3 - src/reflect/scala/reflect/macros/Typers.scala | 2 - test/files/neg/implicit-any2stringadd.scala | 2 +- test/files/neg/implicit-log.check | 10 - test/files/neg/implicit-log.scala | 2 +- test/files/neg/implicit-shadow.check | 9 +- test/files/neg/implicit-shadow.scala | 2 - test/files/neg/t6323a.check | 16 +- test/files/neg/t6323a.scala | 2 +- test/files/run/splain-tree.check | 47 + test/files/run/splain-tree.scala | 50 ++ test/files/run/splain-truncrefined.check | 4 + test/files/run/splain-truncrefined.scala | 30 + test/files/run/splain.check | 115 +++ test/files/run/splain.scala | 225 +++++ 29 files changed, 1829 insertions(+), 75 deletions(-) create mode 100644 src/compiler/scala/tools/nsc/typechecker/splain/Colors.scala create mode 100644 src/compiler/scala/tools/nsc/typechecker/splain/SplainData.scala create mode 100644 src/compiler/scala/tools/nsc/typechecker/splain/SplainDiagnostics.scala create mode 100644 src/compiler/scala/tools/nsc/typechecker/splain/SplainErrors.scala create mode 100644 src/compiler/scala/tools/nsc/typechecker/splain/SplainFormatData.scala create mode 100644 src/compiler/scala/tools/nsc/typechecker/splain/SplainFormatting.scala create mode 100644 test/files/run/splain-tree.check create mode 100644 test/files/run/splain-tree.scala create mode 100644 test/files/run/splain-truncrefined.check create mode 100644 test/files/run/splain-truncrefined.scala create mode 100644 test/files/run/splain.check create mode 100644 test/files/run/splain.scala diff --git a/project/ScalaOptionParser.scala b/project/ScalaOptionParser.scala index 367805199cc7..e3149a39c048 100644 --- a/project/ScalaOptionParser.scala +++ b/project/ScalaOptionParser.scala @@ -82,7 +82,7 @@ object ScalaOptionParser { } // TODO retrieve these data programmatically, ala https://github.com/scala/scala-tool-support/blob/master/bash-completion/src/main/scala/BashCompletion.scala - private def booleanSettingNames = List("-X", "-Xasync", "-Xcheckinit", "-Xdev", "-Xdisable-assertions", "-Xexperimental", "-Xfatal-warnings", "-Xlog-free-terms", "-Xlog-free-types", "-Xlog-implicit-conversions", "-Xlog-implicits", "-Xlog-reflective-calls", + private def booleanSettingNames = List("-X", "-Xasync", "-Xcheckinit", "-Xdev", "-Xdisable-assertions", "-Xexperimental", "-Xfatal-warnings", "-Xlog-free-terms", "-Xlog-free-types", "-Xlog-implicit-conversions", "-Xlog-reflective-calls", "-Xno-forwarders", "-Xno-patmat-analysis", "-Xnon-strict-patmat-analysis", "-Xprint-pos", "-Xprint-types", "-Xprompt", "-Xresident", "-Xshow-phases", "-Xverify", "-Y", "-Ybreak-cycles", "-Ydebug", "-Ycompact-trees", "-YdisableFlatCpCaching", "-Ydoc-debug", "-Yide-debug", @@ -97,7 +97,7 @@ object ScalaOptionParser { "-Vhot-statistics", "-Vide", "-Vimplicit-conversions", "-Vimplicits", "-Vissue", "-Vmacro", 
"-Vmacro-lite", "-Vpatmat", "-Vphases", "-Vpos", "-Vprint-pos", "-Vprint-types", "-Vquasiquote", "-Vreflective-calls", "-Vreify", - "-Vshow-member-pos", "-Vshow-symkinds", "-Vshow-symowners", "-Vsymbols", "-Vtyper", + "-Vshow-member-pos", "-Vshow-symkinds", "-Vshow-symowners", "-Vsymbols", "-Vtype-diffs", "-Vtyper", "-W", "-Wdead-code", "-Werror", "-Wextra-implicit", "-Wnumeric-widen", "-Woctal-literal", "-Wvalue-discard", "-Wself-implicit", diff --git a/src/compiler/scala/reflect/reify/Taggers.scala b/src/compiler/scala/reflect/reify/Taggers.scala index 41eddd796c85..adff7a293503 100644 --- a/src/compiler/scala/reflect/reify/Taggers.scala +++ b/src/compiler/scala/reflect/reify/Taggers.scala @@ -102,8 +102,6 @@ abstract class Taggers { val tpe = tpeTree.tpe val PolyType(_, MethodType(_, tagTpe)) = fun.tpe: @unchecked val tagModule = tagTpe.typeSymbol.companionSymbol - if (c.compilerSettings.contains("-Xlog-implicits")) - c.echo(c.enclosingPosition, s"cannot materialize ${tagModule.name}[$tpe] as $result because:\n$reason") c.abort(c.enclosingPosition, "No %s available for %s".format(tagModule.name, tpe)) } diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index 5d5518ee59d8..eaf19d98539c 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -501,8 +501,6 @@ trait ScalaSettings extends StandardScalaSettings with Warnings { _: MutableSett .withAbbreviation("-Yhot-statistics") val Yshowsyms = BooleanSetting("-Vsymbols", "Print the AST symbol hierarchy after each phase.") withAbbreviation "-Yshow-syms" val Ytyperdebug = BooleanSetting("-Vtyper", "Trace type assignments.") withAbbreviation "-Ytyper-debug" - val XlogImplicits = BooleanSetting("-Vimplicits", "Show more detail on why some implicits are not applicable.") - .withAbbreviation("-Xlog-implicits") val logImplicitConv = BooleanSetting("-Vimplicit-conversions", "Print a message whenever an implicit conversion is inserted.") .withAbbreviation("-Xlog-implicit-conversions") val logReflectiveCalls = BooleanSetting("-Vreflective-calls", "Print a message when a reflective method call is generated") @@ -569,4 +567,45 @@ trait ScalaSettings extends StandardScalaSettings with Warnings { _: MutableSett */ None } + + object VimplicitsChoices extends MultiChoiceEnumeration { + val enable = Choice("enable", "print dependent missing implicits") + val disable = Choice("disable", "disable printing dependent missing implicits") + val noColor = Choice("no-color", "don't colorize type errors formatted by splain") + val verboseTree = Choice("verbose-tree", "display all intermediate implicits in a chain") + } + + val Vimplicits: MultiChoiceSetting[VimplicitsChoices.type] = + MultiChoiceSetting( + name = "-Vimplicits", + helpArg = "feature", + descr = "Print dependent missing implicits and colored found/required type diffs. 
See https://docs.scala-lang.org/overviews/compiler-options/errors.html", + domain = VimplicitsChoices, + default = Some("enable" :: Nil), + ).withPostSetHook(_ => enableVexplainImplicitsImplicitly()) + + def enableVexplainImplicitsImplicitly(): Unit = + if (!Vimplicits.contains(VimplicitsChoices.disable) && !Vimplicits.contains(VimplicitsChoices.enable)) + Vimplicits.enable(VimplicitsChoices.enable) + + val VimplicitsMaxRefined: IntSetting = + IntSetting( + "-Vimplicits-max-refined", + "max chars for printing refined types, abbreviate to `F {...}`", + 0, + Some((0, Int.MaxValue)), + str => Some(str.toInt), + ).withPostSetHook(_ => enableVexplainImplicitsImplicitly()) + + def implicitsSettingEnable: Boolean = + Vimplicits.contains(VimplicitsChoices.enable) && + !Vimplicits.contains(VimplicitsChoices.disable) + def implicitsSettingNoColor: Boolean = Vimplicits.contains(VimplicitsChoices.noColor) + def implicitsSettingVerboseTree: Boolean = Vimplicits.contains(VimplicitsChoices.verboseTree) + + val VtypeDiffs: BooleanSetting = + BooleanSetting("-Vtype-diffs", "Print found/required error messages as colored diffs.") + + def typeDiffsSettingEnable: Boolean = + VtypeDiffs.value } diff --git a/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala b/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala index a86f2c409151..b99ba49a989d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala +++ b/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala @@ -16,7 +16,7 @@ package typechecker /** * @author Lukas Rytz */ -trait AnalyzerPlugins { self: Analyzer => +trait AnalyzerPlugins { self: Analyzer with splain.SplainData => import global._ trait AnalyzerPlugin { @@ -179,6 +179,16 @@ trait AnalyzerPlugins { self: Analyzer => * @param result The result to a given implicit search. */ def pluginsNotifyImplicitSearchResult(result: SearchResult): Unit = () + + /** + * Construct a custom error message for implicit parameters that could not be resolved. 
+ * + * @param param The implicit parameter that was resolved + * @param errors The chain of intermediate implicits that lead to this error + * @param previous The error message constructed by the previous analyzer plugin, or the builtin default + */ + def noImplicitFoundError(param: Symbol, errors: List[ImplicitError], previous: Option[String]): Option[String] = + previous } /** @@ -390,6 +400,13 @@ trait AnalyzerPlugins { self: Analyzer => def accumulate = (_, p) => p.pluginsNotifyImplicitSearchResult(result) }) + /** @see AnalyzerPlugin.noImplicitFoundError */ + def pluginsNoImplicitFoundError(param: Symbol, errors: List[ImplicitError], initial: String): Option[String] = + invoke(new CumulativeOp[Option[String]] { + def default = Some(initial) + def accumulate = (previous, p) => p.noImplicitFoundError(param, errors, previous) + }) + /** A list of registered macro plugins */ private var macroPlugins: List[MacroPlugin] = Nil diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index 0d7fc8d0947f..18a3c8179fbf 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -25,7 +25,9 @@ import scala.tools.nsc.util.stackTraceString import scala.reflect.io.NoAbstractFile import scala.reflect.internal.util.NoSourceFile -trait ContextErrors { +trait ContextErrors +extends splain.SplainErrors +{ self: Analyzer => import global._ @@ -108,7 +110,7 @@ trait ContextErrors { def issueTypeError(err: AbsTypeError)(implicit context: Context): Unit = { context.issue(err) } def typeErrorMsg(context: Context, found: Type, req: Type) = - if (context.openImplicits.nonEmpty && !settings.XlogImplicits.value) + if (context.openImplicits.nonEmpty) // OPT: avoid error string creation for errors that won't see the light of day, but predicate // this on -Xsource:2.13 for bug compatibility with https://github.com/scala/scala/pull/7147#issuecomment-418233611 "type mismatch" @@ -152,8 +154,25 @@ trait ContextErrors { def MacroCantExpandIncompatibleMacrosError(internalMessage: String) = MacroIncompatibleEngineError("macro cannot be expanded, because it was compiled by an incompatible macro engine", internalMessage) + def NoImplicitFoundAnnotation(tree: Tree, param: Symbol): Option[(Boolean, String)] = { + param match { + case ImplicitNotFoundMsg(msg) => Some((false, msg.formatParameterMessage(tree))) + case _ => + val paramTp = param.tpe + paramTp.typeSymbolDirect match { + case ImplicitNotFoundMsg(msg) => Some((false, msg.formatDefSiteMessage(paramTp))) + case _ => + val supplement = param.baseClasses.collectFirst { + case ImplicitNotFoundMsg(msg) => s" (${msg.formatDefSiteMessage(paramTp)})" + } + supplement.map((true, _)) + } + } + } + def NoImplicitFoundError(tree: Tree, param: Symbol)(implicit context: Context): Unit = { - def errMsg = { + val annotationMsg: Option[(Boolean, String)] = NoImplicitFoundAnnotation(tree, param) + def defaultErrMsg = { val paramName = param.name val paramTp = param.tpe def evOrParam = @@ -161,21 +180,15 @@ trait ContextErrors { "evidence parameter of type" else s"parameter $paramName:" - - param match { - case ImplicitNotFoundMsg(msg) => msg.formatParameterMessage(tree) - case _ => - paramTp.typeSymbolDirect match { - case ImplicitNotFoundMsg(msg) => msg.formatDefSiteMessage(paramTp) - case _ => - val supplement = param.baseClasses.collectFirst { - case ImplicitNotFoundMsg(msg) => s" (${msg.formatDefSiteMessage(paramTp)})" 
- }.getOrElse("") - s"could not find implicit value for $evOrParam $paramTp$supplement" - } + annotationMsg match { + case Some((false, msg)) => msg + case msg => + val supplement = msg.fold("")(_._2) + s"could not find implicit value for $evOrParam $paramTp$supplement" } } - issueNormalTypeError(tree, errMsg) + val errMsg = splainPushOrReportNotFound(tree, param, annotationMsg.map(_._2)) + issueNormalTypeError(tree, errMsg.getOrElse(defaultErrMsg)) } trait TyperContextErrors { diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index 2e69740d85ed..e573a4d74c0e 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -33,7 +33,7 @@ import scala.tools.nsc.Reporting.WarningCategory * * @author Martin Odersky */ -trait Implicits { +trait Implicits extends splain.SplainData { self: Analyzer => import global._ @@ -105,12 +105,14 @@ trait Implicits { if (shouldPrint) typingStack.printTyping(tree, "typing implicit: %s %s".format(tree, context.undetparamsString)) val implicitSearchContext = context.makeImplicit(reportAmbiguous) + ImplicitErrors.startSearch(pt) val dpt = if (isView) pt else dropByName(pt) val isByName = dpt ne pt val search = new ImplicitSearch(tree, dpt, isView, implicitSearchContext, pos, isByName) pluginsNotifyImplicitSearch(search) val result = search.bestImplicit pluginsNotifyImplicitSearchResult(result) + ImplicitErrors.finishSearch(result.isSuccess, pt) if (result.isFailure && saveAmbiguousDivergent && implicitSearchContext.reporter.hasErrors) implicitSearchContext.reporter.propagateImplicitTypeErrorsTo(context.reporter) @@ -146,7 +148,7 @@ trait Implicits { if (result.isFailure && !silent) { val err = context.reporter.firstError val errPos = err.map(_.errPos).getOrElse(pos) - val errMsg = err.map(_.errMsg).getOrElse("implicit search has failed. to find out the reason, turn on -Xlog-implicits") + val errMsg = err.map(_.errMsg).getOrElse("implicit search has failed. to find out the reason, turn on -Vimplicits") onError(errPos, errMsg) } result.tree @@ -443,8 +445,6 @@ trait Implicits { def pos = if (pos0 != NoPosition) pos0 else tree.pos @inline final def failure(what: Any, reason: => String, pos: Position = this.pos): SearchResult = { - if (settings.XlogImplicits) - reporter.echo(pos, s"$what is not a valid implicit value for $pt because:\n$reason") SearchFailure } /** Is implicit info `info1` better than implicit info `info2`? 
@@ -906,7 +906,8 @@ trait Implicits { // bounds check on the expandee tree itree3.attachments.get[MacroExpansionAttachment] match { case Some(MacroExpansionAttachment(exp @ TypeApply(fun, targs), _)) => - checkBounds(exp, NoPrefix, NoSymbol, fun.symbol.typeParams, targs.map(_.tpe), "inferred ") + val withinBounds = checkBounds(exp, NoPrefix, NoSymbol, fun.symbol.typeParams, targs.map(_.tpe), "inferred ") + if (!withinBounds) splainPushNonconformantBonds(pt, tree, targs.map(_.tpe), undetParams, None) case _ => () } @@ -953,6 +954,7 @@ trait Implicits { context.reporter.firstError match { case Some(err) => + splainPushImplicitSearchFailure(itree3, pt, err) fail("typing TypeApply reported errors for the implicit tree: " + err.errMsg) case None => val result = new SearchResult(unsuppressMacroExpansion(itree3), subst, context.undetparams) @@ -1492,17 +1494,15 @@ trait Implicits { // so that if we find one, we could convert it to whatever universe we need by the means of the `in` method // if no tag is found in scope, we end up here, where we ask someone to materialize the tag for us // however, since the original search was about a tag with no particular prefix, we cannot proceed - // this situation happens very often, so emitting an error message here (even if only for -Xlog-implicits) would be too much + // this situation happens very often, so emitting an error message here (even if only for -Vimplicits) would be too much //return failure(tp, "tag error: unsupported prefix type %s (%s)".format(pre, pre.kind)) return SearchFailure } ) // todo. migrate hardcoded materialization in Implicits to corresponding implicit macros val materializer = atPos(pos.focus)(gen.mkMethodCall(TagMaterializers(tagClass), List(tp), if (prefix != EmptyTree) List(prefix) else List())) - if (settings.XlogImplicits) reporter.echo(pos, "materializing requested %s.%s[%s] using %s".format(pre, tagClass.name, tp, materializer)) if (context.macrosEnabled) success(materializer) // don't call `failure` here. if macros are disabled, we just fail silently - // otherwise -Xlog-implicits will spam the long with zillions of "macros are disabled" // this is ugly but temporary, since all this code will be removed once I fix implicit macros else SearchFailure } diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala index 2ee0a2efba1f..a71539ee277e 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala @@ -40,7 +40,9 @@ import scala.tools.nsc.Reporting.WarningCategory * * @author Paul Phillips */ -trait TypeDiagnostics { +trait TypeDiagnostics +extends splain.SplainDiagnostics +{ self: Analyzer with StdAttachments => import global._ @@ -310,7 +312,7 @@ trait TypeDiagnostics { // when the message will never be seen. I though context.reportErrors // being false would do that, but if I return "" under // that condition, I see it. 
- def foundReqMsg(found: Type, req: Type): String = { + def builtinFoundReqMsg(found: Type, req: Type): String = { val foundWiden = found.widen val reqWiden = req.widen val sameNamesDifferentPrefixes = @@ -340,6 +342,9 @@ trait TypeDiagnostics { } } + def foundReqMsg(found: Type, req: Type): String = + splainFoundReqMsg(found, req).getOrElse(builtinFoundReqMsg(found, req)) + def typePatternAdvice(sym: Symbol, ptSym: Symbol) = { val clazz = if (sym.isModuleClass) sym.companionClass else sym val caseString = diff --git a/src/compiler/scala/tools/nsc/typechecker/splain/Colors.scala b/src/compiler/scala/tools/nsc/typechecker/splain/Colors.scala new file mode 100644 index 000000000000..67bea85500db --- /dev/null +++ b/src/compiler/scala/tools/nsc/typechecker/splain/Colors.scala @@ -0,0 +1,47 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.nsc +package typechecker +package splain + +trait StringColor +{ + def color(s: String, col: String): String +} + +object StringColors +{ + implicit val noColor = + new StringColor { + def color(s: String, col: String) = s + } + + implicit val color = + new StringColor { + import Console.RESET + + def color(s: String, col: String) = col + s + RESET + } +} + +object StringColor +{ + implicit class StringColorOps(s: String)(implicit sc: StringColor) + { + import Console._ + def red = sc.color(s, RED) + def green = sc.color(s, GREEN) + def yellow = sc.color(s, YELLOW) + def blue = sc.color(s, BLUE) + } +} diff --git a/src/compiler/scala/tools/nsc/typechecker/splain/SplainData.scala b/src/compiler/scala/tools/nsc/typechecker/splain/SplainData.scala new file mode 100644 index 000000000000..c86481559d81 --- /dev/null +++ b/src/compiler/scala/tools/nsc/typechecker/splain/SplainData.scala @@ -0,0 +1,111 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.tools.nsc +package typechecker +package splain + +import scala.util.matching.Regex + +trait SplainData { self: Analyzer => + + import global._ + + sealed trait ImplicitErrorSpecifics + + object ImplicitErrorSpecifics + { + case class NotFound(param: Symbol) + extends ImplicitErrorSpecifics + + case class NonconformantBounds(targs: List[Type], tparams: List[Symbol], originalError: Option[AbsTypeError]) + extends ImplicitErrorSpecifics + } + + object ImplicitErrors + { + var stack: List[Type] = Nil + + var errors: List[ImplicitError] = Nil + + def push(error: ImplicitError): Unit = errors = error :: errors + + def nesting: Int = stack.length - 1 + + def nested: Boolean = stack.nonEmpty + + def removeErrorsFor(tpe: Type): Unit = errors = errors.dropWhile(_.tpe == tpe) + + def startSearch(expectedType: Type): Unit = { + if (settings.implicitsSettingEnable) { + if (!nested) errors = List() + stack = expectedType :: stack + } + } + + def finishSearch(success: Boolean, expectedType: Type): Unit = { + if (settings.implicitsSettingEnable) { + if (success) removeErrorsFor(expectedType) + stack = stack.drop(1) + } + } + } + + case class ImplicitError(tpe: Type, candidate: Tree, nesting: Int, specifics: ImplicitErrorSpecifics) + { + override def equals(other: Any) = other match { + case o: ImplicitError => + o.tpe.toString == tpe.toString && ImplicitError.candidateName(this) == ImplicitError.candidateName(o) + case _ => false + } + + override def hashCode = (tpe.toString.hashCode, ImplicitError.candidateName(this).hashCode).hashCode + + override def toString: String = + s"NotFound(${ImplicitError.shortName(tpe.toString)}, ${ImplicitError.shortName(candidate.toString)}), $nesting, $specifics)" + } + + object ImplicitError + { + def notFound(tpe: Type, candidate: Tree, nesting: Int)(param: Symbol): ImplicitError = + ImplicitError(tpe, candidate, nesting, ImplicitErrorSpecifics.NotFound(param)) + + def nonconformantBounds + (tpe: Type, candidate: Tree, nesting: Int) + (targs: List[Type], tparams: List[Symbol], originalError: Option[AbsTypeError]) + : ImplicitError = + ImplicitError(tpe, candidate, nesting, ImplicitErrorSpecifics.NonconformantBounds(targs, tparams, originalError)) + + def unapplyCandidate(e: ImplicitError): Tree = + e.candidate match { + case TypeApply(name, _) => name + case a => a + } + + def candidateName(e: ImplicitError): String = + unapplyCandidate(e) match { + case Select(_, name) => name.toString + case Ident(name) => name.toString + case a => a.toString + } + + val candidateRegex: Regex = """.*\.this\.(.*)""".r + + def cleanCandidate(e: ImplicitError): String = + unapplyCandidate(e).toString match { + case candidateRegex(suf) => suf + case a => a + } + + def shortName(ident: String): String = ident.split('.').toList.lastOption.getOrElse(ident) + } +} diff --git a/src/compiler/scala/tools/nsc/typechecker/splain/SplainDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/splain/SplainDiagnostics.scala new file mode 100644 index 000000000000..20dcc0d4da24 --- /dev/null +++ b/src/compiler/scala/tools/nsc/typechecker/splain/SplainDiagnostics.scala @@ -0,0 +1,27 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.tools.nsc +package typechecker +package splain + +trait SplainDiagnostics +extends SplainFormatting +{ self: Analyzer with SplainData => + import global._ + + def splainFoundReqMsg(found: Type, req: Type): Option[String] = + if (settings.typeDiffsSettingEnable) + Some(";\n" + showFormattedL(formatDiff(found, req, true), true).indent.joinLines) + else + None +} diff --git a/src/compiler/scala/tools/nsc/typechecker/splain/SplainErrors.scala b/src/compiler/scala/tools/nsc/typechecker/splain/SplainErrors.scala new file mode 100644 index 000000000000..e2ffeade29be --- /dev/null +++ b/src/compiler/scala/tools/nsc/typechecker/splain/SplainErrors.scala @@ -0,0 +1,64 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.nsc +package typechecker +package splain + +trait SplainErrors { self: Analyzer with SplainFormatting => + import global._ + + def splainPushNotFound(tree: Tree, param: Symbol): Unit = + ImplicitErrors.stack + .headOption + .map(ImplicitError.notFound(_, tree, ImplicitErrors.nesting)(param)) + .foreach(err => ImplicitErrors.push(err)) + + def splainPushOrReportNotFound(tree: Tree, param: Symbol, annotationMsg: Option[String]): Option[String] = + if (settings.implicitsSettingEnable) + if (ImplicitErrors.nested) { + splainPushNotFound(tree, param) + None + } + else pluginsNoImplicitFoundError(param, ImplicitErrors.errors, formatImplicitError(param, ImplicitErrors.errors, annotationMsg)) + else None + + def splainPushNonconformantBonds( + tpe: Type, + candidate: Tree, + targs: List[Type], + tparams: List[Symbol], + originalError: Option[AbsTypeError], + ): Unit = { + if (settings.implicitsSettingEnable) { + val err = ImplicitError.nonconformantBounds(tpe, candidate, ImplicitErrors.nesting)(targs, tparams, originalError) + ImplicitErrors.push(err) + } + } + + def splainPushImplicitSearchFailure(implicitTree: Tree, expectedType: Type, originalError: AbsTypeError): Unit = { + def pushImpFailure(fun: Tree, args: List[Tree]): Unit = { + fun.tpe match { + case PolyType(tparams, restpe) if tparams.nonEmpty && sameLength(tparams, args) => + val targs = mapList(args)(_.tpe) + splainPushNonconformantBonds(expectedType, implicitTree, targs, tparams, Some(originalError)) + case _ => () + } + } + if (settings.implicitsSettingEnable) { + (implicitTree: @unchecked) match { + case TypeApply(fun, args) => pushImpFailure(fun, args) + case Apply(TypeApply(fun, args), _) => pushImpFailure(fun, args) + } + } + } +} diff --git a/src/compiler/scala/tools/nsc/typechecker/splain/SplainFormatData.scala b/src/compiler/scala/tools/nsc/typechecker/splain/SplainFormatData.scala new file mode 100644 index 000000000000..46ba14800a06 --- /dev/null +++ b/src/compiler/scala/tools/nsc/typechecker/splain/SplainFormatData.scala @@ -0,0 +1,170 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.tools.nsc +package typechecker +package splain + +sealed trait Formatted +{ + def length: Int +} + +object Formatted { + def comparator: Formatted => String = { + case Infix(left, _, _, _) => + comparator(left) + case Simple(tpe) => + tpe + case Qualified(Nil, tpe) => + tpe + case Qualified(path, tpe) => + s"${path.mkString}$tpe" + case UnitForm => + "()" + case Applied(cons, _) => + comparator(cons) + case TupleForm(Nil) => + "()" + case TupleForm(h :: _) => + comparator(h) + case FunctionForm(Nil, ret, _) => + comparator(ret) + case FunctionForm(h :: _, _, _) => + comparator(h) + case RefinedForm(Nil, _) => + "()" + case RefinedForm(h :: _, _) => + comparator(h) + case Diff(l, _) => + comparator(l) + case Decl(sym, _) => + comparator(sym) + case DeclDiff(sym, _, _) => + comparator(sym) + case ByName(tpe) => + comparator(tpe) + } + + implicit def Ordering_Formatted: Ordering[Formatted] = + new Ordering[Formatted] { + def compare(x: Formatted, y: Formatted): Int = Ordering[String].compare(comparator(x), comparator(y)) + } +} + +case class Infix(infix: Formatted, left: Formatted, right: Formatted, + top: Boolean) +extends Formatted +{ + def length = List(infix, left, right).map(_.length).sum + 2 +} + +case class Simple(tpe: String) +extends Formatted +{ + def length = tpe.length +} + +case class Qualified(path: List[String], tpe: String) +extends Formatted +{ + def length: Int = path.map(_.length).sum + path.length + tpe.length +} + +case object UnitForm +extends Formatted +{ + def length = 4 +} + +case class Applied(cons: Formatted, args: List[Formatted]) +extends Formatted +{ + def length = args.map(_.length).sum + (args.length - 1) * 2 + cons.length + 2 +} + +case class TupleForm(elems: List[Formatted]) +extends Formatted +{ + def length = elems.map(_.length).sum + (elems.length - 1) + 2 +} + +case class FunctionForm(args: List[Formatted], ret: Formatted, top: Boolean) +extends Formatted +{ + def length = args.map(_.length).sum + (args.length - 1) + 2 + ret.length + 4 +} + +object FunctionForm +{ + def fromArgs(args: List[Formatted], top: Boolean) = { + val (params, returnt) = args.splitAt(args.length - 1) + FunctionForm(params, returnt.headOption.getOrElse(UnitForm), top) + } +} + +case class RefinedForm(elems: List[Formatted], decls: List[Formatted]) +extends Formatted +{ + def length: Int = elems.map(_.length).sum + (elems.length - 1) * 6 +} + +case class Diff(left: Formatted, right: Formatted) +extends Formatted +{ + def length = left.length + right.length + 1 +} + +case class Decl(sym: Formatted, rhs: Formatted) +extends Formatted +{ + def length: Int = sym.length + rhs.length + 8 +} + +case class DeclDiff(sym: Formatted, left: Formatted, right: Formatted) +extends Formatted +{ + def length: Int = sym.length + left.length + right.length + 9 +} + +case class ByName(tpe: Formatted) +extends Formatted +{ + def length: Int = tpe.length + 5 +} + +sealed trait TypeRepr +{ + def broken: Boolean + def flat: String + def lines: List[String] + def tokenize = lines mkString " " + def joinLines = lines mkString "\n" + def indent: TypeRepr +} + +case class BrokenType(lines: List[String]) +extends TypeRepr +{ + def broken = true + def flat = lines mkString " " + def indent = BrokenType(lines map (" " + _)) +} + +case class FlatType(flat: String) +extends TypeRepr +{ + def broken = false + def length = flat.length + def lines = List(flat) + def indent = FlatType(" " + flat) +} diff --git a/src/compiler/scala/tools/nsc/typechecker/splain/SplainFormatting.scala 
b/src/compiler/scala/tools/nsc/typechecker/splain/SplainFormatting.scala new file mode 100644 index 000000000000..14fbfba729d3 --- /dev/null +++ b/src/compiler/scala/tools/nsc/typechecker/splain/SplainFormatting.scala @@ -0,0 +1,826 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.nsc +package typechecker +package splain + +import collection.mutable + +import StringColor._ + +object Messages +{ + val hasMatching = "hasMatchingSymbol reported error: " + + val typingTypeApply = + "typing TypeApply reported errors for the implicit tree: " +} + +class FormatCache[K, V](cache: mutable.Map[K, V], var hits: Long) +{ + def apply(k: K, orElse: => V) = { + if (cache.contains(k)) hits += 1 + cache.getOrElseUpdate(k, orElse) + } + + def stats = s"${cache.size}/$hits" +} + +object FormatCache +{ + def apply[K, V] = new FormatCache[K, V](mutable.Map(), 0) +} + +trait SplainFormatters +{ self: Analyzer => + import global._ + + def formatType(tpe: Type, top: Boolean): Formatted + + object Refined { + def unapply(tpe: Type): Option[(List[Type], Scope)] = + tpe match { + case RefinedType(parents, decls) => + Some((parents, decls)) + case t @ SingleType(_, _) => + unapply(t.underlying) + case _ => + None + } + } + + trait SpecialFormatter + { + def apply[A](tpe: Type, simple: String, args: List[A], + formattedArgs: => List[Formatted], top: Boolean, + rec: A => Boolean => Formatted): Option[Formatted] + + def diff(left: Type, right: Type, top: Boolean): Option[Formatted] + } + + object FunctionFormatter + extends SpecialFormatter + { + def apply[A](tpe: Type, simple: String, args: List[A], + formattedArgs: => List[Formatted], top: Boolean, + rec: A => Boolean => Formatted) = { + if (simple.startsWith("Function")) + Some(FunctionForm.fromArgs(formattedArgs, top)) + else None + } + + def diff(left: Type, right: Type, top: Boolean) = None + } + + object TupleFormatter + extends SpecialFormatter + { + def apply[A](tpe: Type, simple: String, args: List[A], + formattedArgs: => List[Formatted], top: Boolean, + rec: A => Boolean => Formatted) = { + if (simple.startsWith("Tuple")) + Some(TupleForm(formattedArgs)) + else None + } + + def diff(left: Type, right: Type, top: Boolean) = None + } + + object RefinedFormatter extends SpecialFormatter { + object DeclSymbol { + def unapply(sym: Symbol): Option[(Formatted, Formatted)] = + if (sym.hasRawInfo) + Some((Simple(sym.simpleName.toString), formatType(sym.rawInfo, true))) + else + None + } + + def ignoredTypes: List[Type] = List(typeOf[Object], typeOf[Any], typeOf[AnyRef]) + + def sanitizeParents: List[Type] => List[Type] = { + case List(tpe) => + List(tpe) + case tpes => + tpes.filterNot(t => ignoredTypes.exists(_ =:= t)) + } + + def formatDecl: Symbol => Formatted = { + case DeclSymbol(n, t) => + Decl(n, t) + case sym => + Simple(sym.toString) + } + + def apply[A]( + tpe: Type, + simple: String, + args: List[A], + formattedArgs: => List[Formatted], + top: Boolean, + rec: A => Boolean => Formatted, + ): Option[Formatted] = + tpe match { + case Refined(parents, decls) => + Some(RefinedForm(sanitizeParents(parents).map(formatType(_, top)), decls.toList.map(formatDecl))) + case _ => + None + } + + val none: Formatted = Simple("") + + def separate[A](left: List[A], right: List[A]): (List[A], List[A], 
List[A]) = { + val leftS = Set(left: _*) + val rightS = Set(right: _*) + val common = leftS.intersect(rightS) + val uniqueLeft = leftS -- common + val uniqueRight = rightS -- common + (common.toList, uniqueLeft.toList, uniqueRight.toList) + } + + def matchTypes(left: List[Type], right: List[Type]): List[Formatted] = { + val (common, uniqueLeft, uniqueRight) = separate(left.map(formatType(_, true)), right.map(formatType(_, true))) + val diffs = uniqueLeft + .toList + .zipAll(uniqueRight.toList, none, none) + .map { case (l, r) => + Diff(l, r) + } + common.toList ++ diffs + } + + def filterDecls(syms: List[Symbol]): List[(Formatted, Formatted)] = + syms.collect { case DeclSymbol(sym, rhs) => + (sym, rhs) + } + + def matchDecls(left: List[Symbol], right: List[Symbol]): List[Formatted] = { + val (common, uniqueLeft, uniqueRight) = separate(filterDecls(left), filterDecls(right)) + val diffs = uniqueLeft + .toList + .map(Some(_)) + .zipAll(uniqueRight.toList.map(Some(_)), None, None) + .collect { + case (Some((sym, l)), Some((_, r))) => + DeclDiff(sym, l, r) + case (None, Some((sym, r))) => + DeclDiff(sym, none, r) + case (Some((sym, l)), None) => + DeclDiff(sym, l, none) + } + common.toList.map { case (sym, rhs) => + Decl(sym, rhs) + } ++ diffs + } + + def diff(left: Type, right: Type, top: Boolean): Option[Formatted] = + (left, right) match { + case (Refined(leftParents, leftDecls), Refined(rightParents, rightDecls)) => + val parents = matchTypes(sanitizeParents(leftParents), sanitizeParents(rightParents)).sorted + val decls = matchDecls(leftDecls.toList, rightDecls.toList).sorted + Some(RefinedForm(parents, decls)) + case _ => + None + } + } + + object ByNameFormatter extends SpecialFormatter { + def apply[A]( + tpe: Type, + simple: String, + args: List[A], + formattedArgs: => List[Formatted], + top: Boolean, + rec: A => Boolean => Formatted, + ): Option[Formatted] = + tpe match { + case TypeRef(_, sym, List(a)) if sym.name.decodedName.toString == "" => + Some(ByName(formatType(a, true))) + case _ => + None + } + + def diff(left: Type, right: Type, top: Boolean): Option[Formatted] = None + } +} + +trait SplainFormatting +extends SplainFormatters +{ self: Analyzer => + import global._ + + def breakInfixLength: Int = 70 + + def splainSettingTruncRefined: Option[Int] = { + val value = settings.VimplicitsMaxRefined.value + if (value == 0) None else Some(value) + } + + implicit def colors = + if(settings.implicitsSettingNoColor) StringColors.noColor + else StringColors.color + + def dealias(tpe: Type) = + if (isAux(tpe)) tpe + else { + val actual = tpe match { + case ExistentialType(_, t) => t + case _ => tpe + } + actual.dealias + } + + def extractArgs(tpe: Type) = { + tpe match { + case PolyType(params, result) => + result.typeArgs.map { + case t if params.contains(t.typeSymbol) => WildcardType + case a => a + } + case t: AliasTypeRef if !isAux(tpe) => + t.betaReduce.typeArgs.map(a => if (a.typeSymbolDirect.isTypeParameter) WildcardType else a) + case _ => tpe.typeArgs + } + } + + def isRefined(tpe: Type) = tpe.dealias match { + case RefinedType(_, _) => true + case _ => false + } + + def isSymbolic(tpe: Type) = { + val n = tpe.typeConstructor.typeSymbol.name + !isRefined(tpe) && (n.encodedName.toString != n.decodedName.toString) + } + + def ctorNames(tpe: Type): List[String] = + scala.util.Try(tpe.typeConstructor.toString) + .map(_.split('.').toList) + .getOrElse(List(tpe.toString)) + + def isAux(tpe: Type) = ctorNames(tpe).lastOption.contains("Aux") + + def formatRefinement(sym: Symbol) = { + 
if (sym.hasRawInfo) { + val rhs = showType(sym.rawInfo) + s"$sym = $rhs" + } + else sym.toString + } + + def formatAuxSimple(tpe: Type): (List[String], String) = { + val names = ctorNames(tpe) + (names.dropRight(2), ctorNames(tpe).takeRight(2).mkString(".")) + } + + def symbolPath(sym: Symbol): List[String] = + sym + .ownerChain + .takeWhile(sym => sym.isType && !sym.isPackageClass) + .map(_.name.decodedName.toString) + .reverse + + def sanitizePath(path: List[String]): List[String] = + path + .takeWhile(_ != "type") + .filterNot(_.contains("$")) + + def pathPrefix: List[String] => String = { + case Nil => + "" + case List("") => + "" + case a => + a.mkString("", ".", ".") + } + + def qualifiedName(path: List[String], name: String): String = s"${pathPrefix(path)}$name" + + def stripModules(path: List[String], name: String): Option[Int] => String = { + case Some(keep) => + qualifiedName(path.takeRight(keep), name) + case None => + name + } + + case class TypeParts(sym: Symbol, tt: Type) { + + def modulePath: List[String] = + (tt, sym) match { + case (TypeRef(pre, _, _), _) if !pre.toString.isEmpty => + sanitizePath(pre.toString.split("\\.").toList) + case (SingleType(_, _), sym) => + symbolPath(sym).dropRight(1) + case (_, _) => + Nil + } + + def ownerPath: List[String] = { + val chain = sym.ownerChain.reverse + val parts = chain.map(_.name.decodedName.toString) + val (paths, names) = parts.splitAt( + Math.max(0, parts.size - 1), + ) + paths + } + + def shortName: String = { + val prefixes = tt.prefixString.split('.').dropRight(1) + val prefix = prefixes.mkString(".") + "." + val name = tt.safeToString + name.stripPrefix(prefix) + } + } + + def stripType(tpe: Type): (List[String], String) = + tpe match { + case tt: SingletonType => + val sym = tt.termSymbol + val parts = TypeParts(sym, tt) + + parts.modulePath -> parts.shortName + + case tt: RefinedType => + val sym = tt.typeSymbol + val parts = TypeParts(sym, tt) + + parts.modulePath -> parts.shortName + + case _ => + // TODO: should this also use TypeParts ? + val sym = + if (tpe.takesTypeArgs) + tpe.typeSymbolDirect + else + tpe.typeSymbol + val symName = sym.name.decodedName.toString + val parts = TypeParts(sym, tpe) + + val name = + if (sym.isModuleClass) + s"$symName.type" + else + symName + (parts.modulePath, name) + } + + def formatNormalSimple(tpe: Type): (List[String], String) = + tpe match { + case a @ WildcardType => + (Nil, a.toString) + case a => + stripType(a) + } + + def formatSimpleType(tpe: Type): (List[String], String) = + if (isAux(tpe)) + formatAuxSimple(tpe) + else + formatNormalSimple(tpe) + + def indentLine(line: String, n: Int = 1, prefix: String = " ") = (prefix * n) + line + + def indent(lines: List[String], n: Int = 1, prefix: String = " ") = lines.map(indentLine(_, n, prefix)) + + /** + * If the args of an applied type constructor are multiline, create separate + * lines for the constructor name and the closing bracket; else return a + * single line. 
+ */ + def showTypeApply + (cons: String, args: List[TypeRepr], break: Boolean) + : TypeRepr = { + val flatArgs = bracket(args map (_.flat)) + val flat = FlatType(s"$cons$flatArgs") + def brokenArgs = args match { + case head :: tail => + tail.foldLeft(head.lines)((z, a) => z ::: "," :: a.lines) + case _ => Nil + } + def broken = BrokenType(s"$cons[" :: indent(brokenArgs) ::: List("]")) + if (break) decideBreak(flat, broken) else flat + } + + def showTuple(args: List[String]) = + args match { + case head :: Nil => + s"Tuple1[$head]" + case _ => + args.mkString("(", ",", ")") + } + + def showFuncParams(args: List[String]) = + args match { + case head :: Nil => + head + case _ => + args.mkString("(", ",", ")") + } + + def showRefined(parents: List[String], decls: List[String]) = { + val p = parents.mkString(" with ") + val d = + if (decls.isEmpty) + "" + else + decls.mkString(" {", "; ", "}") + s"$p$d" + } + + def bracket[A](params: List[A]) = params.mkString("[", ", ", "]") + + def formatFunction(args: List[String]) = { + val (params, returnt) = args.splitAt(args.length - 1) + s"${showTuple(params)} => ${showTuple(returnt)}" + } + + def decideBreak(flat: FlatType, broken: => BrokenType): TypeRepr = + if (flat.length > breakInfixLength) broken + else flat + + /** + * Turn a nested infix type structure into a flat list + * ::[A, ::[B, C]]] => List(A, ::, B, ::, C) + */ + def flattenInfix(tpe: Infix): List[Formatted] = { + def step(tpe: Formatted): List[Formatted] = tpe match { + case Infix(infix, left, right, top) => + left :: infix :: step(right) + case a => List(a) + } + step(tpe) + } + + /** + * Break a list produced by [[flattenInfix]] into lines by taking two + * elements at a time, then appending the terminal. + * If the expression's length is smaller than the threshold specified via + * plugin parameter, return a single line. 
+ */ + def breakInfix(types: List[Formatted]): TypeRepr = { + val form = types map showFormattedLBreak + def broken: List[String] = form + .sliding(2, 2) + .toList + .flatMap { + case left :: right :: Nil => + (left, right) match { + case (FlatType(tpe), FlatType(infix)) => + List(s"$tpe $infix") + case _ => left.lines ++ right.lines + } + case last :: Nil => last.lines + // for exhaustiveness, cannot be reached + case l => l.flatMap(_.lines) + } + val flat = FlatType(form.flatMap(_.lines) mkString " ") + decideBreak(flat, BrokenType(broken)) + } + + val showFormattedLCache = FormatCache[(Formatted, Boolean), TypeRepr] + + def truncateDecls(decls: List[Formatted]): Boolean = splainSettingTruncRefined.exists(_ < decls.map(_.length).sum) + + def showFormattedQualified(path: List[String], name: String): TypeRepr = + FlatType(name) + + def formattedDiff: (Formatted, Formatted) => String = { + case (Qualified(lpath, lname), Qualified(rpath, rname)) if lname == rname => + val prefix = + lpath + .reverse + .zip(rpath.reverse) + .takeWhile { case (l, r) => + l == r + } + .size + 1 + s"${qualifiedName(lpath.takeRight(prefix), lname).red}|${qualifiedName(rpath.takeRight(prefix), rname).green}" + case (left, right) => + val l = showFormattedNoBreak(left) + val r = showFormattedNoBreak(right) + s"${l.red}|${r.green}" + } + + def showFormattedLImpl(tpe: Formatted, break: Boolean): TypeRepr = + tpe match { + case Simple(name) => + FlatType(name) + case Qualified(Nil, name) => + FlatType(name) + case Qualified(path, name) => + showFormattedQualified(path, name) + case Applied(cons, args) => + val reprs = args.map(showFormattedL(_, break)) + showTypeApply(showFormattedNoBreak(cons), reprs, break) + case tpe @ Infix(_, _, _, top) => + val flat = flattenInfix(tpe) + val broken: TypeRepr = + if (break) + breakInfix(flat) + else + FlatType(flat.map(showFormattedNoBreak).mkString(" ")) + wrapParensRepr(broken, top) + case UnitForm => + FlatType("Unit") + case FunctionForm(args, ret, top) => + val a = showFuncParams(args.map(showFormattedNoBreak)) + val r = showFormattedNoBreak(ret) + FlatType(wrapParens(s"$a => $r", top)) + case TupleForm(elems) => + FlatType(showTuple(elems.map(showFormattedNoBreak))) + case RefinedForm(elems, decls) if truncateDecls(decls) => + FlatType(showRefined(elems.map(showFormattedNoBreak), List("..."))) + case RefinedForm(elems, decls) => + FlatType(showRefined(elems.map(showFormattedNoBreak), decls.map(showFormattedNoBreak))) + case Diff(left, right) => + FlatType(formattedDiff(left, right)) + case Decl(sym, rhs) => + val s = showFormattedNoBreak(sym) + val r = showFormattedNoBreak(rhs) + FlatType(s"type $s = $r") + case DeclDiff(sym, left, right) => + val s = showFormattedNoBreak(sym) + val diff = formattedDiff(left, right) + FlatType(s"type $s = $diff") + case ByName(tpe) => + val t = showFormattedNoBreak(tpe) + FlatType(s"(=> $t)") + } + + def showFormattedL(tpe: Formatted, break: Boolean): TypeRepr = { + val key = (tpe, break) + showFormattedLCache(key, showFormattedLImpl(tpe, break)) + } + + def showFormattedLBreak(tpe: Formatted): TypeRepr = showFormattedL(tpe, true) + + def showFormattedLNoBreak(tpe: Formatted): TypeRepr = showFormattedL(tpe, false) + + def showFormatted(tpe: Formatted, break: Boolean): String = showFormattedL(tpe, break).joinLines + + def showFormattedNoBreak(tpe: Formatted): String = showFormattedLNoBreak(tpe).tokenize + + def showType(tpe: Type): String = showFormatted(formatType(tpe, true), false) + + def showTypeBreak(tpe: Type): String = 
showFormatted(formatType(tpe, true), true) + + def showTypeBreakL(tpe: Type): List[String] = showFormattedL(formatType(tpe, true), true).lines + + def wrapParens(expr: String, top: Boolean): String = + if (top) + expr + else + s"($expr)" + + def wrapParensRepr(tpe: TypeRepr, top: Boolean): TypeRepr = + tpe match { + case FlatType(tpe) => + FlatType(wrapParens(tpe, top)) + case BrokenType(lines) => + if (top) + tpe + else + BrokenType("(" :: indent(lines) ::: List(")")) + } + + val specialFormatters: List[SpecialFormatter] = + List( + FunctionFormatter, + TupleFormatter, + RefinedFormatter, + ByNameFormatter, + ) + + def formatSpecial[A](tpe: Type, simple: String, args: List[A], formattedArgs: => List[Formatted], top: Boolean, + rec: A => Boolean => Formatted) + : Option[Formatted] = { + specialFormatters + .map(_.apply(tpe, simple, args, formattedArgs, top, rec)) + .collectFirst { case Some(a) => a } + .headOption + } + + def formatInfix[A]( + path: List[String], + simple: String, + left: A, + right: A, + top: Boolean, + rec: A => Boolean => Formatted, + ) = { + val l = rec(left)(false) + val r = rec(right)(false) + Infix(Qualified(path, simple), l, r, top) + } + + def formatWithInfix[A](tpe: Type, args: List[A], top: Boolean, rec: A => Boolean => Formatted): Formatted = { + val (path, simple) = formatSimpleType(tpe) + lazy val formattedArgs = args.map(rec(_)(true)) + formatSpecial(tpe, simple, args, formattedArgs, top, rec).getOrElse { + args match { + case left :: right :: Nil if isSymbolic(tpe) => + formatInfix(path, simple, left, right, top, rec) + case _ :: _ => + Applied(Qualified(path, simple), formattedArgs) + case _ => + Qualified(path, simple) + } + } + } + + def formatTypeImpl(tpe: Type, top: Boolean): Formatted = { + val dtpe = dealias(tpe) + val rec = (tp: Type) => (t: Boolean) => formatType(tp, t) + formatWithInfix(dtpe, extractArgs(dtpe), top, rec) + } + + val formatTypeCache = FormatCache[(Type, Boolean), Formatted] + + def formatType(tpe: Type, top: Boolean): Formatted = { + val key = (tpe, top) + formatTypeCache(key, formatTypeImpl(tpe, top)) + } + + def formatDiffInfix(left: Type, right: Type, top: Boolean): Formatted = { + val rec = (l: Type, r: Type) => (t: Boolean) => formatDiff(l, r, t) + val recT = rec.tupled + val args = extractArgs(left) zip extractArgs(right) + formatWithInfix(left, args, top, recT) + } + + def formatDiffSpecial(left: Type, right: Type, top: Boolean): Option[Formatted] = { + specialFormatters.map(_.diff(left, right, top)) + .collectFirst { case Some(a) => a } + .headOption + } + + def formatDiffSimple(left: Type, right: Type): Formatted = { + val l = formatType(left, true) + val r = formatType(right, true) + Diff(l, r) + } + + def formatDiffImpl(found: Type, req: Type, top: Boolean): Formatted = { + val (left, right) = dealias(found) -> dealias(req) + if (left =:= right) + formatType(left, top) + else if (left.typeSymbol == right.typeSymbol) + formatDiffInfix(left, right, top) + else + formatDiffSpecial(left, right, top) getOrElse + formatDiffSimple(left, right) + } + + val formatDiffCache = FormatCache[(Type, Type, Boolean), Formatted] + + def formatDiff(left: Type, right: Type, top: Boolean): Formatted = { + val key = (left, right, top) + formatDiffCache(key, formatDiffImpl(left, right, top)) + } + + def formatNonConfBounds(err: ImplicitErrorSpecifics.NonconformantBounds): List[String] = { + val params = bracket(err.tparams.map(_.defString)) + val tpes = bracket(err.targs map showType) + List("nonconformant bounds;", tpes.red, params.green) + } 
+ + def formatNestedImplicit(err: ImplicitError): (String, List[String], Int) = { + val candidate = ImplicitError.cleanCandidate(err) + val problem = s"${candidate.red} invalid because" + val reason = err.specifics match { + case e: ImplicitErrorSpecifics.NotFound => + implicitMessage(e.param, NoImplicitFoundAnnotation(err.candidate, e.param).map(_._2)) + case e: ImplicitErrorSpecifics.NonconformantBounds => + formatNonConfBounds(e) + } + (problem, reason, err.nesting) + } + + def hideImpError(error: ImplicitError): Boolean = + error.specifics match { + case ImplicitErrorSpecifics.NonconformantBounds(_, _, _) => true + case ImplicitErrorSpecifics.NotFound(_) => false + } + + def indentTree(tree: List[(String, List[String], Int)], baseIndent: Int): List[String] = { + val nestings = tree.map(_._3).distinct.sorted + tree + .flatMap { + case (head, tail, nesting) => + val ind = baseIndent + nestings.indexOf(nesting).abs + indentLine(head, ind, "――") :: indent(tail, ind) + } + } + + def formatIndentTree(chain: List[ImplicitError], baseIndent: Int) = { + val formatted = chain map formatNestedImplicit + indentTree(formatted, baseIndent) + } + + def deepestLevel(chain: List[ImplicitError]) = { + chain.foldLeft(0)((z, a) => if (a.nesting > z) a.nesting else z) + } + + def formatImplicitChainTreeCompact(chain: List[ImplicitError]): Option[List[String]] = { + chain + .headOption + .map { head => + val max = deepestLevel(chain) + val leaves = chain.drop(1).dropWhile(_.nesting < max) + val base = if (head.nesting == 0) 0 else 1 + val (fhh, fht, fhn) = formatNestedImplicit(head) + val spacer = if (leaves.nonEmpty && leaves.length < chain.length) List("⋮".blue) else Nil + val fh = (fhh, fht ++ spacer, fhn) + val ft = leaves map formatNestedImplicit + indentTree(fh :: ft, base) + } + } + + def formatImplicitChainTreeFull(chain: List[ImplicitError]): List[String] = { + val baseIndent = chain.headOption.map(_.nesting).getOrElse(0) + formatIndentTree(chain, baseIndent) + } + + def formatImplicitChainFlat(chain: List[ImplicitError]): List[String] = { + chain map formatNestedImplicit flatMap { case (h, t, _) => h :: t } + } + + def formatImplicitChain(chain: List[ImplicitError]): List[String] = { + val compact = if (settings.implicitsSettingVerboseTree) None else formatImplicitChainTreeCompact(chain) + compact getOrElse formatImplicitChainTreeFull(chain) + } + + /** + * Remove duplicates and special cases that should not be shown. + * In some cases, candidates are reported twice, once as `Foo.f` and once as + * `f`. `ImplicitError.equals` checks the simple names for identity, which + * is suboptimal, but works for 99% of cases. + * Special cases are handled in [[hideImpError]] + */ + def formatNestedImplicits(errors: List[ImplicitError]) = { + val visible = errors filterNot hideImpError + val chains = splitChains(visible).map(_.distinct).distinct + chains map formatImplicitChain flatMap ("" :: _) drop 1 + } + + def formatImplicitParam(sym: Symbol) = sym.name.toString + + def effectiveImplicitType(tpe: Type) = { + if (tpe.typeSymbol.name.toString == "Lazy") + tpe.typeArgs.headOption.getOrElse(tpe) + else tpe + } + + def implicitMessage(param: Symbol, annotationMsg: Option[String]): List[String] = { + val tpe = param.tpe + val msg = annotationMsg match { + case Some(msg) => msg.split("\n").toList.map(_.blue) ++ List("") + case _ => Nil + } + val effTpe = effectiveImplicitType(tpe) + val paramName = formatImplicitParam(param) + val bang = "!" 
+ val i = "I" + val head = s"${bang.red}${i.blue} ${paramName.yellow}:" + val lines = showTypeBreakL(effTpe) match { + case single :: Nil => List(s"$head ${single.green}") + case l => head :: indent(l).map(_.green) + } + lines ++ indent(msg) + } + + def splitChains(errors: List[ImplicitError]): List[List[ImplicitError]] = { + errors.foldRight(Nil: List[List[ImplicitError]]) { + case (a, chains @ ((chain @ (prev :: _)) :: tail)) => + if (a.nesting > prev.nesting) List(a) :: chains + else (a :: chain) :: tail + case (a, _) => + List(List(a)) + } + } + + def formatImplicitError(param: Symbol, errors: List[ImplicitError], annotationMsg: Option[String]) = { + val stack = formatNestedImplicits(errors) + val nl = if (errors.nonEmpty) "\n" else "" + val ex = stack.mkString("\n") + val pre = "implicit error;\n" + val msg = implicitMessage(param, annotationMsg).mkString("\n") + s"$pre$msg$nl$ex" + } + + def cacheStats = { + val sfl = showFormattedLCache.stats + val ft = formatTypeCache.stats + val df = formatDiffCache.stats + s"showFormatted -> $sfl, formatType -> $ft, formatDiff -> $df" + } +} diff --git a/src/compiler/scala/tools/reflect/ToolBox.scala b/src/compiler/scala/tools/reflect/ToolBox.scala index a8aaf53b9622..cccce85741c4 100644 --- a/src/compiler/scala/tools/reflect/ToolBox.scala +++ b/src/compiler/scala/tools/reflect/ToolBox.scala @@ -84,7 +84,6 @@ trait ToolBox[U <: scala.reflect.api.Universe] { * * If `silent` is false, `ToolBoxError` will be thrown in case of an inference error. * If `silent` is true, the typecheck is silent and will return `EmptyTree` if an error occurs. - * Such errors don't vanish and can be inspected by turning on -Xlog-implicits. * Unlike in `typecheck`, `silent` is true by default. */ def inferImplicitValue(pt: u.Type, silent: Boolean = true, withMacrosDisabled: Boolean = false, pos: u.Position = u.NoPosition): u.Tree @@ -98,7 +97,6 @@ trait ToolBox[U <: scala.reflect.api.Universe] { * * If `silent` is false, `ToolBoxError` will be thrown in case of an inference error. * If `silent` is true, the typecheck is silent and will return `EmptyTree` if an error occurs. - * Such errors don't vanish and can be inspected by turning on -Xlog-implicits. * Unlike in `typecheck`, `silent` is true by default. */ def inferImplicitView(tree: u.Tree, from: u.Type, to: u.Type, silent: Boolean = true, withMacrosDisabled: Boolean = false, pos: u.Position = u.NoPosition): u.Tree diff --git a/src/manual/scala/man1/scalac.scala b/src/manual/scala/man1/scalac.scala index b4a83e3cbf37..88788133debd 100644 --- a/src/manual/scala/man1/scalac.scala +++ b/src/manual/scala/man1/scalac.scala @@ -220,9 +220,6 @@ object scalac extends Command { Definition( CmdOption("Xlog-implicit-conversions"), "Print a message whenever an implicit conversion is inserted."), - Definition( - CmdOption("Xlog-implicits"), - "Show more detail on why some implicits are not applicable."), Definition( CmdOption("Xlog-reflective-calls"), "Print a message when a reflective method call is generated."), diff --git a/src/reflect/scala/reflect/macros/Typers.scala b/src/reflect/scala/reflect/macros/Typers.scala index 9a0904a1a47e..90f360901c64 100644 --- a/src/reflect/scala/reflect/macros/Typers.scala +++ b/src/reflect/scala/reflect/macros/Typers.scala @@ -91,7 +91,6 @@ trait Typers { * * If `silent` is false, `TypecheckException` will be thrown in case of an inference error. * If `silent` is true, the typecheck is silent and will return `EmptyTree` if an error occurs. 
- * Such errors don't vanish and can be inspected by turning on -Xlog-implicits. * Unlike in `typecheck`, `silent` is true by default. * * @throws scala.reflect.macros.TypecheckException @@ -103,7 +102,6 @@ trait Typers { * * If `silent` is false, `TypecheckException` will be thrown in case of an inference error. * If `silent` is true, the typecheck is silent and will return `EmptyTree` if an error occurs. - * Such errors don't vanish and can be inspected by turning on -Xlog-implicits. * Unlike in `typecheck`, `silent` is true by default. * * @throws scala.reflect.macros.TypecheckException diff --git a/test/files/neg/implicit-any2stringadd.scala b/test/files/neg/implicit-any2stringadd.scala index 80f1ab29bd85..32984ab85dbb 100644 --- a/test/files/neg/implicit-any2stringadd.scala +++ b/test/files/neg/implicit-any2stringadd.scala @@ -1,4 +1,4 @@ -// scalac: -Xsource:3 -Xlog-implicits +// scalac: -Xsource:3 // object Test { true + "what" diff --git a/test/files/neg/implicit-log.check b/test/files/neg/implicit-log.check index c0115c6291a9..541aa6251b25 100644 --- a/test/files/neg/implicit-log.check +++ b/test/files/neg/implicit-log.check @@ -1,13 +1,3 @@ -implicit-log.scala:61: byVal is not a valid implicit value for Int(7) => ?{def unwrap: ?} because: -incompatible: (x: 7): 7 does not match expected type Int(7) => ?{def unwrap: ?} - val res = 7.unwrap() // doesn't work - ^ -implicit-log.scala:70: materializing requested scala.reflect.type.ClassTag[String] using scala.reflect.`package`.materializeClassTag[String]() - val x: java.util.List[String] = List("foo") - ^ -implicit-log.scala:96: materializing requested reflect.runtime.universe.type.TypeTag[Class[_]] using scala.reflect.api.`package`.materializeTypeTag[Class[_]](scala.reflect.runtime.`package`.universe) - println(implicitly[TypeTag[Class[_]]]) - ^ implicit-log.scala:100: error: value baa is not a member of Int 1.baa ^ diff --git a/test/files/neg/implicit-log.scala b/test/files/neg/implicit-log.scala index adfe3acbf0e3..0e5d3f53ad77 100644 --- a/test/files/neg/implicit-log.scala +++ b/test/files/neg/implicit-log.scala @@ -1,4 +1,4 @@ -/* scalac: -Xlog-implicits -Xsource:3 -Xfatal-warnings */ +/* scalac: -Xsource:3 -Xfatal-warnings */ package foo diff --git a/test/files/neg/implicit-shadow.check b/test/files/neg/implicit-shadow.check index 423f7c56aa99..a36b502f43af 100644 --- a/test/files/neg/implicit-shadow.check +++ b/test/files/neg/implicit-shadow.check @@ -1,11 +1,4 @@ -implicit-shadow.scala:6: is not a valid implicit value for Int(1) => ?{def isEmpty: ?} because: -reference to i2s is ambiguous; -it is imported twice in the same scope by -import C._ -and import B._ - 1.isEmpty - ^ -implicit-shadow.scala:6: error: value isEmpty is not a member of Int +implicit-shadow.scala:4: error: value isEmpty is not a member of Int 1.isEmpty ^ 1 error diff --git a/test/files/neg/implicit-shadow.scala b/test/files/neg/implicit-shadow.scala index 7fea7d5d32a0..ec7f70b6d01e 100644 --- a/test/files/neg/implicit-shadow.scala +++ b/test/files/neg/implicit-shadow.scala @@ -1,5 +1,3 @@ -// scalac: -Xlog-implicits -// object Test { import B._, C._ diff --git a/test/files/neg/t6323a.check b/test/files/neg/t6323a.check index 83966449e7aa..d8622cd22e1c 100644 --- a/test/files/neg/t6323a.check +++ b/test/files/neg/t6323a.check @@ -1,15 +1,7 @@ -t6323a.scala:12: materializing requested scala.reflect.type.ClassTag[Test] using scala.reflect.`package`.materializeClassTag[Test]() - val lookAtMe = m.reflect(Test("a",List(5))) - ^ -t6323a.scala:13: materializing 
requested reflect.runtime.universe.type.TypeTag[Test] using scala.reflect.api.`package`.materializeTypeTag[Test](scala.reflect.runtime.`package`.universe) - val value = u.typeOf[Test] - ^ -t6323a.scala:13: scala.reflect.api.`package`.materializeTypeTag[Test](scala.reflect.runtime.`package`.universe) is not a valid implicit value for reflect.runtime.universe.TypeTag[Test] because: -failed to typecheck the materialized tag: -cannot create a TypeTag referring to class Test.Test local to the reifee: use WeakTypeTag instead - val value = u.typeOf[Test] - ^ -t6323a.scala:13: error: No TypeTag available for Test +t6323a.scala:13: error: implicit error; +!I ttag: TypeTag[Test] + No TypeTag available for Test + val value = u.typeOf[Test] ^ 1 error diff --git a/test/files/neg/t6323a.scala b/test/files/neg/t6323a.scala index 34305c69028b..30f5bac00ede 100644 --- a/test/files/neg/t6323a.scala +++ b/test/files/neg/t6323a.scala @@ -1,4 +1,4 @@ -// scalac: -Xlog-implicits +// scalac: -Vimplicits no-color // import scala.reflect.runtime.universe._ import scala.reflect.runtime.{currentMirror => m} diff --git a/test/files/run/splain-tree.check b/test/files/run/splain-tree.check new file mode 100644 index 000000000000..08f373071066 --- /dev/null +++ b/test/files/run/splain-tree.check @@ -0,0 +1,47 @@ +newSource1.scala:28: error: implicit error; +!I e: I1 +i1a invalid because +!I p: I2 +――i2 invalid because + !I p: I3 +――――i3a invalid because + !I p: I4 +――――――i4 invalid because + !I p: I5 +――――――――i5 invalid because + !I p: I6 +――――――――――i6a invalid because + !I p: I7 +――――――――――――i7 invalid because + !I p: I8 +――――――――――――――i8 invalid because + !I p: I9 + +――――――――――i6b invalid because + !I p: I8 +――――――――――――i8 invalid because + !I p: I9 + +――――i3b invalid because + !I p: I4 +――――――i4 invalid because + !I p: I5 +――――――――i5 invalid because + !I p: I6 +――――――――――i6a invalid because + !I p: I7 +――――――――――――i7 invalid because + !I p: I8 +――――――――――――――i8 invalid because + !I p: I9 + +i1b invalid because +!I p: I6 +――i6a invalid because + !I p: I7 +――――i7 invalid because + !I p: I8 +――――――i8 invalid because + !I p: I9 + implicitly[I1] + ^ diff --git a/test/files/run/splain-tree.scala b/test/files/run/splain-tree.scala new file mode 100644 index 000000000000..372eb8a17006 --- /dev/null +++ b/test/files/run/splain-tree.scala @@ -0,0 +1,50 @@ +import scala.tools.partest._ + +object Test +extends DirectTest +{ + override def extraSettings: String = "-usejavacp -Vimplicits:verbose-tree,no-color" + + def code: String = "" + + def verboseTree: String = """ +object tpes +{ + trait I1 + trait I2 + trait I3 + trait I4 + trait I5 + trait I6 + trait I7 + trait I8 + trait I9 +} +import tpes._ + +object Tree +{ + implicit def i8(implicit p: I9): I8 = ??? + implicit def i7(implicit p: I8): I7 = ??? + implicit def i6a(implicit p: I7): I6 = ??? + implicit def i6b(implicit p: I8): I6 = ??? + implicit def i5(implicit p: I6): I5 = ??? + implicit def i4(implicit p: I5): I4 = ??? + implicit def i3a(implicit p: I4): I3 = ??? + implicit def i3b(implicit p: I4): I3 = ??? + implicit def i2(implicit p: I3): I2 = ??? + implicit def i1a(implicit p: I2): I1 = ??? + implicit def i1b(implicit p: I6): I1 = ??? 
+ implicitly[I1] +} + """ + + def show(): Unit = { + val global = newCompiler() + + def run(code: String): Unit = + compileString(global)(code.trim) + + run(verboseTree) + } +} diff --git a/test/files/run/splain-truncrefined.check b/test/files/run/splain-truncrefined.check new file mode 100644 index 000000000000..b940efbf3678 --- /dev/null +++ b/test/files/run/splain-truncrefined.check @@ -0,0 +1,4 @@ +newSource1.scala:7: error: type mismatch; + D|C {...} + f(new D { type X = C; type Y = D }) + ^ diff --git a/test/files/run/splain-truncrefined.scala b/test/files/run/splain-truncrefined.scala new file mode 100644 index 000000000000..da24f448bbe7 --- /dev/null +++ b/test/files/run/splain-truncrefined.scala @@ -0,0 +1,30 @@ +import scala.tools.partest._ + +object Test +extends DirectTest +{ + override def extraSettings: String = "-usejavacp -Vimplicits:no-color -Vtype-diffs -Vimplicits-max-refined 5" + + def code: String = "" + + def truncrefined: String = """ +object TruncRefined +{ + class C + trait D + type CAux[A] = C { type X = C; type Y = D } + def f(arg1: CAux[D]) = ??? + f(new D { type X = C; type Y = D }) +} + + """ + + def show(): Unit = { + val global = newCompiler() + + def run(code: String): Unit = + compileString(global)(code.trim) + + run(truncrefined) + } +} diff --git a/test/files/run/splain.check b/test/files/run/splain.check new file mode 100644 index 000000000000..1e534c40a026 --- /dev/null +++ b/test/files/run/splain.check @@ -0,0 +1,115 @@ +newSource1.scala:13: error: implicit error; +!I e: II +ImplicitChain.g invalid because +!I impPar3: I1 +⋮ +――ImplicitChain.i1 invalid because + !I impPar7: I3 + implicitly[II] + ^ +newSource1.scala:6: error: type mismatch; + L|R + f(new L) + ^ +newSource1.scala:7: error: implicit error; +!I e: F[Arg] + + implicitly[F[Arg]] + ^ +newSource1.scala:4: error: implicit error; +!I ec: ExecutionContext + Cannot find an implicit ExecutionContext. You might add + an (implicit ec: ExecutionContext) parameter to your method. + + The ExecutionContext is used to configure how and on which + thread pools asynchronous tasks (such as Futures) will run, + so the specific ExecutionContext that is selected is important. + + If your application does not define an ExecutionContext elsewhere, + consider using Scala's global ExecutionContext by defining + the following: + + implicit val ec: scala.concurrent.ExecutionContext = scala.concurrent.ExecutionContext.global + + long + ^ +newSource1.scala:10: error: implicit error; +!I e: String +f invalid because +!I impPar4: + List[ + ( + VeryLongTypeName :::: + VeryLongTypeName :::: + VeryLongTypeName :::: + VeryLongTypeName + ) + :::: + (Short :::: Short) :::: + ( + VeryLongTypeName :::: + VeryLongTypeName :::: + VeryLongTypeName :::: + VeryLongTypeName + ) + :::: + VeryLongTypeName :::: + VeryLongTypeName :::: + VeryLongTypeName :::: + VeryLongTypeName + ] + (No implicit view available from Int => InfixBreak.T2.) 
+ + implicitly[String] + ^ +newSource1.scala:11: error: implicit error; +!I e: C1[T3[T1[List[String], ?], T2[Id, C4, ?], ?]] + implicitly[C1[T3]] + ^ +newSource1.scala:9: error: implicit error; +!I e: F.Aux[C, D] +Aux.f invalid because +!I impPar10: C + implicitly[F.Aux[C, D]] + ^ +newSource1.scala:11: error: type mismatch; + A with B with E|C with F| {type X = Int|String; type Y = String; type Z = |String} + f(x) + ^ +newSource1.scala:25: error: type mismatch; + C.X.Y.T|B.X.Y.T + f(x: C.X.Y.T) + ^ +newSource1.scala:6: error: type mismatch; + Int|(=> A) => B + f(1: Int) + ^ +newSource1.scala:3: error: type mismatch; + String|Tuple1[String] + val a: Tuple1[String] = "Tuple1": String + ^ +newSource1.scala:7: error: implicit error; +!I e: a.type *** b.type + implicitly[a.type *** b.type] + ^ +newSource1.scala:8: error: implicit error; +!I e: a.type *** b.type + implicitly[a.type *** b.type] + ^ +newSource1.scala:6: error: implicit error; +!I e: a.type *** b.type + implicitly[a.type *** b.type] + ^ +newSource1.scala:5: error: implicit error; +!I ev: Ordering[Object] + No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. + +Ordering.ordered invalid because +!I asComparable: Object => Comparable[_$2] + No implicit view available from Object => Comparable[_ >: Object]. + +⋮ +Ordering.comparatorToOrdering invalid because +!I cmp: Comparator[Object] + ms.map(_ => o) + ^ diff --git a/test/files/run/splain.scala b/test/files/run/splain.scala new file mode 100644 index 000000000000..62a829996302 --- /dev/null +++ b/test/files/run/splain.scala @@ -0,0 +1,225 @@ +import scala.tools.partest._ + +object Test +extends DirectTest +{ + override def extraSettings: String = "-usejavacp -Vimplicits no-color -Vtype-diffs" + + def code: String = "" + + def chain: String = """ +object ImplicitChain +{ + trait I1 + trait I2 + trait I3 + trait I4 + trait II + implicit def i1(implicit impPar7: I3): I1 = ??? + implicit def i2a(implicit impPar8: I3): I2 = ??? + implicit def i2b(implicit impPar8: I3): I2 = ??? + implicit def i4(implicit impPar9: I2): I4 = ??? + implicit def g(implicit impPar3: I1, impPar1: I4): II = ??? + implicitly[II] +} + """ + + def foundReq: String = """ +object FoundReq +{ + class L + type R + def f(r: R): Int = ??? + f(new L) +} + """ + + def bounds: String = """ +object Bounds +{ + trait Base + trait Arg + trait F[A] + implicit def g[A <: Base, B]: F[A] = ??? + implicitly[F[Arg]] +} + """ + + def longAnnotationMessage: String = """ +object Long +{ + def long(implicit ec: concurrent.ExecutionContext): Unit = ??? + long +} + """ + + def longInfix: String = """ +object InfixBreak +{ + type ::::[A, B] + trait VeryLongTypeName + trait Short + type T1 = VeryLongTypeName :::: VeryLongTypeName :::: VeryLongTypeName :::: + VeryLongTypeName + type T2 = T1 :::: (Short :::: Short) :::: T1 :::: T1 + implicit def f(implicit impPar4: List[T2]): String = ??? + implicitly[String] +} + """ + + def deeplyNestedHole: String = """ +object DeepHole +{ + trait C1[F[_]] + trait C2[F[_], G[_], A] + trait C3[A, B] + trait C4[A] + type Id[A] = A + type T1[X] = C3[List[String], X] + type T2[Y] = C2[Id, C4, Y] + type T3[Z] = C2[T1, T2, Z] + implicitly[C1[T3]] +} + """ + + def auxType: String = """ +object Aux +{ + trait C + trait D + trait F + object F { type Aux[A, B] = F { type X = A; type Y = B } } + implicit def f[A, B](implicit impPar10: C): F { type X = A; type Y = B } = + ??? 
+ implicitly[F.Aux[C, D]] +} + """ + + def refined: String = """ +object Refined +{ + trait A + trait B + trait C + trait D + trait E + trait F + def f(a: A with B with C { type Y = String; type X = String; type Z = String }): Unit = ??? + val x: B with E with A with F { type X = Int; type Y = String } = ??? + f(x) +} + """ + + def disambiguateQualified: String = """ +object A +{ + object B + { + object X + { + object Y + { + type T + } + } + } + object C + { + object X + { + object Y + { + type T + } + } + } + def f(a: B.X.Y.T): Unit = () + val x: C.X.Y.T = ??? + f(x: C.X.Y.T) +} + """ + + def bynameParam: String = """ +object Foo +{ + type A + type B + def f(g: (=> A) => B): Unit = () + f(1: Int) +} + """ + + def tuple1: String = """ +object Tup1 +{ + val a: Tuple1[String] = "Tuple1": String +} + """ + + def singleType: String = """ +object SingleImp +{ + class ***[A, B] + val a = 1 + val b = 2 + + implicitly[a.type *** b.type] +} + """ + + def singleTypeInFunction: String = """ +object SingleImp +{ + class ***[A, B] + def fn(): Unit = { + val a = 1 + val b = 2 + + implicitly[a.type *** b.type] + } +} + """ + + def singleTypeWithFreeSymbol: String = """ +object SingleImp +{ + class ***[A, B] + def fn[A, B](a: A, b: B) = { + + implicitly[a.type *** b.type] + } +} + """ + + def parameterAnnotation: String = """ + import collection.{mutable => m, immutable => i} + object Test { + val o = new Object + val ms = m.SortedSet(1,2,3) + ms.map(_ => o) + } + """ + + def show(): Unit = { + val global = newCompiler() + + def run(code: String): Unit = + compileString(global)(code.trim) + + run(chain) + run(foundReq) + run(bounds) + run(longAnnotationMessage) + run(longInfix) + run(deeplyNestedHole) + run(auxType) + run(refined) + run(disambiguateQualified) + run(bynameParam) + run(tuple1) + run(singleType) + run(singleTypeInFunction) + run(singleTypeWithFreeSymbol) + run(parameterAnnotation) + } +} From 461f8f16253b965ad4cc77521027fbb859c63a09 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Fri, 16 Apr 2021 12:35:15 -0700 Subject: [PATCH 085/769] Do not add self-invoke evidence in parser The RHS of an auxiliary constructor should have implicit args supplied by typer as usual, not using current args in parser, which could only work if all alternatives of the overloaded constructor have the same implicit parameters. 
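To make the affected shape concrete, a minimal sketch along the lines of the
t12233 tests added below (the names are illustrative):

    trait TypeClass[T]

    class Hehe[T: TypeClass](i: Int, j: Int) {
      // The context bound on T desugars to an implicit evidence parameter on
      // the primary constructor. The parser used to rewrite the
      // self-invocation below to also pass this auxiliary constructor's own
      // implicit arguments, which only typechecks when both constructors take
      // the same implicit parameters; after this change the arguments of the
      // self-invocation are inferred by typer like those of any other call.
      def this(i: Int)(implicit j: Int) = this(i, j)
    }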
--- .../scala/tools/nsc/ast/parser/Parsers.scala | 3 +-- test/files/neg/t12233.check | 7 +++++++ test/files/neg/t12233.scala | 20 +++++++++++++++++++ test/files/pos/t12233.scala | 12 +++++++++++ 4 files changed, 40 insertions(+), 2 deletions(-) create mode 100644 test/files/neg/t12233.check create mode 100644 test/files/neg/t12233.scala create mode 100644 test/files/pos/t12233.scala diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index e84248e4663b..09ab668d2fdc 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -2863,8 +2863,7 @@ self => t = Apply(t, argumentExprs()) newLineOptWhenFollowedBy(LBRACE) } - if (classContextBounds.isEmpty) t - else Apply(t, vparamss.last.map(vp => Ident(vp.name))) + t } /** {{{ diff --git a/test/files/neg/t12233.check b/test/files/neg/t12233.check new file mode 100644 index 000000000000..ffa267af2701 --- /dev/null +++ b/test/files/neg/t12233.check @@ -0,0 +1,7 @@ +t12233.scala:4: error: ambiguous implicit values: + both value hehe of type TypeClass[T] + and value evidence$2 of type TypeClass[T] + match expected type TypeClass[T] + def this(i: Int)(implicit hehe: TypeClass[T], j: Int) = this(i, j) + ^ +1 error diff --git a/test/files/neg/t12233.scala b/test/files/neg/t12233.scala new file mode 100644 index 000000000000..b2ad76732461 --- /dev/null +++ b/test/files/neg/t12233.scala @@ -0,0 +1,20 @@ + +trait TypeClass[T] +class Hehe[T: TypeClass](i: Int, j: Int) { + def this(i: Int)(implicit hehe: TypeClass[T], j: Int) = this(i, j) +} + +/* was +t12233.scala:4: error: too many arguments (found 3, expected 1) for constructor Hehe: (implicit evidence$1: TypeClass[T]): Hehe[T] + def this(i: Int)(implicit hehe: TypeClass[T], j: Int) = this(i, j) + ^ +1 error + * now +t12233.scala:4: error: ambiguous implicit values: + both value hehe of type TypeClass[T] + and value evidence$2 of type TypeClass[T] + match expected type TypeClass[T] + def this(i: Int)(implicit hehe: TypeClass[T], j: Int) = this(i, j) + ^ +1 error + */ diff --git a/test/files/pos/t12233.scala b/test/files/pos/t12233.scala new file mode 100644 index 000000000000..481b5258d2d5 --- /dev/null +++ b/test/files/pos/t12233.scala @@ -0,0 +1,12 @@ + +trait TypeClass[T] +class Hehe[T: TypeClass](i: Int, j: Int) { + def this(i: Int)(implicit j: Int) = this(i, j) +} + +/* was +test/files/pos/t12233.scala:4: error: too many arguments (found 2, expected 1) for constructor Hehe: (implicit evidence$1: TypeClass[T]): Hehe[T] + def this(i: Int)(implicit j: Int) = this(i, j) + ^ +1 error + */ From be2070e3180ad0aa6cf9fa462ed2d68f3d1be011 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Thu, 1 Apr 2021 10:31:55 +0100 Subject: [PATCH 086/769] Clean up splain --- .../tools/nsc/settings/ScalaSettings.scala | 45 +- .../nsc/typechecker/AnalyzerPlugins.scala | 8 +- .../tools/nsc/typechecker/ContextErrors.scala | 31 +- .../tools/nsc/typechecker/Implicits.scala | 9 +- .../nsc/typechecker/TypeDiagnostics.scala | 10 +- .../tools/nsc/typechecker/splain/Colors.scala | 47 - .../nsc/typechecker/splain/SplainData.scala | 73 +- .../splain/SplainDiagnostics.scala | 15 +- .../nsc/typechecker/splain/SplainErrors.scala | 34 +- .../typechecker/splain/SplainFormatData.scala | 196 ++--- .../typechecker/splain/SplainFormatting.scala | 825 ++++++------------ .../scala/tools/reflect/ToolBox.scala | 2 + .../reflect/internal/TypeDebugging.scala | 49 +- src/reflect/scala/reflect/macros/Typers.scala 
| 2 + test/files/neg/implicit-any2stringadd.scala | 2 +- test/files/neg/implicit-log.scala | 2 +- test/files/neg/implicit-shadow.check | 2 +- test/files/neg/implicit-shadow.scala | 2 + test/files/neg/t6323a.scala | 2 +- test/files/run/splain-tree.scala | 6 +- test/files/run/splain-truncrefined.scala | 6 +- test/files/run/splain.check | 1 - test/files/run/splain.scala | 2 +- 23 files changed, 453 insertions(+), 918 deletions(-) delete mode 100644 src/compiler/scala/tools/nsc/typechecker/splain/Colors.scala diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index eaf19d98539c..c3b224d888c0 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -501,6 +501,10 @@ trait ScalaSettings extends StandardScalaSettings with Warnings { _: MutableSett .withAbbreviation("-Yhot-statistics") val Yshowsyms = BooleanSetting("-Vsymbols", "Print the AST symbol hierarchy after each phase.") withAbbreviation "-Yshow-syms" val Ytyperdebug = BooleanSetting("-Vtyper", "Trace type assignments.") withAbbreviation "-Ytyper-debug" + val Vimplicits = BooleanSetting("-Vimplicits", "Print dependent missing implicits.").withAbbreviation("-Xlog-implicits") + val VimplicitsVerboseTree = BooleanSetting("-Vimplicits-verbose-tree", "Display all intermediate implicits in a chain.") + val VimplicitsMaxRefined = IntSetting("-Vimplicits-max-refined", "max chars for printing refined types, abbreviate to `F {...}`", Int.MaxValue, Some((0, Int.MaxValue)), _ => None) + val VtypeDiffs = BooleanSetting("-Vtype-diffs", "Print found/required error messages as colored diffs.") val logImplicitConv = BooleanSetting("-Vimplicit-conversions", "Print a message whenever an implicit conversion is inserted.") .withAbbreviation("-Xlog-implicit-conversions") val logReflectiveCalls = BooleanSetting("-Vreflective-calls", "Print a message when a reflective method call is generated") @@ -567,45 +571,4 @@ trait ScalaSettings extends StandardScalaSettings with Warnings { _: MutableSett */ None } - - object VimplicitsChoices extends MultiChoiceEnumeration { - val enable = Choice("enable", "print dependent missing implicits") - val disable = Choice("disable", "disable printing dependent missing implicits") - val noColor = Choice("no-color", "don't colorize type errors formatted by splain") - val verboseTree = Choice("verbose-tree", "display all intermediate implicits in a chain") - } - - val Vimplicits: MultiChoiceSetting[VimplicitsChoices.type] = - MultiChoiceSetting( - name = "-Vimplicits", - helpArg = "feature", - descr = "Print dependent missing implicits and colored found/required type diffs. 
See https://docs.scala-lang.org/overviews/compiler-options/errors.html", - domain = VimplicitsChoices, - default = Some("enable" :: Nil), - ).withPostSetHook(_ => enableVexplainImplicitsImplicitly()) - - def enableVexplainImplicitsImplicitly(): Unit = - if (!Vimplicits.contains(VimplicitsChoices.disable) && !Vimplicits.contains(VimplicitsChoices.enable)) - Vimplicits.enable(VimplicitsChoices.enable) - - val VimplicitsMaxRefined: IntSetting = - IntSetting( - "-Vimplicits-max-refined", - "max chars for printing refined types, abbreviate to `F {...}`", - 0, - Some((0, Int.MaxValue)), - str => Some(str.toInt), - ).withPostSetHook(_ => enableVexplainImplicitsImplicitly()) - - def implicitsSettingEnable: Boolean = - Vimplicits.contains(VimplicitsChoices.enable) && - !Vimplicits.contains(VimplicitsChoices.disable) - def implicitsSettingNoColor: Boolean = Vimplicits.contains(VimplicitsChoices.noColor) - def implicitsSettingVerboseTree: Boolean = Vimplicits.contains(VimplicitsChoices.verboseTree) - - val VtypeDiffs: BooleanSetting = - BooleanSetting("-Vtype-diffs", "Print found/required error messages as colored diffs.") - - def typeDiffsSettingEnable: Boolean = - VtypeDiffs.value } diff --git a/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala b/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala index b99ba49a989d..2557867ea966 100644 --- a/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala +++ b/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala @@ -187,7 +187,7 @@ trait AnalyzerPlugins { self: Analyzer with splain.SplainData => * @param errors The chain of intermediate implicits that lead to this error * @param previous The error message constructed by the previous analyzer plugin, or the builtin default */ - def noImplicitFoundError(param: Symbol, errors: List[ImplicitError], previous: Option[String]): Option[String] = + def noImplicitFoundError(param: Symbol, errors: List[ImplicitError], previous: String): String = previous } @@ -401,9 +401,9 @@ trait AnalyzerPlugins { self: Analyzer with splain.SplainData => }) /** @see AnalyzerPlugin.noImplicitFoundError */ - def pluginsNoImplicitFoundError(param: Symbol, errors: List[ImplicitError], initial: String): Option[String] = - invoke(new CumulativeOp[Option[String]] { - def default = Some(initial) + def pluginsNoImplicitFoundError(param: Symbol, errors: List[ImplicitError], initial: String): String = + invoke(new CumulativeOp[String] { + def default = initial def accumulate = (previous, p) => p.noImplicitFoundError(param, errors, previous) }) diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index 18a3c8179fbf..cb5e3889b190 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -25,9 +25,7 @@ import scala.tools.nsc.util.stackTraceString import scala.reflect.io.NoAbstractFile import scala.reflect.internal.util.NoSourceFile -trait ContextErrors -extends splain.SplainErrors -{ +trait ContextErrors extends splain.SplainErrors { self: Analyzer => import global._ @@ -110,7 +108,7 @@ extends splain.SplainErrors def issueTypeError(err: AbsTypeError)(implicit context: Context): Unit = { context.issue(err) } def typeErrorMsg(context: Context, found: Type, req: Type) = - if (context.openImplicits.nonEmpty) + if (context.openImplicits.nonEmpty && !settings.Vimplicits) // OPT: avoid error string creation for errors that won't see the light 
of day, but predicate // this on -Xsource:2.13 for bug compatibility with https://github.com/scala/scala/pull/7147#issuecomment-418233611 "type mismatch" @@ -154,24 +152,25 @@ extends splain.SplainErrors def MacroCantExpandIncompatibleMacrosError(internalMessage: String) = MacroIncompatibleEngineError("macro cannot be expanded, because it was compiled by an incompatible macro engine", internalMessage) - def NoImplicitFoundAnnotation(tree: Tree, param: Symbol): Option[(Boolean, String)] = { + /** The implicit not found message from the annotation, and whether it's a supplement message or not. */ + def NoImplicitFoundAnnotation(tree: Tree, param: Symbol): (Boolean, String) = { param match { - case ImplicitNotFoundMsg(msg) => Some((false, msg.formatParameterMessage(tree))) + case ImplicitNotFoundMsg(msg) => (false, msg.formatParameterMessage(tree)) case _ => val paramTp = param.tpe paramTp.typeSymbolDirect match { - case ImplicitNotFoundMsg(msg) => Some((false, msg.formatDefSiteMessage(paramTp))) + case ImplicitNotFoundMsg(msg) => (false, msg.formatDefSiteMessage(paramTp)) case _ => val supplement = param.baseClasses.collectFirst { case ImplicitNotFoundMsg(msg) => s" (${msg.formatDefSiteMessage(paramTp)})" - } - supplement.map((true, _)) + }.getOrElse("") + true -> supplement } } } def NoImplicitFoundError(tree: Tree, param: Symbol)(implicit context: Context): Unit = { - val annotationMsg: Option[(Boolean, String)] = NoImplicitFoundAnnotation(tree, param) + val (isSupplement, annotationMsg) = NoImplicitFoundAnnotation(tree, param) def defaultErrMsg = { val paramName = param.name val paramTp = param.tpe @@ -180,15 +179,11 @@ extends splain.SplainErrors "evidence parameter of type" else s"parameter $paramName:" - annotationMsg match { - case Some((false, msg)) => msg - case msg => - val supplement = msg.fold("")(_._2) - s"could not find implicit value for $evOrParam $paramTp$supplement" - } + if (isSupplement) s"could not find implicit value for $evOrParam $paramTp$annotationMsg" + else annotationMsg } - val errMsg = splainPushOrReportNotFound(tree, param, annotationMsg.map(_._2)) - issueNormalTypeError(tree, errMsg.getOrElse(defaultErrMsg)) + val errMsg = splainPushOrReportNotFound(tree, param, annotationMsg) + issueNormalTypeError(tree, if (errMsg.isEmpty) defaultErrMsg else errMsg) } trait TyperContextErrors { diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index e573a4d74c0e..bb233527d6f6 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -445,6 +445,8 @@ trait Implicits extends splain.SplainData { def pos = if (pos0 != NoPosition) pos0 else tree.pos @inline final def failure(what: Any, reason: => String, pos: Position = this.pos): SearchResult = { + if (settings.debug) + reporter.echo(pos, s"$what is not a valid implicit value for $pt because:\n$reason") SearchFailure } /** Is implicit info `info1` better than implicit info `info2`? 
@@ -906,8 +908,9 @@ trait Implicits extends splain.SplainData { // bounds check on the expandee tree itree3.attachments.get[MacroExpansionAttachment] match { case Some(MacroExpansionAttachment(exp @ TypeApply(fun, targs), _)) => - val withinBounds = checkBounds(exp, NoPrefix, NoSymbol, fun.symbol.typeParams, targs.map(_.tpe), "inferred ") - if (!withinBounds) splainPushNonconformantBonds(pt, tree, targs.map(_.tpe), undetParams, None) + val targTpes = mapList(targs)(_.tpe) + val withinBounds = checkBounds(exp, NoPrefix, NoSymbol, fun.symbol.typeParams, targTpes, "inferred ") + if (!withinBounds) splainPushNonconformantBonds(pt, tree, targTpes, undetParams, None) case _ => () } @@ -1501,8 +1504,10 @@ trait Implicits extends splain.SplainData { ) // todo. migrate hardcoded materialization in Implicits to corresponding implicit macros val materializer = atPos(pos.focus)(gen.mkMethodCall(TagMaterializers(tagClass), List(tp), if (prefix != EmptyTree) List(prefix) else List())) + if (settings.debug) reporter.echo(pos, "materializing requested %s.%s[%s] using %s".format(pre, tagClass.name, tp, materializer)) if (context.macrosEnabled) success(materializer) // don't call `failure` here. if macros are disabled, we just fail silently + // otherwise -Vimplicits/-Vdebug will spam the long with zillions of "macros are disabled" // this is ugly but temporary, since all this code will be removed once I fix implicit macros else SearchFailure } diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala index a71539ee277e..4a0f049e585b 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala @@ -40,9 +40,7 @@ import scala.tools.nsc.Reporting.WarningCategory * * @author Paul Phillips */ -trait TypeDiagnostics -extends splain.SplainDiagnostics -{ +trait TypeDiagnostics extends splain.SplainDiagnostics { self: Analyzer with StdAttachments => import global._ @@ -342,8 +340,10 @@ extends splain.SplainDiagnostics } } - def foundReqMsg(found: Type, req: Type): String = - splainFoundReqMsg(found, req).getOrElse(builtinFoundReqMsg(found, req)) + def foundReqMsg(found: Type, req: Type): String = { + val errMsg = splainFoundReqMsg(found, req) + if (errMsg.isEmpty) builtinFoundReqMsg(found, req) else errMsg + } def typePatternAdvice(sym: Symbol, ptSym: Symbol) = { val clazz = if (sym.isModuleClass) sym.companionClass else sym diff --git a/src/compiler/scala/tools/nsc/typechecker/splain/Colors.scala b/src/compiler/scala/tools/nsc/typechecker/splain/Colors.scala deleted file mode 100644 index 67bea85500db..000000000000 --- a/src/compiler/scala/tools/nsc/typechecker/splain/Colors.scala +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala.tools.nsc -package typechecker -package splain - -trait StringColor -{ - def color(s: String, col: String): String -} - -object StringColors -{ - implicit val noColor = - new StringColor { - def color(s: String, col: String) = s - } - - implicit val color = - new StringColor { - import Console.RESET - - def color(s: String, col: String) = col + s + RESET - } -} - -object StringColor -{ - implicit class StringColorOps(s: String)(implicit sc: StringColor) - { - import Console._ - def red = sc.color(s, RED) - def green = sc.color(s, GREEN) - def yellow = sc.color(s, YELLOW) - def blue = sc.color(s, BLUE) - } -} diff --git a/src/compiler/scala/tools/nsc/typechecker/splain/SplainData.scala b/src/compiler/scala/tools/nsc/typechecker/splain/SplainData.scala index c86481559d81..7c438a2d202d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/splain/SplainData.scala +++ b/src/compiler/scala/tools/nsc/typechecker/splain/SplainData.scala @@ -16,86 +16,69 @@ package splain import scala.util.matching.Regex -trait SplainData { self: Analyzer => +trait SplainData { + self: Analyzer => import global._ sealed trait ImplicitErrorSpecifics - object ImplicitErrorSpecifics - { - case class NotFound(param: Symbol) - extends ImplicitErrorSpecifics + object ImplicitErrorSpecifics { + case class NotFound(param: Symbol) extends ImplicitErrorSpecifics - case class NonconformantBounds(targs: List[Type], tparams: List[Symbol], originalError: Option[AbsTypeError]) - extends ImplicitErrorSpecifics + case class NonconformantBounds( + targs: List[Type], tparams: List[Symbol], originalError: Option[AbsTypeError], + ) extends ImplicitErrorSpecifics } - object ImplicitErrors - { - var stack: List[Type] = Nil - + object ImplicitErrors { + var stack: List[Type] = Nil var errors: List[ImplicitError] = Nil - def push(error: ImplicitError): Unit = errors = error :: errors - - def nesting: Int = stack.length - 1 - - def nested: Boolean = stack.nonEmpty - + def push(error: ImplicitError): Unit = errors ::= error + def nesting: Int = stack.length - 1 + def nested: Boolean = stack.nonEmpty def removeErrorsFor(tpe: Type): Unit = errors = errors.dropWhile(_.tpe == tpe) def startSearch(expectedType: Type): Unit = { - if (settings.implicitsSettingEnable) { + if (settings.Vimplicits) { if (!nested) errors = List() stack = expectedType :: stack } } def finishSearch(success: Boolean, expectedType: Type): Unit = { - if (settings.implicitsSettingEnable) { + if (settings.Vimplicits) { if (success) removeErrorsFor(expectedType) stack = stack.drop(1) } } } - case class ImplicitError(tpe: Type, candidate: Tree, nesting: Int, specifics: ImplicitErrorSpecifics) - { + case class ImplicitError(tpe: Type, candidate: Tree, nesting: Int, specifics: ImplicitErrorSpecifics) { + import ImplicitError._ + override def equals(other: Any) = other match { - case o: ImplicitError => - o.tpe.toString == tpe.toString && ImplicitError.candidateName(this) == ImplicitError.candidateName(o) - case _ => false + case o: ImplicitError => o.tpe.toString == tpe.toString && candidateName(this) == candidateName(o) + case _ => false } - override def hashCode = (tpe.toString.hashCode, ImplicitError.candidateName(this).hashCode).hashCode - - override def toString: String = - s"NotFound(${ImplicitError.shortName(tpe.toString)}, ${ImplicitError.shortName(candidate.toString)}), $nesting, $specifics)" + override def hashCode = (tpe.toString.##, ImplicitError.candidateName(this).##).## + override def toString = s"ImplicitError(${shortName(tpe.toString)}, 
${shortName(candidate.toString)}), $nesting, $specifics)" } - object ImplicitError - { - def notFound(tpe: Type, candidate: Tree, nesting: Int)(param: Symbol): ImplicitError = - ImplicitError(tpe, candidate, nesting, ImplicitErrorSpecifics.NotFound(param)) - - def nonconformantBounds - (tpe: Type, candidate: Tree, nesting: Int) - (targs: List[Type], tparams: List[Symbol], originalError: Option[AbsTypeError]) - : ImplicitError = - ImplicitError(tpe, candidate, nesting, ImplicitErrorSpecifics.NonconformantBounds(targs, tparams, originalError)) - + object ImplicitError { def unapplyCandidate(e: ImplicitError): Tree = e.candidate match { - case TypeApply(name, _) => name - case a => a + case TypeApply(fun, _) => fun + case a => a } def candidateName(e: ImplicitError): String = unapplyCandidate(e) match { case Select(_, name) => name.toString - case Ident(name) => name.toString - case a => a.toString + case Ident(name) => name.toString + case a => a.toString } val candidateRegex: Regex = """.*\.this\.(.*)""".r @@ -103,9 +86,9 @@ trait SplainData { self: Analyzer => def cleanCandidate(e: ImplicitError): String = unapplyCandidate(e).toString match { case candidateRegex(suf) => suf - case a => a + case a => a } - def shortName(ident: String): String = ident.split('.').toList.lastOption.getOrElse(ident) + def shortName(ident: String): String = ident.substring(ident.lastIndexOf(".") + 1) } } diff --git a/src/compiler/scala/tools/nsc/typechecker/splain/SplainDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/splain/SplainDiagnostics.scala index 20dcc0d4da24..ca0caa642286 100644 --- a/src/compiler/scala/tools/nsc/typechecker/splain/SplainDiagnostics.scala +++ b/src/compiler/scala/tools/nsc/typechecker/splain/SplainDiagnostics.scala @@ -14,14 +14,13 @@ package scala.tools.nsc package typechecker package splain -trait SplainDiagnostics -extends SplainFormatting -{ self: Analyzer with SplainData => +trait SplainDiagnostics extends splain.SplainFormatting { + self: Analyzer => + import global._ - def splainFoundReqMsg(found: Type, req: Type): Option[String] = - if (settings.typeDiffsSettingEnable) - Some(";\n" + showFormattedL(formatDiff(found, req, true), true).indent.joinLines) - else - None + def splainFoundReqMsg(found: Type, req: Type): String = { + if (settings.VtypeDiffs) ";\n" + showFormattedL(formatDiff(found, req, top = true), break = true).indent.joinLines + else "" + } } diff --git a/src/compiler/scala/tools/nsc/typechecker/splain/SplainErrors.scala b/src/compiler/scala/tools/nsc/typechecker/splain/SplainErrors.scala index e2ffeade29be..41a96c5403b3 100644 --- a/src/compiler/scala/tools/nsc/typechecker/splain/SplainErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/splain/SplainErrors.scala @@ -18,19 +18,19 @@ trait SplainErrors { self: Analyzer with SplainFormatting => import global._ def splainPushNotFound(tree: Tree, param: Symbol): Unit = - ImplicitErrors.stack - .headOption - .map(ImplicitError.notFound(_, tree, ImplicitErrors.nesting)(param)) - .foreach(err => ImplicitErrors.push(err)) + ImplicitErrors.stack.headOption.foreach { pt => + val specifics = ImplicitErrorSpecifics.NotFound(param) + ImplicitErrors.push(ImplicitError(pt, tree, ImplicitErrors.nesting, specifics)) + } - def splainPushOrReportNotFound(tree: Tree, param: Symbol, annotationMsg: Option[String]): Option[String] = - if (settings.implicitsSettingEnable) + def splainPushOrReportNotFound(tree: Tree, param: Symbol, annotationMsg: String): String = + if (settings.Vimplicits) if (ImplicitErrors.nested) { 
splainPushNotFound(tree, param) - None + "" } else pluginsNoImplicitFoundError(param, ImplicitErrors.errors, formatImplicitError(param, ImplicitErrors.errors, annotationMsg)) - else None + else "" def splainPushNonconformantBonds( tpe: Type, @@ -39,9 +39,9 @@ trait SplainErrors { self: Analyzer with SplainFormatting => tparams: List[Symbol], originalError: Option[AbsTypeError], ): Unit = { - if (settings.implicitsSettingEnable) { - val err = ImplicitError.nonconformantBounds(tpe, candidate, ImplicitErrors.nesting)(targs, tparams, originalError) - ImplicitErrors.push(err) + if (settings.Vimplicits) { + val specifics = ImplicitErrorSpecifics.NonconformantBounds(targs, tparams, originalError) + ImplicitErrors.push(ImplicitError(tpe, candidate, ImplicitErrors.nesting, specifics)) } } @@ -49,15 +49,15 @@ trait SplainErrors { self: Analyzer with SplainFormatting => def pushImpFailure(fun: Tree, args: List[Tree]): Unit = { fun.tpe match { case PolyType(tparams, restpe) if tparams.nonEmpty && sameLength(tparams, args) => - val targs = mapList(args)(_.tpe) - splainPushNonconformantBonds(expectedType, implicitTree, targs, tparams, Some(originalError)) - case _ => () + splainPushNonconformantBonds(expectedType, implicitTree, mapList(args)(_.tpe), tparams, Some(originalError)) + case _ => } } - if (settings.implicitsSettingEnable) { - (implicitTree: @unchecked) match { - case TypeApply(fun, args) => pushImpFailure(fun, args) + if (settings.Vimplicits) { + implicitTree match { + case TypeApply(fun, args) => pushImpFailure(fun, args) case Apply(TypeApply(fun, args), _) => pushImpFailure(fun, args) + case _ => } } } diff --git a/src/compiler/scala/tools/nsc/typechecker/splain/SplainFormatData.scala b/src/compiler/scala/tools/nsc/typechecker/splain/SplainFormatData.scala index 46ba14800a06..0b473cdd57ad 100644 --- a/src/compiler/scala/tools/nsc/typechecker/splain/SplainFormatData.scala +++ b/src/compiler/scala/tools/nsc/typechecker/splain/SplainFormatData.scala @@ -14,157 +14,75 @@ package scala.tools.nsc package typechecker package splain -sealed trait Formatted -{ - def length: Int -} +import scala.annotation.tailrec object Formatted { - def comparator: Formatted => String = { - case Infix(left, _, _, _) => - comparator(left) - case Simple(tpe) => - tpe - case Qualified(Nil, tpe) => - tpe - case Qualified(path, tpe) => - s"${path.mkString}$tpe" - case UnitForm => - "()" - case Applied(cons, _) => - comparator(cons) - case TupleForm(Nil) => - "()" - case TupleForm(h :: _) => - comparator(h) - case FunctionForm(Nil, ret, _) => - comparator(ret) - case FunctionForm(h :: _, _, _) => - comparator(h) - case RefinedForm(Nil, _) => - "()" - case RefinedForm(h :: _, _) => - comparator(h) - case Diff(l, _) => - comparator(l) - case Decl(sym, _) => - comparator(sym) - case DeclDiff(sym, _, _) => - comparator(sym) - case ByName(tpe) => - comparator(tpe) + @tailrec def comparator(formatted: Formatted): String = formatted match { + case Infix(left, _, _, _) => comparator(left) + case Simple(tpe) => tpe + case Qualified(Nil, tpe) => tpe + case Qualified(path, tpe) => s"${path.mkString}$tpe" + case UnitForm => "()" + case Applied(cons, _) => comparator(cons) + case TupleForm(Nil) => "()" + case TupleForm(h :: _) => comparator(h) + case FunctionForm(Nil, ret, _) => comparator(ret) + case FunctionForm(h :: _, _, _) => comparator(h) + case RefinedForm(Nil, _) => "()" + case RefinedForm(h :: _, _) => comparator(h) + case Diff(l, _) => comparator(l) + case Decl(sym, _) => comparator(sym) + case DeclDiff(sym, _, _) => 
comparator(sym) + case ByName(tpe) => comparator(tpe) } - implicit def Ordering_Formatted: Ordering[Formatted] = - new Ordering[Formatted] { - def compare(x: Formatted, y: Formatted): Int = Ordering[String].compare(comparator(x), comparator(y)) - } -} - -case class Infix(infix: Formatted, left: Formatted, right: Formatted, - top: Boolean) -extends Formatted -{ - def length = List(infix, left, right).map(_.length).sum + 2 -} - -case class Simple(tpe: String) -extends Formatted -{ - def length = tpe.length -} - -case class Qualified(path: List[String], tpe: String) -extends Formatted -{ - def length: Int = path.map(_.length).sum + path.length + tpe.length -} - -case object UnitForm -extends Formatted -{ - def length = 4 -} - -case class Applied(cons: Formatted, args: List[Formatted]) -extends Formatted -{ - def length = args.map(_.length).sum + (args.length - 1) * 2 + cons.length + 2 -} - -case class TupleForm(elems: List[Formatted]) -extends Formatted -{ - def length = elems.map(_.length).sum + (elems.length - 1) + 2 -} - -case class FunctionForm(args: List[Formatted], ret: Formatted, top: Boolean) -extends Formatted -{ - def length = args.map(_.length).sum + (args.length - 1) + 2 + ret.length + 4 -} - -object FunctionForm -{ - def fromArgs(args: List[Formatted], top: Boolean) = { - val (params, returnt) = args.splitAt(args.length - 1) - FunctionForm(params, returnt.headOption.getOrElse(UnitForm), top) + implicit val Ord: Ordering[Formatted] = (x, y) => Ordering[String].compare(comparator(x), comparator(y)) +} + +sealed trait Formatted { + def length: Int = this match { + case Infix(infix, left, right, top) => infix.length + left.length + right.length + 2 + case Simple(tpe) => tpe.length + case Qualified(path, tpe) => path.map(_.length).sum + path.length + tpe.length + case UnitForm => 4 + case Applied(cons, args) => args.map(_.length).sum + ( args.length - 1) * 2 + cons.length + 2 + case TupleForm(elems) => elems.map(_.length).sum + (elems.length - 1) + 2 + case FunctionForm(args, ret, top) => args.map(_.length).sum + ( args.length - 1) + 2 + ret.length + 4 + case RefinedForm(elems, decls) => elems.map(_.length).sum + (elems.length - 1) * 6 + case Diff(lhs, rhs) => lhs.length + rhs.length + 1 + case Decl(sym, rhs) => sym.length + rhs.length + 8 + case DeclDiff(sym, lhs, rhs) => sym.length + lhs.length + rhs.length + 9 + case ByName(tpe) => tpe.length + 5 } } -case class RefinedForm(elems: List[Formatted], decls: List[Formatted]) -extends Formatted -{ - def length: Int = elems.map(_.length).sum + (elems.length - 1) * 6 -} - -case class Diff(left: Formatted, right: Formatted) -extends Formatted -{ - def length = left.length + right.length + 1 -} - -case class Decl(sym: Formatted, rhs: Formatted) -extends Formatted -{ - def length: Int = sym.length + rhs.length + 8 -} - -case class DeclDiff(sym: Formatted, left: Formatted, right: Formatted) -extends Formatted -{ - def length: Int = sym.length + left.length + right.length + 9 -} - -case class ByName(tpe: Formatted) -extends Formatted -{ - def length: Int = tpe.length + 5 -} - -sealed trait TypeRepr -{ - def broken: Boolean +case class Infix(infix: Formatted, left: Formatted, right: Formatted, top: Boolean) extends Formatted +case class Simple(tpe: String) extends Formatted +case class Qualified(path: List[String], tpe: String) extends Formatted +case object UnitForm extends Formatted +case class Applied(cons: Formatted, args: List[Formatted]) extends Formatted +case class TupleForm(elems: List[Formatted]) extends Formatted +case class 
FunctionForm(args: List[Formatted], ret: Formatted, top: Boolean) extends Formatted +case class RefinedForm(elems: List[Formatted], decls: List[Formatted]) extends Formatted +case class Diff(left: Formatted, right: Formatted) extends Formatted +case class Decl(sym: Formatted, rhs: Formatted) extends Formatted +case class DeclDiff(sym: Formatted, left: Formatted, right: Formatted) extends Formatted +case class ByName(tpe: Formatted) extends Formatted + +sealed trait TypeRepr { def flat: String def lines: List[String] - def tokenize = lines mkString " " - def joinLines = lines mkString "\n" + def tokenize: String = lines.mkString(" ") + def joinLines: String = lines.mkString("\n") def indent: TypeRepr } -case class BrokenType(lines: List[String]) -extends TypeRepr -{ - def broken = true - def flat = lines mkString " " - def indent = BrokenType(lines map (" " + _)) +case class BrokenType(lines: List[String]) extends TypeRepr { + def flat = lines.mkString(" ") + def indent = BrokenType(lines.map(" " + _)) } -case class FlatType(flat: String) -extends TypeRepr -{ - def broken = false - def length = flat.length - def lines = List(flat) - def indent = FlatType(" " + flat) +case class FlatType(flat: String) extends TypeRepr { + def lines = List(flat) + def indent = FlatType(s" $flat") } diff --git a/src/compiler/scala/tools/nsc/typechecker/splain/SplainFormatting.scala b/src/compiler/scala/tools/nsc/typechecker/splain/SplainFormatting.scala index 14fbfba729d3..4665bb0cd67f 100644 --- a/src/compiler/scala/tools/nsc/typechecker/splain/SplainFormatting.scala +++ b/src/compiler/scala/tools/nsc/typechecker/splain/SplainFormatting.scala @@ -14,83 +14,60 @@ package scala.tools.nsc package typechecker package splain -import collection.mutable +import scala.collection.mutable +import scala.reflect.internal.TypeDebugging.AnsiColor._ -import StringColor._ - -object Messages -{ - val hasMatching = "hasMatchingSymbol reported error: " - - val typingTypeApply = - "typing TypeApply reported errors for the implicit tree: " +class FormatCache[K, V](cache: mutable.Map[K, V]) { + def apply(k: K, orElse: => V): V = cache.getOrElseUpdate(k, orElse) } -class FormatCache[K, V](cache: mutable.Map[K, V], var hits: Long) -{ - def apply(k: K, orElse: => V) = { - if (cache.contains(k)) hits += 1 - cache.getOrElseUpdate(k, orElse) - } - - def stats = s"${cache.size}/$hits" +object FormatCache { + def apply[K, V]() = new FormatCache[K, V](mutable.Map()) } -object FormatCache -{ - def apply[K, V] = new FormatCache[K, V](mutable.Map(), 0) -} +trait SplainFormatters { + self: Analyzer => -trait SplainFormatters -{ self: Analyzer => - import global._ + import global._, definitions._ def formatType(tpe: Type, top: Boolean): Formatted object Refined { - def unapply(tpe: Type): Option[(List[Type], Scope)] = - tpe match { - case RefinedType(parents, decls) => - Some((parents, decls)) - case t @ SingleType(_, _) => - unapply(t.underlying) - case _ => - None - } + def unapply(tpe: Type): Option[(List[Type], Scope)] = tpe match { + case RefinedType(parents, decls) => Some((parents, decls)) + case t @ SingleType(_, _) => unapply(t.underlying) + case _ => None + } } - trait SpecialFormatter - { - def apply[A](tpe: Type, simple: String, args: List[A], - formattedArgs: => List[Formatted], top: Boolean, - rec: A => Boolean => Formatted): Option[Formatted] + trait SpecialFormatter { + def apply[A]( + tpe: Type, simple: String, args: List[A], formattedArgs: => List[Formatted], top: Boolean, + )(rec: (A, Boolean) => Formatted): Option[Formatted] 
def diff(left: Type, right: Type, top: Boolean): Option[Formatted] } - object FunctionFormatter - extends SpecialFormatter - { - def apply[A](tpe: Type, simple: String, args: List[A], - formattedArgs: => List[Formatted], top: Boolean, - rec: A => Boolean => Formatted) = { - if (simple.startsWith("Function")) - Some(FunctionForm.fromArgs(formattedArgs, top)) - else None + object FunctionFormatter extends SpecialFormatter { + def apply[A]( + tpe: Type, simple: String, args: List[A], formattedArgs: => List[Formatted], top: Boolean, + )(rec: (A, Boolean) => Formatted) = { + if (simple.startsWith("Function")) { + val fmtArgs = formattedArgs + val (params, returnt) = fmtArgs.splitAt(fmtArgs.length - 1) + Some(FunctionForm(params, returnt.headOption.getOrElse(UnitForm), top)) + } else None } def diff(left: Type, right: Type, top: Boolean) = None } - object TupleFormatter - extends SpecialFormatter - { - def apply[A](tpe: Type, simple: String, args: List[A], - formattedArgs: => List[Formatted], top: Boolean, - rec: A => Boolean => Formatted) = { - if (simple.startsWith("Tuple")) - Some(TupleForm(formattedArgs)) - else None + object TupleFormatter extends SpecialFormatter { + def apply[A]( + tpe: Type, simple: String, args: List[A], formattedArgs: => List[Formatted], top: Boolean + )(rec: (A, Boolean) => Formatted) = { + if (simple.startsWith("Tuple")) Some(TupleForm(formattedArgs)) + else None } def diff(left: Type, right: Type, top: Boolean) = None @@ -99,42 +76,29 @@ trait SplainFormatters object RefinedFormatter extends SpecialFormatter { object DeclSymbol { def unapply(sym: Symbol): Option[(Formatted, Formatted)] = - if (sym.hasRawInfo) - Some((Simple(sym.simpleName.toString), formatType(sym.rawInfo, true))) - else - None + if (sym.hasRawInfo) Some((Simple(sym.simpleName.toString), formatType(sym.rawInfo, true))) + else None } def ignoredTypes: List[Type] = List(typeOf[Object], typeOf[Any], typeOf[AnyRef]) def sanitizeParents: List[Type] => List[Type] = { - case List(tpe) => - List(tpe) - case tpes => - tpes.filterNot(t => ignoredTypes.exists(_ =:= t)) + case List(tpe) => List(tpe) + case tpes => tpes.filter(t => !ignoredTypes.exists(_ =:= t)) } def formatDecl: Symbol => Formatted = { - case DeclSymbol(n, t) => - Decl(n, t) - case sym => - Simple(sym.toString) + case DeclSymbol(n, t) => Decl(n, t) + case sym => Simple(sym.toString) } def apply[A]( - tpe: Type, - simple: String, - args: List[A], - formattedArgs: => List[Formatted], - top: Boolean, - rec: A => Boolean => Formatted, - ): Option[Formatted] = - tpe match { - case Refined(parents, decls) => - Some(RefinedForm(sanitizeParents(parents).map(formatType(_, top)), decls.toList.map(formatDecl))) - case _ => - None - } + tpe: Type, simple: String, args: List[A], formattedArgs: => List[Formatted], top: Boolean, + )(rec: (A, Boolean) => Formatted): Option[Formatted] = tpe match { + case Refined(parents, decls) => + Some(RefinedForm(sanitizeParents(parents).map(formatType(_, top)), decls.toList.map(formatDecl))) + case _ => None + } val none: Formatted = Simple("") @@ -149,37 +113,24 @@ trait SplainFormatters def matchTypes(left: List[Type], right: List[Type]): List[Formatted] = { val (common, uniqueLeft, uniqueRight) = separate(left.map(formatType(_, true)), right.map(formatType(_, true))) - val diffs = uniqueLeft - .toList - .zipAll(uniqueRight.toList, none, none) - .map { case (l, r) => - Diff(l, r) - } - common.toList ++ diffs + val diffs = uniqueLeft.zipAll(uniqueRight, none, none).map { case (l, r) => Diff(l, r) } + common ::: diffs } 
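// Illustrative example: for refined types such as
//   A with B with C { type X = Int }   vs.   A with D { type X = String }
// separate keeps the shared parent A, zipAll pairs the remaining unique
// parents (B with D, and C with the empty placeholder), and matchDecls does
// the same for the declarations, so the diff renders along the lines of
//   A with B|D with C| {type X = Int|String}
// (compare the refined-type diff in test/files/run/splain.check).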
def filterDecls(syms: List[Symbol]): List[(Formatted, Formatted)] = - syms.collect { case DeclSymbol(sym, rhs) => - (sym, rhs) - } + syms.collect { case DeclSymbol(sym, rhs) => (sym, rhs) } def matchDecls(left: List[Symbol], right: List[Symbol]): List[Formatted] = { val (common, uniqueLeft, uniqueRight) = separate(filterDecls(left), filterDecls(right)) val diffs = uniqueLeft - .toList - .map(Some(_)) - .zipAll(uniqueRight.toList.map(Some(_)), None, None) - .collect { - case (Some((sym, l)), Some((_, r))) => - DeclDiff(sym, l, r) - case (None, Some((sym, r))) => - DeclDiff(sym, none, r) - case (Some((sym, l)), None) => - DeclDiff(sym, l, none) - } - common.toList.map { case (sym, rhs) => - Decl(sym, rhs) - } ++ diffs + .map(Some(_)) + .zipAll(uniqueRight.map(Some(_)), None, None) + .collect { + case (Some((sym, l)), Some((_, r))) => DeclDiff(sym, l, r) + case (None, Some((sym, r))) => DeclDiff(sym, none, r) + case (Some((sym, l)), None) => DeclDiff(sym, l, none) + } + common.map { case (sym, rhs) => Decl(sym, rhs) } ++ diffs } def diff(left: Type, right: Type, top: Boolean): Option[Formatted] = @@ -188,78 +139,54 @@ trait SplainFormatters val parents = matchTypes(sanitizeParents(leftParents), sanitizeParents(rightParents)).sorted val decls = matchDecls(leftDecls.toList, rightDecls.toList).sorted Some(RefinedForm(parents, decls)) - case _ => - None + case _ => None } } object ByNameFormatter extends SpecialFormatter { def apply[A]( - tpe: Type, - simple: String, - args: List[A], - formattedArgs: => List[Formatted], - top: Boolean, - rec: A => Boolean => Formatted, - ): Option[Formatted] = - tpe match { - case TypeRef(_, sym, List(a)) if sym.name.decodedName.toString == "<byname>" => - Some(ByName(formatType(a, true))) - case _ => - None - } + tpe: Type, simple: String, args: List[A], formattedArgs: => List[Formatted], top: Boolean, + )(rec: (A, Boolean) => Formatted): Option[Formatted] = tpe match { + case TypeRef(_, ByNameParamClass, List(a)) => Some(ByName(formatType(a, true))) + case _ => None + } def diff(left: Type, right: Type, top: Boolean): Option[Formatted] = None } } -trait SplainFormatting -extends SplainFormatters -{ self: Analyzer => - import global._ - - def breakInfixLength: Int = 70 +trait SplainFormatting extends SplainFormatters { + self: Analyzer => - def splainSettingTruncRefined: Option[Int] = { - val value = settings.VimplicitsMaxRefined.value - if (value == 0) None else Some(value) - } + import global._ - implicit def colors = - if(settings.implicitsSettingNoColor) StringColors.noColor - else StringColors.color + val breakInfixLength: Int = 70 def dealias(tpe: Type) = if (isAux(tpe)) tpe - else { - val actual = tpe match { - case ExistentialType(_, t) => t - case _ => tpe - } - actual.dealias - } - - def extractArgs(tpe: Type) = { - tpe match { - case PolyType(params, result) => - result.typeArgs.map { - case t if params.contains(t.typeSymbol) => WildcardType - case a => a - } - case t: AliasTypeRef if !isAux(tpe) => - t.betaReduce.typeArgs.map(a => if (a.typeSymbolDirect.isTypeParameter) WildcardType else a) - case _ => tpe.typeArgs + else (tpe match { + case ExistentialType(_, t) => t + case _ => tpe + }).dealias + + def extractArgs(tpe: Type) = tpe match { + case PolyType(params, result) => result.typeArgs.map { + case t if params.contains(t.typeSymbol) => WildcardType + case a => a } + case t: AliasTypeRef if !isAux(tpe) => + t.betaReduce.typeArgs.map(a => if (a.typeSymbolDirect.isTypeParameter) WildcardType else a) + case _ => tpe.typeArgs } def isRefined(tpe: Type) =
tpe.dealias match { case RefinedType(_, _) => true - case _ => false + case _ => false } def isSymbolic(tpe: Type) = { val n = tpe.typeConstructor.typeSymbol.name - !isRefined(tpe) && (n.encodedName.toString != n.decodedName.toString) + !isRefined(tpe) && n.encodedName.toString != n.decodedName.toString } def ctorNames(tpe: Type): List[String] = @@ -270,10 +197,7 @@ extends SplainFormatters def isAux(tpe: Type) = ctorNames(tpe).lastOption.contains("Aux") def formatRefinement(sym: Symbol) = { - if (sym.hasRawInfo) { - val rhs = showType(sym.rawInfo) - s"$sym = $rhs" - } + if (sym.hasRawInfo) s"$sym = ${showType(sym.rawInfo)}" else sym.toString } @@ -290,149 +214,92 @@ extends SplainFormatters .reverse def sanitizePath(path: List[String]): List[String] = - path - .takeWhile(_ != "type") - .filterNot(_.contains("$")) + path.takeWhile(_ != "type").filter(!_.contains("$")) def pathPrefix: List[String] => String = { - case Nil => - "" - case List("") => - "" - case a => - a.mkString("", ".", ".") + case Nil => "" + case List("") => "" + case a => a.mkString("", ".", ".") } def qualifiedName(path: List[String], name: String): String = s"${pathPrefix(path)}$name" def stripModules(path: List[String], name: String): Option[Int] => String = { - case Some(keep) => - qualifiedName(path.takeRight(keep), name) - case None => - name + case Some(keep) => qualifiedName(path.takeRight(keep), name) + case None => name } case class TypeParts(sym: Symbol, tt: Type) { - - def modulePath: List[String] = - (tt, sym) match { - case (TypeRef(pre, _, _), _) if !pre.toString.isEmpty => - sanitizePath(pre.toString.split("\\.").toList) - case (SingleType(_, _), sym) => - symbolPath(sym).dropRight(1) - case (_, _) => - Nil - } + def modulePath: List[String] = (tt, sym) match { + case (TypeRef(pre, _, _), _) if !pre.toString.isEmpty => sanitizePath(pre.toString.split("\\.").toList) + case (SingleType(_, _), sym) => symbolPath(sym).dropRight(1) + case (_, _) => Nil + } def ownerPath: List[String] = { - val chain = sym.ownerChain.reverse - val parts = chain.map(_.name.decodedName.toString) - val (paths, names) = parts.splitAt( - Math.max(0, parts.size - 1), - ) - paths + val parts = sym.ownerChain.reverse.map(_.name.decodedName.toString) + parts.splitAt(Math.max(0, parts.size - 1))._1 } - def shortName: String = { - val prefixes = tt.prefixString.split('.').dropRight(1) - val prefix = prefixes.mkString(".") + "." - val name = tt.safeToString - name.stripPrefix(prefix) - } + def shortName: String = tt.safeToString.stripPrefix(tt.prefixString.split('.').dropRight(1).mkString(".") + ".") } - def stripType(tpe: Type): (List[String], String) = - tpe match { - case tt: SingletonType => - val sym = tt.termSymbol - val parts = TypeParts(sym, tt) - - parts.modulePath -> parts.shortName - - case tt: RefinedType => - val sym = tt.typeSymbol - val parts = TypeParts(sym, tt) - - parts.modulePath -> parts.shortName - - case _ => - // TODO: should this also use TypeParts ? 
- val sym = - if (tpe.takesTypeArgs) - tpe.typeSymbolDirect - else - tpe.typeSymbol - val symName = sym.name.decodedName.toString - val parts = TypeParts(sym, tpe) - - val name = - if (sym.isModuleClass) - s"$symName.type" - else - symName - (parts.modulePath, name) - } + def stripType(tpe: Type): (List[String], String) = tpe match { + case tt: SingletonType => + val parts = TypeParts(tt.termSymbol, tt) + parts.modulePath -> parts.shortName - def formatNormalSimple(tpe: Type): (List[String], String) = - tpe match { - case a @ WildcardType => - (Nil, a.toString) - case a => - stripType(a) - } + case tt: RefinedType => + val parts = TypeParts(tt.typeSymbol, tt) + parts.modulePath -> parts.shortName - def formatSimpleType(tpe: Type): (List[String], String) = - if (isAux(tpe)) - formatAuxSimple(tpe) - else - formatNormalSimple(tpe) + case _ => + // TODO: should this also use TypeParts ? + val sym = if (tpe.takesTypeArgs) tpe.typeSymbolDirect else tpe.typeSymbol + val symName = sym.name.decodedName.toString + val parts = TypeParts(sym, tpe) + (parts.modulePath, if (sym.isModuleClass) s"$symName.type" else symName) + } - def indentLine(line: String, n: Int = 1, prefix: String = " ") = (prefix * n) + line + def formatNormalSimple(tpe: Type): (List[String], String) = tpe match { + case a @ WildcardType => (Nil, a.toString) + case a => stripType(a) + } + def formatSimpleType(tpe: Type): (List[String], String) = + if (isAux(tpe)) formatAuxSimple(tpe) + else formatNormalSimple(tpe) + + def indentLine(line: String, n: Int = 1, prefix: String = " ") = (prefix * n) + line def indent(lines: List[String], n: Int = 1, prefix: String = " ") = lines.map(indentLine(_, n, prefix)) - /** - * If the args of an applied type constructor are multiline, create separate - * lines for the constructor name and the closing bracket; else return a - * single line. - */ - def showTypeApply - (cons: String, args: List[TypeRepr], break: Boolean) - : TypeRepr = { - val flatArgs = bracket(args map (_.flat)) - val flat = FlatType(s"$cons$flatArgs") + /** If the args of an applied type constructor are multiline, + * create separate lines for the constructor name and the closing bracket; + * else return a single line. 
*/ + def showTypeApply(cons: String, args: List[TypeRepr], break: Boolean): TypeRepr = { + val flatArgs = bracket(args.map(_.flat)) + val flat = FlatType(s"$cons$flatArgs") def brokenArgs = args match { - case head :: tail => - tail.foldLeft(head.lines)((z, a) => z ::: "," :: a.lines) - case _ => Nil + case head :: tail => tail.foldLeft(head.lines)((z, a) => z ::: "," :: a.lines) + case _ => Nil } def broken = BrokenType(s"$cons[" :: indent(brokenArgs) ::: List("]")) if (break) decideBreak(flat, broken) else flat } - def showTuple(args: List[String]) = - args match { - case head :: Nil => - s"Tuple1[$head]" - case _ => - args.mkString("(", ",", ")") - } + def showTuple(args: List[String]) = args match { + case head :: Nil => s"Tuple1[$head]" + case _ => args.mkString("(", ",", ")") + } - def showFuncParams(args: List[String]) = - args match { - case head :: Nil => - head - case _ => - args.mkString("(", ",", ")") - } + def showFuncParams(args: List[String]) = args match { + case head :: Nil => head + case _ => args.mkString("(", ",", ")") + } def showRefined(parents: List[String], decls: List[String]) = { val p = parents.mkString(" with ") - val d = - if (decls.isEmpty) - "" - else - decls.mkString(" {", "; ", "}") + val d = if (decls.isEmpty) "" else decls.mkString(" {", "; ", "}") s"$p$d" } @@ -444,358 +311,210 @@ extends SplainFormatters } def decideBreak(flat: FlatType, broken: => BrokenType): TypeRepr = - if (flat.length > breakInfixLength) broken + if (flat.flat.length > breakInfixLength) broken else flat - /** - * Turn a nested infix type structure into a flat list - * ::[A, ::[B, C]]] => List(A, ::, B, ::, C) + /** Turn a nested infix type structure into a flat list + * {{{ + * ::[A, ::[B, C]]] => List(A, ::, B, ::, C) + * }}} */ def flattenInfix(tpe: Infix): List[Formatted] = { def step(tpe: Formatted): List[Formatted] = tpe match { - case Infix(infix, left, right, top) => - left :: infix :: step(right) - case a => List(a) + case Infix(infix, left, right, _) => left :: infix :: step(right) + case a => List(a) } step(tpe) } - /** - * Break a list produced by [[flattenInfix]] into lines by taking two - * elements at a time, then appending the terminal. - * If the expression's length is smaller than the threshold specified via - * plugin parameter, return a single line. - */ + /** Break a list produced by [[flattenInfix]] into lines by taking two + * elements at a time, then appending the terminal. + * If the expression's length is smaller than the threshold specified via + * plugin parameter, return a single line. 
*/ def breakInfix(types: List[Formatted]): TypeRepr = { - val form = types map showFormattedLBreak - def broken: List[String] = form - .sliding(2, 2) - .toList - .flatMap { - case left :: right :: Nil => - (left, right) match { - case (FlatType(tpe), FlatType(infix)) => - List(s"$tpe $infix") - case _ => left.lines ++ right.lines - } - case last :: Nil => last.lines - // for exhaustiveness, cannot be reached - case l => l.flatMap(_.lines) - } - val flat = FlatType(form.flatMap(_.lines) mkString " ") - decideBreak(flat, BrokenType(broken)) + val form = types.map(showFormattedL(_, break = true)) + def broken = form.sliding(2, 2).flatMap { + case FlatType(tpe) :: FlatType(infix) :: Nil => List(s"$tpe $infix") + case left :: right :: Nil => left.lines ++ right.lines + case last :: Nil => last.lines + case _ => Nil + }.toList + decideBreak(FlatType(form.flatMap(_.lines).mkString(" ")), BrokenType(broken)) } - val showFormattedLCache = FormatCache[(Formatted, Boolean), TypeRepr] + val showFormattedLCache = FormatCache[(Formatted, Boolean), TypeRepr]() + val formatTypeCache = FormatCache[(Type, Boolean), Formatted]() + val formatDiffCache = FormatCache[(Type, Type, Boolean), Formatted]() - def truncateDecls(decls: List[Formatted]): Boolean = splainSettingTruncRefined.exists(_ < decls.map(_.length).sum) + val specialFormatters: List[SpecialFormatter] = + List(FunctionFormatter, TupleFormatter, RefinedFormatter, ByNameFormatter) - def showFormattedQualified(path: List[String], name: String): TypeRepr = - FlatType(name) + def truncateDecls(decls: List[Formatted]): Boolean = settings.VimplicitsMaxRefined.value < decls.map(_.length).sum - def formattedDiff: (Formatted, Formatted) => String = { + def formattedDiff(left: Formatted, right: Formatted): String = (left, right) match { case (Qualified(lpath, lname), Qualified(rpath, rname)) if lname == rname => - val prefix = - lpath - .reverse - .zip(rpath.reverse) - .takeWhile { case (l, r) => - l == r - } - .size + 1 + val prefix = lpath.reverseIterator.zip(rpath.reverseIterator).takeWhile { case (l, r) => l == r }.size + 1 s"${qualifiedName(lpath.takeRight(prefix), lname).red}|${qualifiedName(rpath.takeRight(prefix), rname).green}" case (left, right) => - val l = showFormattedNoBreak(left) - val r = showFormattedNoBreak(right) + val l = showFormatted(left) + val r = showFormatted(right) s"${l.red}|${r.green}" } - def showFormattedLImpl(tpe: Formatted, break: Boolean): TypeRepr = - tpe match { - case Simple(name) => - FlatType(name) - case Qualified(Nil, name) => - FlatType(name) - case Qualified(path, name) => - showFormattedQualified(path, name) - case Applied(cons, args) => - val reprs = args.map(showFormattedL(_, break)) - showTypeApply(showFormattedNoBreak(cons), reprs, break) - case tpe @ Infix(_, _, _, top) => - val flat = flattenInfix(tpe) - val broken: TypeRepr = - if (break) - breakInfix(flat) - else - FlatType(flat.map(showFormattedNoBreak).mkString(" ")) - wrapParensRepr(broken, top) - case UnitForm => - FlatType("Unit") - case FunctionForm(args, ret, top) => - val a = showFuncParams(args.map(showFormattedNoBreak)) - val r = showFormattedNoBreak(ret) - FlatType(wrapParens(s"$a => $r", top)) - case TupleForm(elems) => - FlatType(showTuple(elems.map(showFormattedNoBreak))) - case RefinedForm(elems, decls) if truncateDecls(decls) => - FlatType(showRefined(elems.map(showFormattedNoBreak), List("..."))) - case RefinedForm(elems, decls) => - FlatType(showRefined(elems.map(showFormattedNoBreak), decls.map(showFormattedNoBreak))) - case Diff(left, 
right) => - FlatType(formattedDiff(left, right)) - case Decl(sym, rhs) => - val s = showFormattedNoBreak(sym) - val r = showFormattedNoBreak(rhs) - FlatType(s"type $s = $r") - case DeclDiff(sym, left, right) => - val s = showFormattedNoBreak(sym) - val diff = formattedDiff(left, right) - FlatType(s"type $s = $diff") - case ByName(tpe) => - val t = showFormattedNoBreak(tpe) - FlatType(s"(=> $t)") - } - - def showFormattedL(tpe: Formatted, break: Boolean): TypeRepr = { - val key = (tpe, break) - showFormattedLCache(key, showFormattedLImpl(tpe, break)) + def showFormattedLImpl(tpe: Formatted, break: Boolean): TypeRepr = tpe match { + case Simple(name) => FlatType(name) + case Qualified(_, name) => FlatType(name) + case Applied(cons, args) => showTypeApply(showFormatted(cons), args.map(showFormattedL(_, break)), break) + case tpe @ Infix(_, _, _, top) => wrapParensRepr(if (break) breakInfix(flattenInfix(tpe)) else FlatType(flattenInfix(tpe).map(showFormatted).mkString(" ")), top) + case UnitForm => FlatType("Unit") + case FunctionForm(args, ret, top) => FlatType(wrapParens(s"${showFuncParams(args.map(showFormatted))} => ${showFormatted(ret)}", top)) + case TupleForm(elems) => FlatType(showTuple(elems.map(showFormatted))) + case RefinedForm(elems, decls) => FlatType(showRefined(elems.map(showFormatted), if (truncateDecls(decls)) List("...") else decls.map(showFormatted))) + case Diff(left, right) => FlatType(formattedDiff(left, right)) + case Decl(sym, rhs) => FlatType(s"type ${showFormatted(sym)} = ${showFormatted(rhs)}") + case DeclDiff(sym, left, right) => FlatType(s"type ${showFormatted(sym)} = ${formattedDiff(left, right)}") + case ByName(tpe) => FlatType(s"(=> ${showFormatted(tpe)})") } - def showFormattedLBreak(tpe: Formatted): TypeRepr = showFormattedL(tpe, true) - - def showFormattedLNoBreak(tpe: Formatted): TypeRepr = showFormattedL(tpe, false) - - def showFormatted(tpe: Formatted, break: Boolean): String = showFormattedL(tpe, break).joinLines - - def showFormattedNoBreak(tpe: Formatted): String = showFormattedLNoBreak(tpe).tokenize + def showFormattedL(tpe: Formatted, break: Boolean): TypeRepr = showFormattedLCache((tpe, break), showFormattedLImpl(tpe, break)) + def showFormatted(tpe: Formatted): String = showFormattedL(tpe, break = false).tokenize + def showType(tpe: Type): String = showFormattedL(formatType(tpe, top = true), break = false).joinLines + def showTypeBreakL(tpe: Type): List[String] = showFormattedL(formatType(tpe, top = true), break = true).lines - def showType(tpe: Type): String = showFormatted(formatType(tpe, true), false) + def wrapParens(expr: String, top: Boolean): String = if (top) expr else s"($expr)" - def showTypeBreak(tpe: Type): String = showFormatted(formatType(tpe, true), true) - - def showTypeBreakL(tpe: Type): List[String] = showFormattedL(formatType(tpe, true), true).lines - - def wrapParens(expr: String, top: Boolean): String = - if (top) - expr - else - s"($expr)" - - def wrapParensRepr(tpe: TypeRepr, top: Boolean): TypeRepr = - tpe match { - case FlatType(tpe) => - FlatType(wrapParens(tpe, top)) - case BrokenType(lines) => - if (top) - tpe - else - BrokenType("(" :: indent(lines) ::: List(")")) - } - - val specialFormatters: List[SpecialFormatter] = - List( - FunctionFormatter, - TupleFormatter, - RefinedFormatter, - ByNameFormatter, - ) - - def formatSpecial[A](tpe: Type, simple: String, args: List[A], formattedArgs: => List[Formatted], top: Boolean, - rec: A => Boolean => Formatted) - : Option[Formatted] = { - specialFormatters - .map(_.apply(tpe, 
simple, args, formattedArgs, top, rec)) - .collectFirst { case Some(a) => a } - .headOption + def wrapParensRepr(tpe: TypeRepr, top: Boolean): TypeRepr = tpe match { + case FlatType(tpe) => FlatType(wrapParens(tpe, top)) + case BrokenType(lines) => if (top) tpe else BrokenType("(" :: indent(lines) ::: List(")")) } + def formatSpecial[A]( + tpe: Type, simple: String, args: List[A], formattedArgs: => List[Formatted], top: Boolean, + )(rec: (A, Boolean) => Formatted): Option[Formatted] = + specialFormatters.iterator.map(_.apply(tpe, simple, args, formattedArgs, top)(rec)).collectFirst { case Some(a) => a } + def formatInfix[A]( - path: List[String], - simple: String, - left: A, - right: A, - top: Boolean, - rec: A => Boolean => Formatted, - ) = { - val l = rec(left)(false) - val r = rec(right)(false) - Infix(Qualified(path, simple), l, r, top) - } - - def formatWithInfix[A](tpe: Type, args: List[A], top: Boolean, rec: A => Boolean => Formatted): Formatted = { - val (path, simple) = formatSimpleType(tpe) - lazy val formattedArgs = args.map(rec(_)(true)) - formatSpecial(tpe, simple, args, formattedArgs, top, rec).getOrElse { + path: List[String], simple: String, left: A, right: A, top: Boolean, + )(rec: (A, Boolean) => Formatted): Formatted = + Infix(Qualified(path, simple), rec(left, false), rec(right, false), top) + + def formatWithInfix[A](tpe: Type, args: List[A], top: Boolean)(rec: (A, Boolean) => Formatted): Formatted = { + val (path, simple) = formatSimpleType(tpe) + lazy val formattedArgs = args.map(rec(_, true)) + formatSpecial(tpe, simple, args, formattedArgs, top)(rec).getOrElse { args match { - case left :: right :: Nil if isSymbolic(tpe) => - formatInfix(path, simple, left, right, top, rec) - case _ :: _ => - Applied(Qualified(path, simple), formattedArgs) - case _ => - Qualified(path, simple) + case left :: right :: Nil if isSymbolic(tpe) => formatInfix(path, simple, left, right, top)(rec) + case _ :: _ => Applied(Qualified(path, simple), formattedArgs) + case _ => Qualified(path, simple) } } } def formatTypeImpl(tpe: Type, top: Boolean): Formatted = { val dtpe = dealias(tpe) - val rec = (tp: Type) => (t: Boolean) => formatType(tp, t) - formatWithInfix(dtpe, extractArgs(dtpe), top, rec) + formatWithInfix(dtpe, extractArgs(dtpe), top)(formatType) } - val formatTypeCache = FormatCache[(Type, Boolean), Formatted] + def formatType(tpe: Type, top: Boolean): Formatted = formatTypeCache((tpe, top), formatTypeImpl(tpe, top)) - def formatType(tpe: Type, top: Boolean): Formatted = { - val key = (tpe, top) - formatTypeCache(key, formatTypeImpl(tpe, top)) - } + def formatDiffInfix(left: Type, right: Type, top: Boolean): Formatted = + formatWithInfix(left, extractArgs(left).zip(extractArgs(right)), top) { case ((l, r), t) => formatDiff(l, r, t) } - def formatDiffInfix(left: Type, right: Type, top: Boolean): Formatted = { - val rec = (l: Type, r: Type) => (t: Boolean) => formatDiff(l, r, t) - val recT = rec.tupled - val args = extractArgs(left) zip extractArgs(right) - formatWithInfix(left, args, top, recT) - } + def formatDiffSpecial(left: Type, right: Type, top: Boolean): Option[Formatted] = + specialFormatters.iterator.map(_.diff(left, right, top)).collectFirst { case Some(a) => a } - def formatDiffSpecial(left: Type, right: Type, top: Boolean): Option[Formatted] = { - specialFormatters.map(_.diff(left, right, top)) - .collectFirst { case Some(a) => a } - .headOption - } - - def formatDiffSimple(left: Type, right: Type): Formatted = { - val l = formatType(left, true) - val r = 
formatType(right, true) - Diff(l, r) - } + def formatDiffSimple(left: Type, right: Type): Formatted = + Diff(formatType(left, true), formatType(right, true)) def formatDiffImpl(found: Type, req: Type, top: Boolean): Formatted = { val (left, right) = dealias(found) -> dealias(req) - if (left =:= right) - formatType(left, top) - else if (left.typeSymbol == right.typeSymbol) - formatDiffInfix(left, right, top) - else - formatDiffSpecial(left, right, top) getOrElse - formatDiffSimple(left, right) + if (left =:= right) formatType(left, top) + else if (left.typeSymbol == right.typeSymbol) formatDiffInfix(left, right, top) + else formatDiffSpecial(left, right, top).getOrElse(formatDiffSimple(left, right)) } - val formatDiffCache = FormatCache[(Type, Type, Boolean), Formatted] - - def formatDiff(left: Type, right: Type, top: Boolean): Formatted = { - val key = (left, right, top) - formatDiffCache(key, formatDiffImpl(left, right, top)) - } + def formatDiff(left: Type, right: Type, top: Boolean): Formatted = + formatDiffCache((left, right, top), formatDiffImpl(left, right, top)) def formatNonConfBounds(err: ImplicitErrorSpecifics.NonconformantBounds): List[String] = { val params = bracket(err.tparams.map(_.defString)) - val tpes = bracket(err.targs map showType) - List("nonconformant bounds;", tpes.red, params.green) + val types = bracket(err.targs.map(showType)) + List("nonconformant bounds;", types.red, params.green) } def formatNestedImplicit(err: ImplicitError): (String, List[String], Int) = { val candidate = ImplicitError.cleanCandidate(err) - val problem = s"${candidate.red} invalid because" - val reason = err.specifics match { - case e: ImplicitErrorSpecifics.NotFound => - implicitMessage(e.param, NoImplicitFoundAnnotation(err.candidate, e.param).map(_._2)) - case e: ImplicitErrorSpecifics.NonconformantBounds => - formatNonConfBounds(e) + val problem = s"${candidate.red} invalid because" + val reason = err.specifics match { + case e: ImplicitErrorSpecifics.NotFound => implicitMessage(e.param, NoImplicitFoundAnnotation(err.candidate, e.param)._2) + case e: ImplicitErrorSpecifics.NonconformantBounds => formatNonConfBounds(e) } (problem, reason, err.nesting) } - def hideImpError(error: ImplicitError): Boolean = - error.specifics match { - case ImplicitErrorSpecifics.NonconformantBounds(_, _, _) => true - case ImplicitErrorSpecifics.NotFound(_) => false - } + def hideImpError(error: ImplicitError): Boolean = error.specifics match { + case ImplicitErrorSpecifics.NonconformantBounds(_, _, _) => true + case ImplicitErrorSpecifics.NotFound(_) => false + } def indentTree(tree: List[(String, List[String], Int)], baseIndent: Int): List[String] = { val nestings = tree.map(_._3).distinct.sorted - tree - .flatMap { - case (head, tail, nesting) => - val ind = baseIndent + nestings.indexOf(nesting).abs - indentLine(head, ind, "――") :: indent(tail, ind) - } + tree.flatMap { case (head, tail, nesting) => + val ind = baseIndent + nestings.indexOf(nesting).abs + indentLine(head, ind, "――") :: indent(tail, ind) + } } - def formatIndentTree(chain: List[ImplicitError], baseIndent: Int) = { - val formatted = chain map formatNestedImplicit - indentTree(formatted, baseIndent) - } + def formatIndentTree(chain: List[ImplicitError], baseIndent: Int) = + indentTree(chain.map(formatNestedImplicit), baseIndent) - def deepestLevel(chain: List[ImplicitError]) = { + def deepestLevel(chain: List[ImplicitError]) = chain.foldLeft(0)((z, a) => if (a.nesting > z) a.nesting else z) - } def formatImplicitChainTreeCompact(chain: 
List[ImplicitError]): Option[List[String]] = { - chain - .headOption - .map { head => - val max = deepestLevel(chain) - val leaves = chain.drop(1).dropWhile(_.nesting < max) - val base = if (head.nesting == 0) 0 else 1 - val (fhh, fht, fhn) = formatNestedImplicit(head) - val spacer = if (leaves.nonEmpty && leaves.length < chain.length) List("⋮".blue) else Nil - val fh = (fhh, fht ++ spacer, fhn) - val ft = leaves map formatNestedImplicit - indentTree(fh :: ft, base) - } + chain.headOption.map { head => + val max = deepestLevel(chain) + val leaves = chain.drop(1).dropWhile(_.nesting < max) + val base = if (head.nesting == 0) 0 else 1 + val (fhh, fht, fhn) = formatNestedImplicit(head) + val spacer = if (leaves.nonEmpty && leaves.length < chain.length) List("⋮".blue) else Nil + val fh = (fhh, fht ++ spacer, fhn) + val ft = leaves.map(formatNestedImplicit) + indentTree(fh :: ft, base) + } } - def formatImplicitChainTreeFull(chain: List[ImplicitError]): List[String] = { - val baseIndent = chain.headOption.map(_.nesting).getOrElse(0) - formatIndentTree(chain, baseIndent) - } + def formatImplicitChainTreeFull(chain: List[ImplicitError]): List[String] = + formatIndentTree(chain, chain.headOption.map(_.nesting).getOrElse(0)) - def formatImplicitChainFlat(chain: List[ImplicitError]): List[String] = { - chain map formatNestedImplicit flatMap { case (h, t, _) => h :: t } - } + def formatImplicitChainFlat(chain: List[ImplicitError]): List[String] = + chain.map(formatNestedImplicit).flatMap { case (h, t, _) => h :: t } def formatImplicitChain(chain: List[ImplicitError]): List[String] = { - val compact = if (settings.implicitsSettingVerboseTree) None else formatImplicitChainTreeCompact(chain) - compact getOrElse formatImplicitChainTreeFull(chain) + val compact = if (settings.VimplicitsVerboseTree) None else formatImplicitChainTreeCompact(chain) + compact.getOrElse(formatImplicitChainTreeFull(chain)) } - /** - * Remove duplicates and special cases that should not be shown. - * In some cases, candidates are reported twice, once as `Foo.f` and once as - * `f`. `ImplicitError.equals` checks the simple names for identity, which - * is suboptimal, but works for 99% of cases. - * Special cases are handled in [[hideImpError]] - */ + /** Remove duplicates and special cases that should not be shown. + * In some cases, candidates are reported twice, once as `Foo.f` and once as + * `f`. `ImplicitError.equals` checks the simple names for identity, which + * is suboptimal, but works for 99% of cases. 
+ * Special cases are handled in [[hideImpError]] */ def formatNestedImplicits(errors: List[ImplicitError]) = { - val visible = errors filterNot hideImpError - val chains = splitChains(visible).map(_.distinct).distinct - chains map formatImplicitChain flatMap ("" :: _) drop 1 - } - - def formatImplicitParam(sym: Symbol) = sym.name.toString - - def effectiveImplicitType(tpe: Type) = { - if (tpe.typeSymbol.name.toString == "Lazy") - tpe.typeArgs.headOption.getOrElse(tpe) - else tpe + val visible = errors.filterNot(hideImpError) + val chains = splitChains(visible).map(_.distinct).distinct + chains.map(formatImplicitChain).flatMap("" :: _).drop(1) } - def implicitMessage(param: Symbol, annotationMsg: Option[String]): List[String] = { + def implicitMessage(param: Symbol, annotationMsg: String): List[String] = { val tpe = param.tpe - val msg = annotationMsg match { - case Some(msg) => msg.split("\n").toList.map(_.blue) ++ List("") - case _ => Nil - } - val effTpe = effectiveImplicitType(tpe) - val paramName = formatImplicitParam(param) - val bang = "!" - val i = "I" - val head = s"${bang.red}${i.blue} ${paramName.yellow}:" - val lines = showTypeBreakL(effTpe) match { - case single :: Nil => List(s"$head ${single.green}") - case l => head :: indent(l).map(_.green) + val msg = if (annotationMsg.isEmpty) Nil else annotationMsg.split("\n").toList.map(_.blue) :+ "" + val head = s"${"!".red}${"I".blue} ${param.name.toString.yellow}:" + val lines = showTypeBreakL(tpe).map(_.green) match { + case single :: Nil => List(s"$head $single") + case l => head :: indent(l) } - lines ++ indent(msg) + lines ::: indent(msg) } def splitChains(errors: List[ImplicitError]): List[List[ImplicitError]] = { @@ -803,24 +522,10 @@ extends SplainFormatters case (a, chains @ ((chain @ (prev :: _)) :: tail)) => if (a.nesting > prev.nesting) List(a) :: chains else (a :: chain) :: tail - case (a, _) => - List(List(a)) + case (a, _) => List(List(a)) } } - def formatImplicitError(param: Symbol, errors: List[ImplicitError], annotationMsg: Option[String]) = { - val stack = formatNestedImplicits(errors) - val nl = if (errors.nonEmpty) "\n" else "" - val ex = stack.mkString("\n") - val pre = "implicit error;\n" - val msg = implicitMessage(param, annotationMsg).mkString("\n") - s"$pre$msg$nl$ex" - } - - def cacheStats = { - val sfl = showFormattedLCache.stats - val ft = formatTypeCache.stats - val df = formatDiffCache.stats - s"showFormatted -> $sfl, formatType -> $ft, formatDiff -> $df" - } + def formatImplicitError(param: Symbol, errors: List[ImplicitError], annotationMsg: String) = + ("implicit error;" :: implicitMessage(param, annotationMsg) ::: formatNestedImplicits(errors)).mkString("\n") } diff --git a/src/compiler/scala/tools/reflect/ToolBox.scala b/src/compiler/scala/tools/reflect/ToolBox.scala index cccce85741c4..fa77e7341c4c 100644 --- a/src/compiler/scala/tools/reflect/ToolBox.scala +++ b/src/compiler/scala/tools/reflect/ToolBox.scala @@ -84,6 +84,7 @@ trait ToolBox[U <: scala.reflect.api.Universe] { * * If `silent` is false, `ToolBoxError` will be thrown in case of an inference error. * If `silent` is true, the typecheck is silent and will return `EmptyTree` if an error occurs. + * Such errors don't vanish and can be inspected by turning on -Vimplicits. * Unlike in `typecheck`, `silent` is true by default. 
*/ def inferImplicitValue(pt: u.Type, silent: Boolean = true, withMacrosDisabled: Boolean = false, pos: u.Position = u.NoPosition): u.Tree @@ -97,6 +98,7 @@ trait ToolBox[U <: scala.reflect.api.Universe] { * * If `silent` is false, `ToolBoxError` will be thrown in case of an inference error. * If `silent` is true, the typecheck is silent and will return `EmptyTree` if an error occurs. + * Such errors don't vanish and can be inspected by turning on -Vimplicits. * Unlike in `typecheck`, `silent` is true by default. */ def inferImplicitView(tree: u.Tree, from: u.Type, to: u.Type, silent: Boolean = true, withMacrosDisabled: Boolean = false, pos: u.Position = u.NoPosition): u.Tree diff --git a/src/reflect/scala/reflect/internal/TypeDebugging.scala b/src/reflect/scala/reflect/internal/TypeDebugging.scala index 550bd11bb433..2be3f520345a 100644 --- a/src/reflect/scala/reflect/internal/TypeDebugging.scala +++ b/src/reflect/scala/reflect/internal/TypeDebugging.scala @@ -63,24 +63,7 @@ trait TypeDebugging { /** Light color wrappers. */ - object typeDebug { - import scala.io.AnsiColor._ - - private[this] val colorsOk = scala.util.Properties.coloredOutputEnabled - private def inColor(s: String, color: String) = if (colorsOk && s != "") color + s + RESET else s - private def inBold(s: String, color: String) = if (colorsOk && s != "") color + BOLD + s + RESET else s - - def inLightRed(s: String) = inColor(s, RED) - def inLightGreen(s: String) = inColor(s, GREEN) - def inLightMagenta(s: String) = inColor(s, MAGENTA) - def inLightCyan(s: String): String = inColor(s, CYAN) - def inGreen(s: String): String = inBold(s, GREEN) - def inRed(s: String): String = inBold(s, RED) - def inBlue(s: String): String = inBold(s, BLUE) - def inCyan(s: String): String = inBold(s, CYAN) - def inMagenta(s: String) = inBold(s, MAGENTA) - def resetColor(s: String): String = if (colorsOk) s + RESET else s - + object typeDebug extends TypeDebugging.AnsiColor { private def to_s(x: Any): String = x match { // otherwise case classes are caught looking like products case _: Tree | _: Type => "" + x @@ -160,3 +143,33 @@ trait TypeDebugging { def typeParamsString(tp: Type) = typeDebug.str brackets (tp.typeParams map (_.defString)) def debugString(tp: Type) = typeDebug debugString tp } + +object TypeDebugging { + object AnsiColor extends AnsiColor { + implicit class StringColorOps(private val s: String) extends AnyVal { + def red = inLightRed(s) + def green = inLightGreen(s) + def yellow = inLightYellow(s) + def blue = inLightBlue(s) + } + } + + trait AnsiColor extends scala.io.AnsiColor { + private[this] val colorsOk = scala.util.Properties.coloredOutputEnabled + private def inColor(s: String, color: String) = if (colorsOk && s != "") color + s + RESET else s + private def inBold(s: String, color: String) = if (colorsOk && s != "") color + BOLD + s + RESET else s + + def inLightRed(s: String) = inColor(s, RED) + def inLightBlue(s: String) = inColor(s, BLUE) + def inLightGreen(s: String) = inColor(s, GREEN) + def inLightYellow(s: String): String = inColor(s, YELLOW) + def inLightMagenta(s: String) = inColor(s, MAGENTA) + def inLightCyan(s: String): String = inColor(s, CYAN) + def inGreen(s: String): String = inBold(s, GREEN) + def inRed(s: String): String = inBold(s, RED) + def inBlue(s: String): String = inBold(s, BLUE) + def inCyan(s: String): String = inBold(s, CYAN) + def inMagenta(s: String) = inBold(s, MAGENTA) + def resetColor(s: String): String = if (colorsOk) s + RESET else s + } +} diff --git 
a/src/reflect/scala/reflect/macros/Typers.scala b/src/reflect/scala/reflect/macros/Typers.scala index 90f360901c64..e702f21ebbb1 100644 --- a/src/reflect/scala/reflect/macros/Typers.scala +++ b/src/reflect/scala/reflect/macros/Typers.scala @@ -91,6 +91,7 @@ trait Typers { * * If `silent` is false, `TypecheckException` will be thrown in case of an inference error. * If `silent` is true, the typecheck is silent and will return `EmptyTree` if an error occurs. + * Such errors don't vanish and can be inspected by turning on -Vimplicits. * Unlike in `typecheck`, `silent` is true by default. * * @throws scala.reflect.macros.TypecheckException @@ -102,6 +103,7 @@ trait Typers { * * If `silent` is false, `TypecheckException` will be thrown in case of an inference error. * If `silent` is true, the typecheck is silent and will return `EmptyTree` if an error occurs. + * Such errors don't vanish and can be inspected by turning on -Vimplicits. * Unlike in `typecheck`, `silent` is true by default. * * @throws scala.reflect.macros.TypecheckException diff --git a/test/files/neg/implicit-any2stringadd.scala b/test/files/neg/implicit-any2stringadd.scala index 32984ab85dbb..7e86c89cd31f 100644 --- a/test/files/neg/implicit-any2stringadd.scala +++ b/test/files/neg/implicit-any2stringadd.scala @@ -1,4 +1,4 @@ -// scalac: -Xsource:3 +// scalac: -Xsource:3 -Vimplicits // object Test { true + "what" diff --git a/test/files/neg/implicit-log.scala b/test/files/neg/implicit-log.scala index 0e5d3f53ad77..f77085e3c2af 100644 --- a/test/files/neg/implicit-log.scala +++ b/test/files/neg/implicit-log.scala @@ -1,4 +1,4 @@ -/* scalac: -Xsource:3 -Xfatal-warnings */ +/* scalac: -Vimplicits -Xsource:3 -Xfatal-warnings */ package foo diff --git a/test/files/neg/implicit-shadow.check b/test/files/neg/implicit-shadow.check index a36b502f43af..d7909b9c3a11 100644 --- a/test/files/neg/implicit-shadow.check +++ b/test/files/neg/implicit-shadow.check @@ -1,4 +1,4 @@ -implicit-shadow.scala:4: error: value isEmpty is not a member of Int +implicit-shadow.scala:6: error: value isEmpty is not a member of Int 1.isEmpty ^ 1 error diff --git a/test/files/neg/implicit-shadow.scala b/test/files/neg/implicit-shadow.scala index ec7f70b6d01e..33725ece13f1 100644 --- a/test/files/neg/implicit-shadow.scala +++ b/test/files/neg/implicit-shadow.scala @@ -1,3 +1,5 @@ +// scalac: -Vimplicits +// object Test { import B._, C._ diff --git a/test/files/neg/t6323a.scala b/test/files/neg/t6323a.scala index 30f5bac00ede..182c31c609a1 100644 --- a/test/files/neg/t6323a.scala +++ b/test/files/neg/t6323a.scala @@ -1,4 +1,4 @@ -// scalac: -Vimplicits no-color +// scalac: -Vimplicits // import scala.reflect.runtime.universe._ import scala.reflect.runtime.{currentMirror => m} diff --git a/test/files/run/splain-tree.scala b/test/files/run/splain-tree.scala index 372eb8a17006..d660ee85d3f2 100644 --- a/test/files/run/splain-tree.scala +++ b/test/files/run/splain-tree.scala @@ -1,9 +1,7 @@ import scala.tools.partest._ -object Test -extends DirectTest -{ - override def extraSettings: String = "-usejavacp -Vimplicits:verbose-tree,no-color" +object Test extends DirectTest { + override def extraSettings: String = "-usejavacp -Vimplicits -Vimplicits-verbose-tree" def code: String = "" diff --git a/test/files/run/splain-truncrefined.scala b/test/files/run/splain-truncrefined.scala index da24f448bbe7..2be99a6350bb 100644 --- a/test/files/run/splain-truncrefined.scala +++ b/test/files/run/splain-truncrefined.scala @@ -1,9 +1,7 @@ import scala.tools.partest._ -object Test 
-extends DirectTest -{ - override def extraSettings: String = "-usejavacp -Vimplicits:no-color -Vtype-diffs -Vimplicits-max-refined 5" +object Test extends DirectTest { + override def extraSettings: String = "-usejavacp -Vimplicits -Vtype-diffs -Vimplicits-max-refined 5" def code: String = "" diff --git a/test/files/run/splain.check b/test/files/run/splain.check index 1e534c40a026..60b373684230 100644 --- a/test/files/run/splain.check +++ b/test/files/run/splain.check @@ -13,7 +13,6 @@ newSource1.scala:6: error: type mismatch; ^ newSource1.scala:7: error: implicit error; !I e: F[Arg] - implicitly[F[Arg]] ^ newSource1.scala:4: error: implicit error; diff --git a/test/files/run/splain.scala b/test/files/run/splain.scala index 62a829996302..5c851b76ba9a 100644 --- a/test/files/run/splain.scala +++ b/test/files/run/splain.scala @@ -3,7 +3,7 @@ import scala.tools.partest._ object Test extends DirectTest { - override def extraSettings: String = "-usejavacp -Vimplicits no-color -Vtype-diffs" + override def extraSettings: String = "-usejavacp -Vimplicits -Vtype-diffs" def code: String = "" From 5d4755fbf45a691e057973146eaa8823651944f3 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Tue, 6 Apr 2021 16:56:36 +0100 Subject: [PATCH 087/769] Default -Vimplicits to true to see test impact --- .../tools/nsc/settings/ScalaSettings.scala | 2 +- .../annotated-literal-annotation-arg.check | 10 +- test/files/neg/classtags_contextbound_a.check | 5 +- test/files/neg/classtags_contextbound_b.check | 5 +- test/files/neg/classtags_contextbound_c.check | 5 +- .../neg/classtags_dont_use_typetags.check | 5 +- test/files/neg/implicits.check | 3 +- ...op_abstypetags_arenot_classmanifests.check | 5 +- ...interop_abstypetags_arenot_classtags.check | 5 +- ...interop_abstypetags_arenot_manifests.check | 5 +- ...terop_classmanifests_arenot_typetags.check | 5 +- .../interop_classtags_arenot_manifests.check | 5 +- ...terop_typetags_arenot_classmanifests.check | 5 +- .../interop_typetags_arenot_classtags.check | 5 +- test/files/neg/leibniz-liskov.check | 35 +- test/files/neg/literate_existentials.check | 5 +- test/files/neg/macro-cyclic.check | 3 +- .../neg/macro-divergence-controlled.check | 3 +- ...ro-reify-typetag-hktypeparams-notags.check | 10 +- ...acro-reify-typetag-typeparams-notags.check | 10 +- .../macro-reify-typetag-useabstypetag.check | 10 +- test/files/neg/missing-implicit.check | 50 ++- test/files/neg/sortedImplicitNotFound.check | 316 ++++++++++++++++-- test/files/neg/t0226.check | 3 +- test/files/neg/t10066.check | 6 +- test/files/neg/t10156.check | 3 +- test/files/neg/t10279.check | 9 +- test/files/neg/t11591.check | 5 +- test/files/neg/t11643.check | 6 +- test/files/neg/t11823.check | 6 +- test/files/neg/t2405.check | 3 +- test/files/neg/t2421b.check | 3 +- test/files/neg/t2462a.check | 5 +- test/files/neg/t2462c.check | 25 +- test/files/neg/t3346b.check | 3 +- test/files/neg/t3399.check | 5 +- test/files/neg/t3507-old.check | 5 +- test/files/neg/t3977.check | 3 +- test/files/neg/t4079.check | 3 +- test/files/neg/t4270.check | 3 +- test/files/neg/t4889.check | 3 +- test/files/neg/t550.check | 3 +- test/files/neg/t5553_2.check | 12 +- test/files/neg/t5801.check | 6 +- test/files/neg/t5803.check | 3 +- test/files/neg/t6528.check | 3 +- test/files/neg/t7289.check | 5 +- test/files/neg/t7289_status_quo.check | 19 +- test/files/neg/t7509.check | 3 +- test/files/neg/t7686.check | 15 +- test/files/neg/t8104.check | 3 +- test/files/neg/t8291.check | 10 +- test/files/neg/t8372.check | 10 +- 
test/files/neg/t9041.check | 3 +- test/files/neg/t9717.check | 6 +- test/files/neg/t9960.check | 10 +- ...without_scala_reflect_typetag_lookup.check | 3 +- ...ala_reflect_typetag_manifest_interop.check | 5 +- 58 files changed, 607 insertions(+), 123 deletions(-) diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index c3b224d888c0..42d09f7c81de 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -501,7 +501,7 @@ trait ScalaSettings extends StandardScalaSettings with Warnings { _: MutableSett .withAbbreviation("-Yhot-statistics") val Yshowsyms = BooleanSetting("-Vsymbols", "Print the AST symbol hierarchy after each phase.") withAbbreviation "-Yshow-syms" val Ytyperdebug = BooleanSetting("-Vtyper", "Trace type assignments.") withAbbreviation "-Ytyper-debug" - val Vimplicits = BooleanSetting("-Vimplicits", "Print dependent missing implicits.").withAbbreviation("-Xlog-implicits") + val Vimplicits = BooleanSetting("-Vimplicits", "Print dependent missing implicits.", true).withAbbreviation("-Xlog-implicits") val VimplicitsVerboseTree = BooleanSetting("-Vimplicits-verbose-tree", "Display all intermediate implicits in a chain.") val VimplicitsMaxRefined = IntSetting("-Vimplicits-max-refined", "max chars for printing refined types, abbreviate to `F {...}`", Int.MaxValue, Some((0, Int.MaxValue)), _ => None) val VtypeDiffs = BooleanSetting("-Vtype-diffs", "Print found/required error messages as colored diffs.") diff --git a/test/files/neg/annotated-literal-annotation-arg.check b/test/files/neg/annotated-literal-annotation-arg.check index 220ab9a992f3..311092260ed5 100644 --- a/test/files/neg/annotated-literal-annotation-arg.check +++ b/test/files/neg/annotated-literal-annotation-arg.check @@ -1,7 +1,13 @@ -annotated-literal-annotation-arg.scala:14: error: $foo +annotated-literal-annotation-arg.scala:14: error: implicit error; +!I e: Foo + $foo + implicitly[Foo] ^ -annotated-literal-annotation-arg.scala:15: error: bar +annotated-literal-annotation-arg.scala:15: error: implicit error; +!I e: Bar + bar + implicitly[Bar] ^ 2 errors diff --git a/test/files/neg/classtags_contextbound_a.check b/test/files/neg/classtags_contextbound_a.check index b74d7f8b5843..850688d2c1d9 100644 --- a/test/files/neg/classtags_contextbound_a.check +++ b/test/files/neg/classtags_contextbound_a.check @@ -1,4 +1,7 @@ -classtags_contextbound_a.scala:2: error: No ClassTag available for T +classtags_contextbound_a.scala:2: error: implicit error; +!I evidence$5: ClassTag[T] + No ClassTag available for T + def foo[T] = Array[T]() ^ 1 error diff --git a/test/files/neg/classtags_contextbound_b.check b/test/files/neg/classtags_contextbound_b.check index 42e8e68467c0..124afe85a058 100644 --- a/test/files/neg/classtags_contextbound_b.check +++ b/test/files/neg/classtags_contextbound_b.check @@ -1,4 +1,7 @@ -classtags_contextbound_b.scala:5: error: No ClassTag available for T +classtags_contextbound_b.scala:5: error: implicit error; +!I evidence$1: ClassTag[T] + No ClassTag available for T + def foo[T] = mkArray[T] ^ 1 error diff --git a/test/files/neg/classtags_contextbound_c.check b/test/files/neg/classtags_contextbound_c.check index 8bab1bfd4a9b..e191cdf852a5 100644 --- a/test/files/neg/classtags_contextbound_c.check +++ b/test/files/neg/classtags_contextbound_c.check @@ -1,4 +1,7 @@ -classtags_contextbound_c.scala:4: error: No ClassTag available for T 
+classtags_contextbound_c.scala:4: error: implicit error; +!I evidence$5: ClassTag[T] + No ClassTag available for T + def mkArray[T] = Array[T]() ^ 1 error diff --git a/test/files/neg/classtags_dont_use_typetags.check b/test/files/neg/classtags_dont_use_typetags.check index 5c7bd9492a22..c305f45a86c5 100644 --- a/test/files/neg/classtags_dont_use_typetags.check +++ b/test/files/neg/classtags_dont_use_typetags.check @@ -1,4 +1,7 @@ -classtags_dont_use_typetags.scala:4: error: No ClassTag available for T +classtags_dont_use_typetags.scala:4: error: implicit error; +!I evidence$5: ClassTag[T] + No ClassTag available for T + def foo[T: TypeTag] = Array[T]() ^ 1 error diff --git a/test/files/neg/implicits.check b/test/files/neg/implicits.check index 2eb03eb5f3db..e76441ad82e4 100644 --- a/test/files/neg/implicits.check +++ b/test/files/neg/implicits.check @@ -13,7 +13,8 @@ implicits.scala:47: error: type mismatch; required: Mxml case a => List(a) ^ -implicits.scala:59: error: could not find implicit value for parameter x: Nothing +implicits.scala:59: error: implicit error; +!I x: Nothing foo { ^ 4 errors diff --git a/test/files/neg/interop_abstypetags_arenot_classmanifests.check b/test/files/neg/interop_abstypetags_arenot_classmanifests.check index d918e02840dd..cc6806d64c4f 100644 --- a/test/files/neg/interop_abstypetags_arenot_classmanifests.check +++ b/test/files/neg/interop_abstypetags_arenot_classmanifests.check @@ -1,4 +1,7 @@ -interop_abstypetags_arenot_classmanifests.scala:6: error: No ClassManifest available for T. +interop_abstypetags_arenot_classmanifests.scala:6: error: implicit error; +!I e: ClassTag[T] + No ClassManifest available for T. + println(implicitly[ClassManifest[T]]) ^ 1 error diff --git a/test/files/neg/interop_abstypetags_arenot_classtags.check b/test/files/neg/interop_abstypetags_arenot_classtags.check index 2cae95fc39f5..c1f7248b3c08 100644 --- a/test/files/neg/interop_abstypetags_arenot_classtags.check +++ b/test/files/neg/interop_abstypetags_arenot_classtags.check @@ -1,4 +1,7 @@ -interop_abstypetags_arenot_classtags.scala:6: error: No ClassTag available for T +interop_abstypetags_arenot_classtags.scala:6: error: implicit error; +!I ctag: ClassTag[T] + No ClassTag available for T + println(classTag[T]) ^ 1 error diff --git a/test/files/neg/interop_abstypetags_arenot_manifests.check b/test/files/neg/interop_abstypetags_arenot_manifests.check index 3c3668f6128a..5b3f97afca98 100644 --- a/test/files/neg/interop_abstypetags_arenot_manifests.check +++ b/test/files/neg/interop_abstypetags_arenot_manifests.check @@ -1,4 +1,7 @@ -interop_abstypetags_arenot_manifests.scala:5: error: No Manifest available for T. +interop_abstypetags_arenot_manifests.scala:5: error: implicit error; +!I m: Manifest[T] + No Manifest available for T. 
+ println(manifest[T]) ^ 1 error diff --git a/test/files/neg/interop_classmanifests_arenot_typetags.check b/test/files/neg/interop_classmanifests_arenot_typetags.check index fdc7eafe2a15..c323a5c0dfd9 100644 --- a/test/files/neg/interop_classmanifests_arenot_typetags.check +++ b/test/files/neg/interop_classmanifests_arenot_typetags.check @@ -1,4 +1,7 @@ -interop_classmanifests_arenot_typetags.scala:6: error: No TypeTag available for T +interop_classmanifests_arenot_typetags.scala:6: error: implicit error; +!I e: TypeTag[T] + No TypeTag available for T + println(implicitly[TypeTag[T]]) ^ 1 error diff --git a/test/files/neg/interop_classtags_arenot_manifests.check b/test/files/neg/interop_classtags_arenot_manifests.check index 3fe0b90be4e1..13f5fc54947f 100644 --- a/test/files/neg/interop_classtags_arenot_manifests.check +++ b/test/files/neg/interop_classtags_arenot_manifests.check @@ -1,4 +1,7 @@ -interop_classtags_arenot_manifests.scala:5: error: No Manifest available for T. +interop_classtags_arenot_manifests.scala:5: error: implicit error; +!I m: Manifest[T] + No Manifest available for T. + println(manifest[T]) ^ 1 error diff --git a/test/files/neg/interop_typetags_arenot_classmanifests.check b/test/files/neg/interop_typetags_arenot_classmanifests.check index 0925e6ffba7e..29b66cb995bc 100644 --- a/test/files/neg/interop_typetags_arenot_classmanifests.check +++ b/test/files/neg/interop_typetags_arenot_classmanifests.check @@ -1,4 +1,7 @@ -interop_typetags_arenot_classmanifests.scala:6: error: No ClassManifest available for T. +interop_typetags_arenot_classmanifests.scala:6: error: implicit error; +!I e: ClassTag[T] + No ClassManifest available for T. + println(implicitly[ClassManifest[T]]) ^ 1 error diff --git a/test/files/neg/interop_typetags_arenot_classtags.check b/test/files/neg/interop_typetags_arenot_classtags.check index 7eaad2efd641..fb469c8108aa 100644 --- a/test/files/neg/interop_typetags_arenot_classtags.check +++ b/test/files/neg/interop_typetags_arenot_classtags.check @@ -1,4 +1,7 @@ -interop_typetags_arenot_classtags.scala:6: error: No ClassTag available for T +interop_typetags_arenot_classtags.scala:6: error: implicit error; +!I ctag: ClassTag[T] + No ClassTag available for T + println(classTag[T]) ^ 1 error diff --git a/test/files/neg/leibniz-liskov.check b/test/files/neg/leibniz-liskov.check index c760861dbbf6..e990ac07b197 100644 --- a/test/files/neg/leibniz-liskov.check +++ b/test/files/neg/leibniz-liskov.check @@ -1,19 +1,37 @@ -leibniz-liskov.scala:7: error: Cannot prove that LeibnizLiskov.this.A =:= LeibnizLiskov.this.B. +leibniz-liskov.scala:7: error: implicit error; +!I e: A =:= B + Cannot prove that LeibnizLiskov.this.A =:= LeibnizLiskov.this.B. + implicitly[A =:= B] ^ -leibniz-liskov.scala:8: error: Cannot prove that LeibnizLiskov.this.B =:= LeibnizLiskov.this.A. +leibniz-liskov.scala:8: error: implicit error; +!I e: B =:= A + Cannot prove that LeibnizLiskov.this.B =:= LeibnizLiskov.this.A. + implicitly[B =:= A] ^ -leibniz-liskov.scala:11: error: Cannot prove that LeibnizLiskov.this.A <:< LeibnizLiskov.this.SA. +leibniz-liskov.scala:11: error: implicit error; +!I e: A <:< SA + Cannot prove that LeibnizLiskov.this.A <:< LeibnizLiskov.this.SA. + implicitly[A <:< SA] ^ -leibniz-liskov.scala:12: error: Cannot prove that LeibnizLiskov.this.SB <:< LeibnizLiskov.this.B. +leibniz-liskov.scala:12: error: implicit error; +!I e: SB <:< B + Cannot prove that LeibnizLiskov.this.SB <:< LeibnizLiskov.this.B. 
+ implicitly[SB <:< B] ^ -leibniz-liskov.scala:13: error: Cannot prove that LeibnizLiskov.this.SA <:< LeibnizLiskov.this.B. +leibniz-liskov.scala:13: error: implicit error; +!I e: SA <:< B + Cannot prove that LeibnizLiskov.this.SA <:< LeibnizLiskov.this.B. + implicitly[SA <:< B] ^ -leibniz-liskov.scala:14: error: Cannot prove that LeibnizLiskov.this.A <:< LeibnizLiskov.this.SB. +leibniz-liskov.scala:14: error: implicit error; +!I e: A <:< SB + Cannot prove that LeibnizLiskov.this.A <:< LeibnizLiskov.this.SB. + implicitly[A <:< SB] ^ leibniz-liskov.scala:18: error: no type parameters for method substituteCo: (ff: F[LeibnizLiskov.this.A]): F[LeibnizLiskov.this.B] exist so that it can be applied to arguments (List[LeibnizLiskov.this.B]) @@ -40,7 +58,10 @@ leibniz-liskov.scala:19: error: type mismatch; required: F[LeibnizLiskov.this.B] aEqB.substituteContra(List(A(), A(), A())) ^ -leibniz-liskov.scala:20: error: Cannot prove that xs.type <:< List[LeibnizLiskov.this.B]. +leibniz-liskov.scala:20: error: implicit error; +!I e: xs.type <:< List[B] + Cannot prove that xs.type <:< List[LeibnizLiskov.this.B]. + locally { val xs = aEqB.flip.liftCo[List](List(B(), B(), B())); implicitly[xs.type <:< List[B]] } ^ leibniz-liskov.scala:21: error: no type parameters for method substituteContra: (ft: F[U]): F[T] exist so that it can be applied to arguments (List[T]) diff --git a/test/files/neg/literate_existentials.check b/test/files/neg/literate_existentials.check index 73b45c0af204..a7b4eeacae2c 100644 --- a/test/files/neg/literate_existentials.check +++ b/test/files/neg/literate_existentials.check @@ -1,4 +1,7 @@ -literate_existentials.scala:189: error: Cannot prove that Int <:< M forSome { type M <: String }. +literate_existentials.scala:189: error: implicit error; +!I e: Int <:< M + Cannot prove that Int <:< M forSome { type M <: String }. 
+ implicitly[Int <:< (M forSome { type M >: Nothing <: String })] // fails ^ 1 error diff --git a/test/files/neg/macro-cyclic.check b/test/files/neg/macro-cyclic.check index 79dadefb66c0..e39b8a45c044 100644 --- a/test/files/neg/macro-cyclic.check +++ b/test/files/neg/macro-cyclic.check @@ -1,4 +1,5 @@ -Impls_Macros_1.scala:6: error: could not find implicit value for parameter e: SourceLocation +Impls_Macros_1.scala:6: error: implicit error; +!I e: SourceLocation c.universe.reify { implicitly[SourceLocation] } ^ 1 error diff --git a/test/files/neg/macro-divergence-controlled.check b/test/files/neg/macro-divergence-controlled.check index 030a8c40ffc3..a7cdab37cc99 100644 --- a/test/files/neg/macro-divergence-controlled.check +++ b/test/files/neg/macro-divergence-controlled.check @@ -1,4 +1,5 @@ -Test_2.scala:2: error: could not find implicit value for parameter e: Complex[Foo] +Test_2.scala:2: error: implicit error; +!I e: Complex[Foo] println(implicitly[Complex[Foo]]) ^ 1 error diff --git a/test/files/neg/macro-reify-typetag-hktypeparams-notags.check b/test/files/neg/macro-reify-typetag-hktypeparams-notags.check index ce218cdbc28e..c000a798132e 100644 --- a/test/files/neg/macro-reify-typetag-hktypeparams-notags.check +++ b/test/files/neg/macro-reify-typetag-hktypeparams-notags.check @@ -1,7 +1,13 @@ -Test.scala:5: error: No TypeTag available for C[T] +Test.scala:5: error: implicit error; +!I e: TypeTag[C[T]] + No TypeTag available for C[T] + println(implicitly[TypeTag[C[T]]]) ^ -Test.scala:6: error: No TypeTag available for List[C[T]] +Test.scala:6: error: implicit error; +!I e: TypeTag[List[C[T]]] + No TypeTag available for List[C[T]] + println(implicitly[TypeTag[List[C[T]]]]) ^ 2 errors diff --git a/test/files/neg/macro-reify-typetag-typeparams-notags.check b/test/files/neg/macro-reify-typetag-typeparams-notags.check index 65a08a6d3e7c..251622e82e5e 100644 --- a/test/files/neg/macro-reify-typetag-typeparams-notags.check +++ b/test/files/neg/macro-reify-typetag-typeparams-notags.check @@ -1,7 +1,13 @@ -Test.scala:5: error: No TypeTag available for T +Test.scala:5: error: implicit error; +!I e: TypeTag[T] + No TypeTag available for T + println(implicitly[TypeTag[T]]) ^ -Test.scala:6: error: No TypeTag available for List[T] +Test.scala:6: error: implicit error; +!I e: TypeTag[List[T]] + No TypeTag available for List[T] + println(implicitly[TypeTag[List[T]]]) ^ 2 errors diff --git a/test/files/neg/macro-reify-typetag-useabstypetag.check b/test/files/neg/macro-reify-typetag-useabstypetag.check index 65a08a6d3e7c..251622e82e5e 100644 --- a/test/files/neg/macro-reify-typetag-useabstypetag.check +++ b/test/files/neg/macro-reify-typetag-useabstypetag.check @@ -1,7 +1,13 @@ -Test.scala:5: error: No TypeTag available for T +Test.scala:5: error: implicit error; +!I e: TypeTag[T] + No TypeTag available for T + println(implicitly[TypeTag[T]]) ^ -Test.scala:6: error: No TypeTag available for List[T] +Test.scala:6: error: implicit error; +!I e: TypeTag[List[T]] + No TypeTag available for List[T] + println(implicitly[TypeTag[List[T]]]) ^ 2 errors diff --git a/test/files/neg/missing-implicit.check b/test/files/neg/missing-implicit.check index bc043b4b2958..1f4703a20e94 100644 --- a/test/files/neg/missing-implicit.check +++ b/test/files/neg/missing-implicit.check @@ -1,31 +1,61 @@ -missing-implicit.scala:23: error: could not find implicit value for parameter e: TC[String]{type Int} (foo) +missing-implicit.scala:23: error: implicit error; +!I e: TC[String] {type Int = } + (foo) + implicitly[TC[String] { 
type Int}] ^ -missing-implicit.scala:24: error: bar +missing-implicit.scala:24: error: implicit error; +!I e: XC[String] + bar + implicitly[XC[String]] ^ -missing-implicit.scala:25: error: could not find implicit value for parameter e: U (nope) +missing-implicit.scala:25: error: implicit error; +!I e: U + (nope) + implicitly[U] ^ -missing-implicit.scala:26: error: no way +missing-implicit.scala:26: error: implicit error; +!I e: V + no way + implicitly[V] ^ -missing-implicit.scala:31: error: no way +missing-implicit.scala:31: error: implicit error; +!I v: V + no way + f ^ -missing-implicit.scala:32: error: huh +missing-implicit.scala:32: error: implicit error; +!I v: V + huh + g ^ -missing-implicit.scala:49: error: No F of Int +missing-implicit.scala:49: error: implicit error; +!I e: F[Int] + No F of Int + implicitly[F[Int]] ^ -missing-implicit.scala:50: error: could not find implicit value for parameter e: M[Int] (No F of Int) +missing-implicit.scala:50: error: implicit error; +!I e: M[Int] + (No F of Int) + implicitly[M[Int]] ^ -missing-implicit.scala:51: error: could not find implicit value for parameter e: AX (No F of String) +missing-implicit.scala:51: error: implicit error; +!I e: AX + (No F of String) + implicitly[AX] ^ -missing-implicit.scala:52: error: could not find implicit value for parameter e: X0 (Missing X3 of Char and Int and String) +missing-implicit.scala:52: error: implicit error; +!I e: X0 + (Missing X3 of Char and Int and String) + implicitly[X0] ^ 10 errors diff --git a/test/files/neg/sortedImplicitNotFound.check b/test/files/neg/sortedImplicitNotFound.check index 788c9a022085..28102161b260 100644 --- a/test/files/neg/sortedImplicitNotFound.check +++ b/test/files/neg/sortedImplicitNotFound.check @@ -1,80 +1,346 @@ -sortedImplicitNotFound.scala:10: error: No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. +sortedImplicitNotFound.scala:10: error: implicit error; +!I ev: Ordering[Object] + No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. + +Ordering.ordered invalid because +!I asComparable: Object => Comparable[_$2] + No implicit view available from Object => Comparable[_ >: Object]. + +⋮ +Ordering.comparatorToOrdering invalid because +!I cmp: Comparator[Object] ms.map(_ => o) ^ -sortedImplicitNotFound.scala:13: error: No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. +sortedImplicitNotFound.scala:13: error: implicit error; +!I ev: Ordering[Object] + No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. + +Ordering.ordered invalid because +!I asComparable: Object => Comparable[_$2] + No implicit view available from Object => Comparable[_ >: Object]. + +⋮ +Ordering.comparatorToOrdering invalid because +!I cmp: Comparator[Object] ms.flatMap(_ => List(o)) ^ -sortedImplicitNotFound.scala:16: error: No implicit Ordering[Object] found to build a SortedSet[(Int, Object)]. You may want to upcast to a Set[Int] first by calling `unsorted`. +sortedImplicitNotFound.scala:16: error: implicit error; +!I ev: Ordering[(Int,Object)] + No implicit Ordering[Object] found to build a SortedSet[(Int, Object)]. You may want to upcast to a Set[Int] first by calling `unsorted`. 
+ +Ordering.ordered invalid because +!I asComparable: (Int,Object) => Comparable[_$2] + No implicit view available from (Int, Object) => Comparable[_ >: (Int, Object)]. + +⋮ +――Ordering.ordered invalid because + !I asComparable: Object => Comparable[_$2] + No implicit view available from Object => Comparable[_ >: Object]. + +――Ordering.comparatorToOrdering invalid because + !I cmp: Comparator[Object] ms.zip(List(o)) ^ -sortedImplicitNotFound.scala:19: error: No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. +sortedImplicitNotFound.scala:19: error: implicit error; +!I ev: Ordering[Object] + No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. + +Ordering.ordered invalid because +!I asComparable: Object => Comparable[_$2] + No implicit view available from Object => Comparable[_ >: Object]. + +⋮ +Ordering.comparatorToOrdering invalid because +!I cmp: Comparator[Object] ms.collect{case _ => o} ^ -sortedImplicitNotFound.scala:24: error: No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. +sortedImplicitNotFound.scala:24: error: implicit error; +!I ev: Ordering[Object] + No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. + +Ordering.ordered invalid because +!I asComparable: Object => Comparable[_$2] + No implicit view available from Object => Comparable[_ >: Object]. + +⋮ +Ordering.comparatorToOrdering invalid because +!I cmp: Comparator[Object] is.map(_ => o) ^ -sortedImplicitNotFound.scala:27: error: No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. +sortedImplicitNotFound.scala:27: error: implicit error; +!I ev: Ordering[Object] + No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. + +Ordering.ordered invalid because +!I asComparable: Object => Comparable[_$2] + No implicit view available from Object => Comparable[_ >: Object]. + +⋮ +Ordering.comparatorToOrdering invalid because +!I cmp: Comparator[Object] is.flatMap(_ => List(o)) ^ -sortedImplicitNotFound.scala:30: error: No implicit Ordering[Object] found to build a SortedSet[(Int, Object)]. You may want to upcast to a Set[Int] first by calling `unsorted`. +sortedImplicitNotFound.scala:30: error: implicit error; +!I ev: Ordering[(Int,Object)] + No implicit Ordering[Object] found to build a SortedSet[(Int, Object)]. You may want to upcast to a Set[Int] first by calling `unsorted`. + +Ordering.ordered invalid because +!I asComparable: (Int,Object) => Comparable[_$2] + No implicit view available from (Int, Object) => Comparable[_ >: (Int, Object)]. + +⋮ +――Ordering.ordered invalid because + !I asComparable: Object => Comparable[_$2] + No implicit view available from Object => Comparable[_ >: Object]. + +――Ordering.comparatorToOrdering invalid because + !I cmp: Comparator[Object] is.zip(List(o)) ^ -sortedImplicitNotFound.scala:33: error: No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. +sortedImplicitNotFound.scala:33: error: implicit error; +!I ev: Ordering[Object] + No implicit Ordering[Object] found to build a SortedSet[Object]. 
You may want to upcast to a Set[Int] first by calling `unsorted`. + +Ordering.ordered invalid because +!I asComparable: Object => Comparable[_$2] + No implicit view available from Object => Comparable[_ >: Object]. + +⋮ +Ordering.comparatorToOrdering invalid because +!I cmp: Comparator[Object] is.collect{case _ => o} ^ -sortedImplicitNotFound.scala:39: error: No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. +sortedImplicitNotFound.scala:39: error: implicit error; +!I ev: Ordering[Object] + No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. + +Ordering.ordered invalid because +!I asComparable: Object => Comparable[_$2] + No implicit view available from Object => Comparable[_ >: Object]. + +⋮ +Ordering.comparatorToOrdering invalid because +!I cmp: Comparator[Object] mb.map(_ => o) ^ -sortedImplicitNotFound.scala:43: error: No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. +sortedImplicitNotFound.scala:43: error: implicit error; +!I ev: Ordering[Object] + No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. + +Ordering.ordered invalid because +!I asComparable: Object => Comparable[_$2] + No implicit view available from Object => Comparable[_ >: Object]. + +⋮ +Ordering.comparatorToOrdering invalid because +!I cmp: Comparator[Object] mb.flatMap(_ => List(o)) ^ -sortedImplicitNotFound.scala:47: error: No implicit Ordering[Object] found to build a SortedSet[(Int, Object)]. You may want to upcast to a Set[Int] first by calling `unsorted`. +sortedImplicitNotFound.scala:47: error: implicit error; +!I ev: Ordering[(Int,Object)] + No implicit Ordering[Object] found to build a SortedSet[(Int, Object)]. You may want to upcast to a Set[Int] first by calling `unsorted`. + +Ordering.ordered invalid because +!I asComparable: (Int,Object) => Comparable[_$2] + No implicit view available from (Int, Object) => Comparable[_ >: (Int, Object)]. + +⋮ +――Ordering.ordered invalid because + !I asComparable: Object => Comparable[_$2] + No implicit view available from Object => Comparable[_ >: Object]. + +――Ordering.comparatorToOrdering invalid because + !I cmp: Comparator[Object] mb.zip(List(o)) ^ -sortedImplicitNotFound.scala:51: error: No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. +sortedImplicitNotFound.scala:51: error: implicit error; +!I ev: Ordering[Object] + No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. + +Ordering.ordered invalid because +!I asComparable: Object => Comparable[_$2] + No implicit view available from Object => Comparable[_ >: Object]. + +⋮ +Ordering.comparatorToOrdering invalid because +!I cmp: Comparator[Object] mb.collect{case _ => o} ^ -sortedImplicitNotFound.scala:57: error: No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. +sortedImplicitNotFound.scala:57: error: implicit error; +!I ev: Ordering[Object] + No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. 
+ +Ordering.ordered invalid because +!I asComparable: Object => Comparable[_$2] + No implicit view available from Object => Comparable[_ >: Object]. + +⋮ +Ordering.comparatorToOrdering invalid because +!I cmp: Comparator[Object] ib.map(_ => o) ^ -sortedImplicitNotFound.scala:61: error: No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. +sortedImplicitNotFound.scala:61: error: implicit error; +!I ev: Ordering[Object] + No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. + +Ordering.ordered invalid because +!I asComparable: Object => Comparable[_$2] + No implicit view available from Object => Comparable[_ >: Object]. + +⋮ +Ordering.comparatorToOrdering invalid because +!I cmp: Comparator[Object] ib.flatMap(_ => List(o)) ^ -sortedImplicitNotFound.scala:65: error: No implicit Ordering[Object] found to build a SortedSet[(Int, Object)]. You may want to upcast to a Set[Int] first by calling `unsorted`. +sortedImplicitNotFound.scala:65: error: implicit error; +!I ev: Ordering[(Int,Object)] + No implicit Ordering[Object] found to build a SortedSet[(Int, Object)]. You may want to upcast to a Set[Int] first by calling `unsorted`. + +Ordering.ordered invalid because +!I asComparable: (Int,Object) => Comparable[_$2] + No implicit view available from (Int, Object) => Comparable[_ >: (Int, Object)]. + +⋮ +――Ordering.ordered invalid because + !I asComparable: Object => Comparable[_$2] + No implicit view available from Object => Comparable[_ >: Object]. + +――Ordering.comparatorToOrdering invalid because + !I cmp: Comparator[Object] ib.zip(List(o)) ^ -sortedImplicitNotFound.scala:69: error: No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. +sortedImplicitNotFound.scala:69: error: implicit error; +!I ev: Ordering[Object] + No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. + +Ordering.ordered invalid because +!I asComparable: Object => Comparable[_$2] + No implicit view available from Object => Comparable[_ >: Object]. + +⋮ +Ordering.comparatorToOrdering invalid because +!I cmp: Comparator[Object] ib.collect{case _ => o} ^ -sortedImplicitNotFound.scala:74: error: No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Value] first by calling `unsorted`. +sortedImplicitNotFound.scala:74: error: implicit error; +!I ev: Ordering[Object] + No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Value] first by calling `unsorted`. + +Ordering.ordered invalid because +!I asComparable: Object => Comparable[_$2] + No implicit view available from Object => Comparable[_ >: Object]. + +⋮ +Ordering.comparatorToOrdering invalid because +!I cmp: Comparator[Object] es.map(_ => o) ^ -sortedImplicitNotFound.scala:77: error: No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Value] first by calling `unsorted`. +sortedImplicitNotFound.scala:77: error: implicit error; +!I ev: Ordering[Object] + No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Value] first by calling `unsorted`. + +Ordering.ordered invalid because +!I asComparable: Object => Comparable[_$2] + No implicit view available from Object => Comparable[_ >: Object]. 
+ +⋮ +Ordering.comparatorToOrdering invalid because +!I cmp: Comparator[Object] es.flatMap(_ => List(o)) ^ sortedImplicitNotFound.scala:80: error: diverging implicit expansion for type Ordering[(WeekDay.Value, Object)] starting with method orderingToOrdered in object Ordered es.zip(List(o)) // ah well...: diverging implicit expansion for type Ordering[(WeekDay.Value, Object)] ^ -sortedImplicitNotFound.scala:83: error: No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Value] first by calling `unsorted`. +sortedImplicitNotFound.scala:83: error: implicit error; +!I ev: Ordering[Object] + No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Value] first by calling `unsorted`. + +Ordering.ordered invalid because +!I asComparable: Object => Comparable[_$2] + No implicit view available from Object => Comparable[_ >: Object]. + +⋮ +Ordering.comparatorToOrdering invalid because +!I cmp: Comparator[Object] es.collect{case _ => o} ^ -sortedImplicitNotFound.scala:88: error: No implicit Ordering[Object] found to build a SortedMap[Object, Object]. You may want to upcast to a Map[Int, Object] first by calling `unsorted`. +sortedImplicitNotFound.scala:88: error: implicit error; +!I ordering: Ordering[Object] + No implicit Ordering[Object] found to build a SortedMap[Object, Object]. You may want to upcast to a Map[Int, Object] first by calling `unsorted`. + +Ordering.ordered invalid because +!I asComparable: Object => Comparable[_$2] + No implicit view available from Object => Comparable[_ >: Object]. + +⋮ +Ordering.comparatorToOrdering invalid because +!I cmp: Comparator[Object] mm.map(_ => (o, o)) ^ -sortedImplicitNotFound.scala:91: error: No implicit Ordering[Object] found to build a SortedMap[Object, Object]. You may want to upcast to a Map[Int, Object] first by calling `unsorted`. +sortedImplicitNotFound.scala:91: error: implicit error; +!I ordering: Ordering[Object] + No implicit Ordering[Object] found to build a SortedMap[Object, Object]. You may want to upcast to a Map[Int, Object] first by calling `unsorted`. + +Ordering.ordered invalid because +!I asComparable: Object => Comparable[_$2] + No implicit view available from Object => Comparable[_ >: Object]. + +⋮ +Ordering.comparatorToOrdering invalid because +!I cmp: Comparator[Object] mm.flatMap(_ => List((o, o))) ^ -sortedImplicitNotFound.scala:94: error: No implicit Ordering[Object] found to build a SortedMap[Object, Object]. You may want to upcast to a Map[Int, Object] first by calling `unsorted`. +sortedImplicitNotFound.scala:94: error: implicit error; +!I ordering: Ordering[Object] + No implicit Ordering[Object] found to build a SortedMap[Object, Object]. You may want to upcast to a Map[Int, Object] first by calling `unsorted`. + +Ordering.ordered invalid because +!I asComparable: Object => Comparable[_$2] + No implicit view available from Object => Comparable[_ >: Object]. + +⋮ +Ordering.comparatorToOrdering invalid because +!I cmp: Comparator[Object] mm.collect{case _ => (o, o)} ^ -sortedImplicitNotFound.scala:99: error: No implicit Ordering[Object] found to build a SortedMap[Object, Object]. You may want to upcast to a Map[Int, Object] first by calling `unsorted`. +sortedImplicitNotFound.scala:99: error: implicit error; +!I ordering: Ordering[Object] + No implicit Ordering[Object] found to build a SortedMap[Object, Object]. You may want to upcast to a Map[Int, Object] first by calling `unsorted`. 
+ +Ordering.ordered invalid because +!I asComparable: Object => Comparable[_$2] + No implicit view available from Object => Comparable[_ >: Object]. + +⋮ +Ordering.comparatorToOrdering invalid because +!I cmp: Comparator[Object] im.map(_ => (o, o)) ^ -sortedImplicitNotFound.scala:102: error: No implicit Ordering[Object] found to build a SortedMap[Object, Object]. You may want to upcast to a Map[Int, Object] first by calling `unsorted`. +sortedImplicitNotFound.scala:102: error: implicit error; +!I ordering: Ordering[Object] + No implicit Ordering[Object] found to build a SortedMap[Object, Object]. You may want to upcast to a Map[Int, Object] first by calling `unsorted`. + +Ordering.ordered invalid because +!I asComparable: Object => Comparable[_$2] + No implicit view available from Object => Comparable[_ >: Object]. + +⋮ +Ordering.comparatorToOrdering invalid because +!I cmp: Comparator[Object] im.flatMap(_ => List((o, o))) ^ -sortedImplicitNotFound.scala:105: error: No implicit Ordering[Object] found to build a SortedMap[Object, Object]. You may want to upcast to a Map[Int, Object] first by calling `unsorted`. +sortedImplicitNotFound.scala:105: error: implicit error; +!I ordering: Ordering[Object] + No implicit Ordering[Object] found to build a SortedMap[Object, Object]. You may want to upcast to a Map[Int, Object] first by calling `unsorted`. + +Ordering.ordered invalid because +!I asComparable: Object => Comparable[_$2] + No implicit view available from Object => Comparable[_ >: Object]. + +⋮ +Ordering.comparatorToOrdering invalid because +!I cmp: Comparator[Object] im.collect{case _ => (o, o)} ^ 26 errors diff --git a/test/files/neg/t0226.check b/test/files/neg/t0226.check index 7c7391c8452a..860b5a70bdf8 100644 --- a/test/files/neg/t0226.check +++ b/test/files/neg/t0226.check @@ -4,7 +4,8 @@ t0226.scala:5: error: not found: type A1 t0226.scala:5: error: not found: type A1 (implicit _1: Foo[List[A1]], _2: Foo[A2]): Foo[Tuple2[List[A1], A2]] = ^ -t0226.scala:8: error: could not find implicit value for parameter rep: Test.this.Foo[((List[Char], Int), (collection.immutable.Nil.type, Int))] +t0226.scala:8: error: implicit error; +!I rep: Foo[((List[Char],Int),(Nil.type,Int))] foo(((List('b'), 3), (Nil, 4))) ^ 3 errors diff --git a/test/files/neg/t10066.check b/test/files/neg/t10066.check index 438965fc6c0e..74c6fd3eb8b2 100644 --- a/test/files/neg/t10066.check +++ b/test/files/neg/t10066.check @@ -1,7 +1,9 @@ -t10066.scala:33: error: could not find implicit value for parameter extractor: dynamicrash.Extractor[String] +t10066.scala:33: error: implicit error; +!I extractor: Extractor[String] println(storage.foo[String]) ^ -t10066.scala:37: error: could not find implicit value for parameter extractor: dynamicrash.Extractor[A] +t10066.scala:37: error: implicit error; +!I extractor: Extractor[A] println(storage.foo) ^ 2 errors diff --git a/test/files/neg/t10156.check b/test/files/neg/t10156.check index e0c7e828aa8c..0ab1e9f7ee0e 100644 --- a/test/files/neg/t10156.check +++ b/test/files/neg/t10156.check @@ -1,4 +1,5 @@ -t10156.scala:4: error: could not find implicit value for parameter a: t10156.A +t10156.scala:4: error: implicit error; +!I a: A val z = x _ ^ 1 error diff --git a/test/files/neg/t10279.check b/test/files/neg/t10279.check index a399a2b15041..a9ea7f2840b5 100644 --- a/test/files/neg/t10279.check +++ b/test/files/neg/t10279.check @@ -1,10 +1,12 @@ -t10279.scala:5: error: could not find implicit value for parameter s: String +t10279.scala:5: error: implicit error; +!I s: String val t1 
= foo(1) _ // error: no implicit string ^ t10279.scala:6: error: _ must follow method; cannot follow String val t2 = foo(1)("") _ // error: _ must follow method ^ -t10279.scala:7: error: could not find implicit value for parameter s: String +t10279.scala:7: error: implicit error; +!I s: String val t3 = foo _ // error: no implicit string ^ t10279.scala:14: error: type mismatch; @@ -12,7 +14,8 @@ t10279.scala:14: error: type mismatch; required: ? => ? val t6 = { implicit val i = 0; bar(0) _ } // error: type mismatch, found Int, required: ? => ? ^ -t10279.scala:17: error: could not find implicit value for parameter x: Int +t10279.scala:17: error: implicit error; +!I x: Int val barSimple = fooSimple _ // error: no implicit int ^ 5 errors diff --git a/test/files/neg/t11591.check b/test/files/neg/t11591.check index 4d110a4c3ab3..88cbe410559f 100644 --- a/test/files/neg/t11591.check +++ b/test/files/neg/t11591.check @@ -1,4 +1,7 @@ -t11591.scala:8: error: could not find implicit value for parameter e: Test.A +t11591.scala:8: error: implicit error; +!I e: A +――Test.mkB invalid because + !I i: Int implicitly[A] ^ 1 error diff --git a/test/files/neg/t11643.check b/test/files/neg/t11643.check index 9db82b3af825..5b23dc3df2a5 100644 --- a/test/files/neg/t11643.check +++ b/test/files/neg/t11643.check @@ -1,7 +1,9 @@ -t11643.scala:6: error: could not find implicit value for parameter i: Int +t11643.scala:6: error: implicit error; +!I i: Int def g(j: Int) = j + f ^ -t11643.scala:7: error: could not find implicit value for parameter i: Int +t11643.scala:7: error: implicit error; +!I i: Int def k(j: Int) = { val x = j + f ; 42 } ^ 2 errors diff --git a/test/files/neg/t11823.check b/test/files/neg/t11823.check index de9c19058768..16f8734ab657 100644 --- a/test/files/neg/t11823.check +++ b/test/files/neg/t11823.check @@ -1,7 +1,9 @@ -t11823.scala:7: error: could not find implicit value for parameter e: Test.Foo[String] +t11823.scala:7: error: implicit error; +!I e: Foo[String] val fooString: Foo[String] = implicitly ^ -t11823.scala:8: error: could not find implicit value for parameter foo: Test.Foo[String] +t11823.scala:8: error: implicit error; +!I foo: Foo[String] val barString: Bar[String] = bar ^ 2 errors diff --git a/test/files/neg/t2405.check b/test/files/neg/t2405.check index c944aafcba12..da589b404adf 100644 --- a/test/files/neg/t2405.check +++ b/test/files/neg/t2405.check @@ -1,4 +1,5 @@ -t2405.scala:8: error: could not find implicit value for parameter e: Int +t2405.scala:8: error: implicit error; +!I e: Int implicitly[Int] ^ t2405.scala:6: warning: imported `y` is permanently hidden by definition of method y diff --git a/test/files/neg/t2421b.check b/test/files/neg/t2421b.check index 7c714f1c9bd7..eadb444b2d44 100644 --- a/test/files/neg/t2421b.check +++ b/test/files/neg/t2421b.check @@ -1,4 +1,5 @@ -t2421b.scala:12: error: could not find implicit value for parameter aa: Test.F[Test.A] +t2421b.scala:12: error: implicit error; +!I aa: F[A] f ^ 1 error diff --git a/test/files/neg/t2462a.check b/test/files/neg/t2462a.check index 671acdc29346..2b3819045038 100644 --- a/test/files/neg/t2462a.check +++ b/test/files/neg/t2462a.check @@ -1,4 +1,7 @@ -t2462a.scala:6: error: Cannot construct a collection of type List[String] with elements of type Int based on a collection of type List[Int]. +t2462a.scala:6: error: implicit error; +!I bf: BuildFrom[List[Int], Int, List[String]] + Cannot construct a collection of type List[String] with elements of type Int based on a collection of type List[Int]. 
+ def foo(l: Lst[Int]) = l.map[Int, List[String]](x => 1) ^ 1 error diff --git a/test/files/neg/t2462c.check b/test/files/neg/t2462c.check index 3b425b41730d..594967b8904d 100644 --- a/test/files/neg/t2462c.check +++ b/test/files/neg/t2462c.check @@ -1,16 +1,31 @@ -t2462c.scala:26: error: No C of X$Y +t2462c.scala:26: error: implicit error; +!I evidence$1: C[X$Y] + No C of X$Y + f[X$Y] ^ -t2462c.scala:32: error: No C of Foo[Int] +t2462c.scala:32: error: implicit error; +!I evidence$1: C[Foo[Int]] + No C of Foo[Int] + f[Foo[Int]] ^ -t2462c.scala:35: error: No C of Foo[Int] +t2462c.scala:35: error: implicit error; +!I theC: C[Foo[Int]] + No C of Foo[Int] + g[Foo[Int]] ^ -t2462c.scala:38: error: I see no C[Foo[Int]] +t2462c.scala:38: error: implicit error; +!I theC: C[Foo[Int]] + I see no C[Foo[Int]] + h[Foo[Int]] ^ -t2462c.scala:42: error: String List [?T0, ZZ] -> List[C[_]] Int Option[Long] -- . +t2462c.scala:42: error: implicit error; +!I i: Int + String List [?T0, ZZ] -> List[C[_]] Int Option[Long] -- . + i.m[Option[Long]] ^ 5 errors diff --git a/test/files/neg/t3346b.check b/test/files/neg/t3346b.check index cf740736a799..28457b516ed8 100644 --- a/test/files/neg/t3346b.check +++ b/test/files/neg/t3346b.check @@ -1,4 +1,5 @@ -t3346b.scala:14: error: could not find implicit value for evidence parameter of type TC[Any] +t3346b.scala:14: error: implicit error; +!I evidence$1: TC[Any] val y = foo(1) ^ 1 error diff --git a/test/files/neg/t3399.check b/test/files/neg/t3399.check index 112574b3ffe3..d037c16ea84e 100644 --- a/test/files/neg/t3399.check +++ b/test/files/neg/t3399.check @@ -1,4 +1,7 @@ -t3399.scala:23: error: Cannot prove that Nats.Add[Nats._1,Nats._1] =:= Nats._1. +t3399.scala:23: error: implicit error; +!I e: Succ[Succ[_0]] =:= Succ[_0] + Cannot prove that Nats.Add[Nats._1,Nats._1] =:= Nats._1. + implicitly[ Add[_1, _1] =:= _1] ^ 1 error diff --git a/test/files/neg/t3507-old.check b/test/files/neg/t3507-old.check index d50ebfd9c984..1c88543129cf 100644 --- a/test/files/neg/t3507-old.check +++ b/test/files/neg/t3507-old.check @@ -1,4 +1,7 @@ -t3507-old.scala:13: error: No Manifest available for _1.b.c.type. +t3507-old.scala:13: error: implicit error; +!I evidence$1: Manifest[c.type] + No Manifest available for _1.b.c.type. 
+ mani/*[object _1.b.c]*/(c) // kaboom in manifestOfType / TreeGen.mkAttributedQualifier ^ 1 error diff --git a/test/files/neg/t3977.check b/test/files/neg/t3977.check index 78249b09000f..47aff47756b1 100644 --- a/test/files/neg/t3977.check +++ b/test/files/neg/t3977.check @@ -1,4 +1,5 @@ -t3977.scala:12: error: could not find implicit value for parameter w: False#If[E] +t3977.scala:12: error: implicit error; +!I w: E new NoNull ^ 1 error diff --git a/test/files/neg/t4079.check b/test/files/neg/t4079.check index 286151d1154e..721b5487e902 100644 --- a/test/files/neg/t4079.check +++ b/test/files/neg/t4079.check @@ -1,4 +1,5 @@ -t4079_2.scala:2: error: could not find implicit value for parameter f: Functor[List] +t4079_2.scala:2: error: implicit error; +!I f: Functor[List[?]] Cat.compose[List,Option].Functor ^ 1 error diff --git a/test/files/neg/t4270.check b/test/files/neg/t4270.check index af56ada4fa57..add4696c201b 100644 --- a/test/files/neg/t4270.check +++ b/test/files/neg/t4270.check @@ -1,4 +1,5 @@ -t4270.scala:5: error: could not find implicit value for parameter e: Int +t4270.scala:5: error: implicit error; +!I e: Int implicitly[Int] ^ 1 error diff --git a/test/files/neg/t4889.check b/test/files/neg/t4889.check index 96e9b7528e67..af65bfe69971 100644 --- a/test/files/neg/t4889.check +++ b/test/files/neg/t4889.check @@ -1,4 +1,5 @@ -t4889.scala:19: error: could not find implicit value for parameter ma1: t4889.MatrixAdder[Int,[S]t4889.SparseMatrix[S]] +t4889.scala:19: error: implicit error; +!I ma1: MatrixAdder[Int, SparseMatrix[?]] m1.foo ^ 1 error diff --git a/test/files/neg/t550.check b/test/files/neg/t550.check index e09b9cab03f7..f6409def976a 100644 --- a/test/files/neg/t550.check +++ b/test/files/neg/t550.check @@ -1,7 +1,8 @@ t550.scala:6: error: type List takes type parameters def sum[a](xs: List)(implicit m: Monoid[a]): a = ^ -t550.scala:8: error: could not find implicit value for parameter m: Monoid[a] +t550.scala:8: error: implicit error; +!I m: Monoid[a] sum(List(1,2,3)) ^ 2 errors diff --git a/test/files/neg/t5553_2.check b/test/files/neg/t5553_2.check index b26c7f634f70..dff0e5b34a7e 100644 --- a/test/files/neg/t5553_2.check +++ b/test/files/neg/t5553_2.check @@ -23,16 +23,20 @@ t5553_2.scala:41: error: type mismatch; required: Base[T] def test10[T]: Base[T] = Foo3[T] ^ -t5553_2.scala:47: error: could not find implicit value for parameter z: String +t5553_2.scala:47: error: implicit error; +!I z: String def test13[T]: Int = Foo3[T] ^ -t5553_2.scala:48: error: could not find implicit value for parameter z: String +t5553_2.scala:48: error: implicit error; +!I z: String def test14[T]: Base[T] = Foo3[T] ^ -t5553_2.scala:49: error: could not find implicit value for parameter z: String +t5553_2.scala:49: error: implicit error; +!I z: String def test15[T]: String = Foo3[T] ^ -t5553_2.scala:50: error: could not find implicit value for parameter z: String +t5553_2.scala:50: error: implicit error; +!I z: String def test16[T] = Foo3[T] ^ t5553_2.scala:54: error: ambiguous reference to overloaded definition, diff --git a/test/files/neg/t5801.check b/test/files/neg/t5801.check index 7f6cb4cfe6c3..2a18a4f4c419 100644 --- a/test/files/neg/t5801.check +++ b/test/files/neg/t5801.check @@ -8,7 +8,8 @@ t5801.scala:4: error: not found: value sth t5801.scala:7: error: not found: value sth def bar(x: Int)(implicit y: Int): sth.Sth = null ^ -t5801.scala:8: error: could not find implicit value for parameter y: Int +t5801.scala:8: error: implicit error; +!I y: Int bar(1) ^ t5801.scala:10: error: 
not found: value sth @@ -17,7 +18,8 @@ t5801.scala:10: error: not found: value sth t5801.scala:13: error: not found: value sth def meh2(x: Int)(implicit b: Int, a: sth.Sth): Unit = {} ^ -t5801.scala:14: error: could not find implicit value for parameter b: Int +t5801.scala:14: error: implicit error; +!I b: Int meh2(1) ^ 7 errors diff --git a/test/files/neg/t5803.check b/test/files/neg/t5803.check index 54d348450455..3481422c9a8f 100644 --- a/test/files/neg/t5803.check +++ b/test/files/neg/t5803.check @@ -1,4 +1,5 @@ -t5803.scala:3: error: could not find implicit value for parameter ev: Nothing +t5803.scala:3: error: implicit error; +!I ev: Nothing new Foo(): String ^ 1 error diff --git a/test/files/neg/t6528.check b/test/files/neg/t6528.check index 1c55fe568e98..92699ca9dede 100644 --- a/test/files/neg/t6528.check +++ b/test/files/neg/t6528.check @@ -1,4 +1,5 @@ -t6528.scala:6: error: could not find implicit value for parameter e: CoSet[U,Any] +t6528.scala:6: error: implicit error; +!I e: CoSet[U, Any] implicitly[CoSet[U, Any]] ^ 1 error diff --git a/test/files/neg/t7289.check b/test/files/neg/t7289.check index 05dad641b93c..08c114b20481 100644 --- a/test/files/neg/t7289.check +++ b/test/files/neg/t7289.check @@ -1,4 +1,7 @@ -t7289.scala:8: error: could not find implicit value for parameter e: Test.Schtroumpf[Nil.type] +t7289.scala:8: error: implicit error; +!I e: Schtroumpf[Nil.type] +Test.schtroumpf invalid because +!I minorSchtroumpf: Schtroumpf[T] implicitly[Schtroumpf[Nil.type]] ^ 1 error diff --git a/test/files/neg/t7289_status_quo.check b/test/files/neg/t7289_status_quo.check index ca3c0124f001..bfc5a1b3b4f0 100644 --- a/test/files/neg/t7289_status_quo.check +++ b/test/files/neg/t7289_status_quo.check @@ -1,7 +1,13 @@ -t7289_status_quo.scala:9: error: could not find implicit value for parameter e: Test1.Ext[List[Int]] +t7289_status_quo.scala:9: error: implicit error; +!I e: Ext[List[Int]] +Test1.f invalid because +!I xi: Ext[A] implicitly[Ext[List[Int]]] // fails - not found ^ -t7289_status_quo.scala:11: error: could not find implicit value for parameter e: Test1.Ext[List[List[List[Int]]]] +t7289_status_quo.scala:11: error: implicit error; +!I e: Ext[List[List[List[Int]]]] +Test1.f invalid because +!I xi: Ext[A] implicitly[Ext[List[List[List[Int]]]]] // fails - not found ^ t7289_status_quo.scala:15: error: ambiguous implicit values: @@ -10,13 +16,16 @@ t7289_status_quo.scala:15: error: ambiguous implicit values: match expected type Test1.Ext[_ <: List[List[Int]]] implicitly[Ext[_ <: List[List[Int]]]] // fails - ambiguous ^ -t7289_status_quo.scala:20: error: could not find implicit value for parameter e: Test1.ExtCov[List[Int]] +t7289_status_quo.scala:20: error: implicit error; +!I e: ExtCov[List[Int]] implicitly[ExtCov[List[Int]]] // fails - not found ^ -t7289_status_quo.scala:21: error: could not find implicit value for parameter e: Test1.ExtCov[List[List[Int]]] +t7289_status_quo.scala:21: error: implicit error; +!I e: ExtCov[List[List[Int]]] implicitly[ExtCov[List[List[Int]]]] // fails - not found ^ -t7289_status_quo.scala:22: error: could not find implicit value for parameter e: Test1.ExtCov[List[List[List[Int]]]] +t7289_status_quo.scala:22: error: implicit error; +!I e: ExtCov[List[List[List[Int]]]] implicitly[ExtCov[List[List[List[Int]]]]] // fails - not found ^ 6 errors diff --git a/test/files/neg/t7509.check b/test/files/neg/t7509.check index 03ec8ef282fa..d5ad4222690c 100644 --- a/test/files/neg/t7509.check +++ b/test/files/neg/t7509.check @@ -6,7 +6,8 @@ t7509.scala:3: 
error: type mismatch; required: R crash(42) ^ -t7509.scala:3: error: could not find implicit value for parameter ev: R +t7509.scala:3: error: implicit error; +!I ev: R crash(42) ^ 3 errors diff --git a/test/files/neg/t7686.check b/test/files/neg/t7686.check index 2374f42bb7ec..ec52b9695a68 100644 --- a/test/files/neg/t7686.check +++ b/test/files/neg/t7686.check @@ -1,10 +1,19 @@ -t7686.scala:10: error: No TypeTag available for Test.In[_] +t7686.scala:10: error: implicit error; +!I tt: TypeTag[In[_$1]] + No TypeTag available for Test.In[_] + t1[In]; t2[In]; t3[In]; t4[In] ^ -t7686.scala:11: error: No TypeTag available for Test.Co[_] +t7686.scala:11: error: implicit error; +!I tt: TypeTag[Co[_$1]] + No TypeTag available for Test.Co[_] + t1[Co]; t2[Co]; t3[Co]; t4[Co] ^ -t7686.scala:12: error: No TypeTag available for Test.Cn[_] +t7686.scala:12: error: implicit error; +!I tt: TypeTag[Cn[_$1]] + No TypeTag available for Test.Cn[_] + t1[Cn]; t2[Cn]; t3[Cn]; t4[Cn] ^ 3 errors diff --git a/test/files/neg/t8104.check b/test/files/neg/t8104.check index b781d95393dd..f409bca11ec8 100644 --- a/test/files/neg/t8104.check +++ b/test/files/neg/t8104.check @@ -1,4 +1,5 @@ -Test_2.scala:20: error: could not find implicit value for parameter e: Generic.Aux[Test.C,(Int, Int)] +Test_2.scala:20: error: implicit error; +!I e: Generic.Aux[C, (Int,Int)] implicitly[Generic.Aux[C, (Int, Int)]] ^ 1 error diff --git a/test/files/neg/t8291.check b/test/files/neg/t8291.check index 4667f890e640..abd7fd042b7d 100644 --- a/test/files/neg/t8291.check +++ b/test/files/neg/t8291.check @@ -1,7 +1,13 @@ -t8291.scala:5: error: Could not find implicit for Int or String +t8291.scala:5: error: implicit error; +!I e: X[Int, String] + Could not find implicit for Int or String + implicitly[X[Int, String]] ^ -t8291.scala:6: error: Could not find implicit for Int or String +t8291.scala:6: error: implicit error; +!I e: X[Int, String] + Could not find implicit for Int or String + implicitly[Z[String]] ^ 2 errors diff --git a/test/files/neg/t8372.check b/test/files/neg/t8372.check index b89cf288c52d..033f91f4e760 100644 --- a/test/files/neg/t8372.check +++ b/test/files/neg/t8372.check @@ -1,7 +1,13 @@ -t8372.scala:7: error: No ClassTag available for A1 +t8372.scala:7: error: implicit error; +!I ct1: ClassTag[A1] + No ClassTag available for A1 + def unzip[T1, T2](a: Array[(T1, T2)]) = a.unzip ^ -t8372.scala:9: error: No ClassTag available for T1 +t8372.scala:9: error: implicit error; +!I ct1: ClassTag[T1] + No ClassTag available for T1 + def unzip3[T1, T2, T3](a: Array[(T1, T2, T3)]): (Array[T1], Array[T2], Array[T3]) = a.unzip3 ^ 2 errors diff --git a/test/files/neg/t9041.check b/test/files/neg/t9041.check index 172d3a350cf5..adee8b97e1f8 100644 --- a/test/files/neg/t9041.check +++ b/test/files/neg/t9041.check @@ -1,4 +1,5 @@ -t9041.scala:11: error: could not find implicit value for parameter cellSetter: CellSetter[scala.math.BigDecimal] +t9041.scala:11: error: implicit error; +!I cellSetter: CellSetter[BigDecimal] def setCell(cell: Cell, data: math.BigDecimal): Unit = { cell.setCellValue(data) } ^ 1 error diff --git a/test/files/neg/t9717.check b/test/files/neg/t9717.check index 29ea674e98a2..b08553b2dbda 100644 --- a/test/files/neg/t9717.check +++ b/test/files/neg/t9717.check @@ -4,13 +4,15 @@ t9717.scala:2: error: ambiguous implicit values: match expected type Int class B(implicit F: Int) extends A({ implicit val v: Int = 1; implicitly[Int] }) // ambiguous ^ -t9717.scala:6: error: could not find implicit value for parameter e: Int 
+t9717.scala:6: error: implicit error; +!I e: Int def this() = this(implicitly[Int]) // neg ^ t9717.scala:7: error: not found: value f def this(s: String) = this(f) // neg (`this` is not in scope!) ^ -t9717.scala:12: error: could not find implicit value for parameter e: Int +t9717.scala:12: error: implicit error; +!I e: Int def this() = { this(implicitly[Int]) } // not in scope (spec 5.3.1, scope which is in effect at the point of the enclosing class definition) ^ 4 errors diff --git a/test/files/neg/t9960.check b/test/files/neg/t9960.check index 085665971bc4..90afd4a386e3 100644 --- a/test/files/neg/t9960.check +++ b/test/files/neg/t9960.check @@ -1,4 +1,12 @@ -t9960.scala:27: error: could not find implicit value for parameter m: NNN.Aux[NNN.Reader,NNN.FxAppend[NNN.Fx1[NNN.Task],NNN.Fx2[NNN.Validate,NNN.Reader]],NNN.Fx2[NNN.Task,NNN.Validate]] +t9960.scala:27: error: implicit error; +!I m: + NNN.Aux[ + Reader + , + FxAppend[Fx1[Task], Fx2[Validate, Reader]] + , + Fx2[Task, Validate] + ] val hhhh: Eff[Fx2[Task, Validate], Unit] = runReader(gggg) ^ 1 error diff --git a/test/files/run/typetags_without_scala_reflect_typetag_lookup.check b/test/files/run/typetags_without_scala_reflect_typetag_lookup.check index 892784d1cf66..1f5806f88736 100644 --- a/test/files/run/typetags_without_scala_reflect_typetag_lookup.check +++ b/test/files/run/typetags_without_scala_reflect_typetag_lookup.check @@ -1,2 +1,3 @@ -pos: RangePosition(newSource1.scala, 455, 466, 471) could not find implicit value for evidence parameter of type reflect.runtime.package.universe.TypeTag[Int] ERROR +pos: RangePosition(newSource1.scala, 455, 466, 471) implicit error; +!I evidence$1: TypeTag[Int] ERROR diff --git a/test/files/run/typetags_without_scala_reflect_typetag_manifest_interop.check b/test/files/run/typetags_without_scala_reflect_typetag_manifest_interop.check index d510c578afc6..5a9a160570f9 100644 --- a/test/files/run/typetags_without_scala_reflect_typetag_manifest_interop.check +++ b/test/files/run/typetags_without_scala_reflect_typetag_manifest_interop.check @@ -1,2 +1,5 @@ -pos: RangePosition(newSource1.scala, 471, 479, 482) No Manifest available for App.this.T. ERROR +pos: RangePosition(newSource1.scala, 471, 479, 482) implicit error; +!I m: Manifest[T] + No Manifest available for App.this.T. 
+ ERROR From bc5733b10762e9e01f19256938fb305a40bc22c6 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Wed, 7 Apr 2021 09:27:38 +0100 Subject: [PATCH 088/769] Revert -Vimplicits back to default false --- .../tools/nsc/settings/ScalaSettings.scala | 2 +- .../annotated-literal-annotation-arg.check | 10 +- test/files/neg/classtags_contextbound_a.check | 5 +- test/files/neg/classtags_contextbound_b.check | 5 +- test/files/neg/classtags_contextbound_c.check | 5 +- .../neg/classtags_dont_use_typetags.check | 5 +- test/files/neg/implicits.check | 3 +- ...op_abstypetags_arenot_classmanifests.check | 5 +- ...interop_abstypetags_arenot_classtags.check | 5 +- ...interop_abstypetags_arenot_manifests.check | 5 +- ...terop_classmanifests_arenot_typetags.check | 5 +- .../interop_classtags_arenot_manifests.check | 5 +- ...terop_typetags_arenot_classmanifests.check | 5 +- .../interop_typetags_arenot_classtags.check | 5 +- test/files/neg/leibniz-liskov.check | 35 +- test/files/neg/literate_existentials.check | 5 +- test/files/neg/macro-cyclic.check | 3 +- .../neg/macro-divergence-controlled.check | 3 +- ...ro-reify-typetag-hktypeparams-notags.check | 10 +- ...acro-reify-typetag-typeparams-notags.check | 10 +- .../macro-reify-typetag-useabstypetag.check | 10 +- test/files/neg/missing-implicit.check | 50 +-- test/files/neg/sortedImplicitNotFound.check | 316 ++---------------- test/files/neg/t0226.check | 3 +- test/files/neg/t10066.check | 6 +- test/files/neg/t10156.check | 3 +- test/files/neg/t10279.check | 9 +- test/files/neg/t11591.check | 5 +- test/files/neg/t11643.check | 6 +- test/files/neg/t11823.check | 6 +- test/files/neg/t2405.check | 3 +- test/files/neg/t2421b.check | 3 +- test/files/neg/t2462a.check | 5 +- test/files/neg/t2462c.check | 25 +- test/files/neg/t3346b.check | 3 +- test/files/neg/t3399.check | 5 +- test/files/neg/t3507-old.check | 5 +- test/files/neg/t3977.check | 3 +- test/files/neg/t4079.check | 3 +- test/files/neg/t4270.check | 3 +- test/files/neg/t4889.check | 3 +- test/files/neg/t550.check | 3 +- test/files/neg/t5553_2.check | 12 +- test/files/neg/t5801.check | 6 +- test/files/neg/t5803.check | 3 +- test/files/neg/t6528.check | 3 +- test/files/neg/t7289.check | 5 +- test/files/neg/t7289_status_quo.check | 19 +- test/files/neg/t7509.check | 3 +- test/files/neg/t7686.check | 15 +- test/files/neg/t8104.check | 3 +- test/files/neg/t8291.check | 10 +- test/files/neg/t8372.check | 10 +- test/files/neg/t9041.check | 3 +- test/files/neg/t9717.check | 6 +- test/files/neg/t9960.check | 10 +- ...without_scala_reflect_typetag_lookup.check | 3 +- ...ala_reflect_typetag_manifest_interop.check | 5 +- 58 files changed, 123 insertions(+), 607 deletions(-) diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index 42d09f7c81de..c3b224d888c0 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -501,7 +501,7 @@ trait ScalaSettings extends StandardScalaSettings with Warnings { _: MutableSett .withAbbreviation("-Yhot-statistics") val Yshowsyms = BooleanSetting("-Vsymbols", "Print the AST symbol hierarchy after each phase.") withAbbreviation "-Yshow-syms" val Ytyperdebug = BooleanSetting("-Vtyper", "Trace type assignments.") withAbbreviation "-Ytyper-debug" - val Vimplicits = BooleanSetting("-Vimplicits", "Print dependent missing implicits.", true).withAbbreviation("-Xlog-implicits") + val Vimplicits = BooleanSetting("-Vimplicits", "Print 
dependent missing implicits.").withAbbreviation("-Xlog-implicits") val VimplicitsVerboseTree = BooleanSetting("-Vimplicits-verbose-tree", "Display all intermediate implicits in a chain.") val VimplicitsMaxRefined = IntSetting("-Vimplicits-max-refined", "max chars for printing refined types, abbreviate to `F {...}`", Int.MaxValue, Some((0, Int.MaxValue)), _ => None) val VtypeDiffs = BooleanSetting("-Vtype-diffs", "Print found/required error messages as colored diffs.") diff --git a/test/files/neg/annotated-literal-annotation-arg.check b/test/files/neg/annotated-literal-annotation-arg.check index 311092260ed5..220ab9a992f3 100644 --- a/test/files/neg/annotated-literal-annotation-arg.check +++ b/test/files/neg/annotated-literal-annotation-arg.check @@ -1,13 +1,7 @@ -annotated-literal-annotation-arg.scala:14: error: implicit error; -!I e: Foo - $foo - +annotated-literal-annotation-arg.scala:14: error: $foo implicitly[Foo] ^ -annotated-literal-annotation-arg.scala:15: error: implicit error; -!I e: Bar - bar - +annotated-literal-annotation-arg.scala:15: error: bar implicitly[Bar] ^ 2 errors diff --git a/test/files/neg/classtags_contextbound_a.check b/test/files/neg/classtags_contextbound_a.check index 850688d2c1d9..b74d7f8b5843 100644 --- a/test/files/neg/classtags_contextbound_a.check +++ b/test/files/neg/classtags_contextbound_a.check @@ -1,7 +1,4 @@ -classtags_contextbound_a.scala:2: error: implicit error; -!I evidence$5: ClassTag[T] - No ClassTag available for T - +classtags_contextbound_a.scala:2: error: No ClassTag available for T def foo[T] = Array[T]() ^ 1 error diff --git a/test/files/neg/classtags_contextbound_b.check b/test/files/neg/classtags_contextbound_b.check index 124afe85a058..42e8e68467c0 100644 --- a/test/files/neg/classtags_contextbound_b.check +++ b/test/files/neg/classtags_contextbound_b.check @@ -1,7 +1,4 @@ -classtags_contextbound_b.scala:5: error: implicit error; -!I evidence$1: ClassTag[T] - No ClassTag available for T - +classtags_contextbound_b.scala:5: error: No ClassTag available for T def foo[T] = mkArray[T] ^ 1 error diff --git a/test/files/neg/classtags_contextbound_c.check b/test/files/neg/classtags_contextbound_c.check index e191cdf852a5..8bab1bfd4a9b 100644 --- a/test/files/neg/classtags_contextbound_c.check +++ b/test/files/neg/classtags_contextbound_c.check @@ -1,7 +1,4 @@ -classtags_contextbound_c.scala:4: error: implicit error; -!I evidence$5: ClassTag[T] - No ClassTag available for T - +classtags_contextbound_c.scala:4: error: No ClassTag available for T def mkArray[T] = Array[T]() ^ 1 error diff --git a/test/files/neg/classtags_dont_use_typetags.check b/test/files/neg/classtags_dont_use_typetags.check index c305f45a86c5..5c7bd9492a22 100644 --- a/test/files/neg/classtags_dont_use_typetags.check +++ b/test/files/neg/classtags_dont_use_typetags.check @@ -1,7 +1,4 @@ -classtags_dont_use_typetags.scala:4: error: implicit error; -!I evidence$5: ClassTag[T] - No ClassTag available for T - +classtags_dont_use_typetags.scala:4: error: No ClassTag available for T def foo[T: TypeTag] = Array[T]() ^ 1 error diff --git a/test/files/neg/implicits.check b/test/files/neg/implicits.check index e76441ad82e4..2eb03eb5f3db 100644 --- a/test/files/neg/implicits.check +++ b/test/files/neg/implicits.check @@ -13,8 +13,7 @@ implicits.scala:47: error: type mismatch; required: Mxml case a => List(a) ^ -implicits.scala:59: error: implicit error; -!I x: Nothing +implicits.scala:59: error: could not find implicit value for parameter x: Nothing foo { ^ 4 errors diff --git 
a/test/files/neg/interop_abstypetags_arenot_classmanifests.check b/test/files/neg/interop_abstypetags_arenot_classmanifests.check index cc6806d64c4f..d918e02840dd 100644 --- a/test/files/neg/interop_abstypetags_arenot_classmanifests.check +++ b/test/files/neg/interop_abstypetags_arenot_classmanifests.check @@ -1,7 +1,4 @@ -interop_abstypetags_arenot_classmanifests.scala:6: error: implicit error; -!I e: ClassTag[T] - No ClassManifest available for T. - +interop_abstypetags_arenot_classmanifests.scala:6: error: No ClassManifest available for T. println(implicitly[ClassManifest[T]]) ^ 1 error diff --git a/test/files/neg/interop_abstypetags_arenot_classtags.check b/test/files/neg/interop_abstypetags_arenot_classtags.check index c1f7248b3c08..2cae95fc39f5 100644 --- a/test/files/neg/interop_abstypetags_arenot_classtags.check +++ b/test/files/neg/interop_abstypetags_arenot_classtags.check @@ -1,7 +1,4 @@ -interop_abstypetags_arenot_classtags.scala:6: error: implicit error; -!I ctag: ClassTag[T] - No ClassTag available for T - +interop_abstypetags_arenot_classtags.scala:6: error: No ClassTag available for T println(classTag[T]) ^ 1 error diff --git a/test/files/neg/interop_abstypetags_arenot_manifests.check b/test/files/neg/interop_abstypetags_arenot_manifests.check index 5b3f97afca98..3c3668f6128a 100644 --- a/test/files/neg/interop_abstypetags_arenot_manifests.check +++ b/test/files/neg/interop_abstypetags_arenot_manifests.check @@ -1,7 +1,4 @@ -interop_abstypetags_arenot_manifests.scala:5: error: implicit error; -!I m: Manifest[T] - No Manifest available for T. - +interop_abstypetags_arenot_manifests.scala:5: error: No Manifest available for T. println(manifest[T]) ^ 1 error diff --git a/test/files/neg/interop_classmanifests_arenot_typetags.check b/test/files/neg/interop_classmanifests_arenot_typetags.check index c323a5c0dfd9..fdc7eafe2a15 100644 --- a/test/files/neg/interop_classmanifests_arenot_typetags.check +++ b/test/files/neg/interop_classmanifests_arenot_typetags.check @@ -1,7 +1,4 @@ -interop_classmanifests_arenot_typetags.scala:6: error: implicit error; -!I e: TypeTag[T] - No TypeTag available for T - +interop_classmanifests_arenot_typetags.scala:6: error: No TypeTag available for T println(implicitly[TypeTag[T]]) ^ 1 error diff --git a/test/files/neg/interop_classtags_arenot_manifests.check b/test/files/neg/interop_classtags_arenot_manifests.check index 13f5fc54947f..3fe0b90be4e1 100644 --- a/test/files/neg/interop_classtags_arenot_manifests.check +++ b/test/files/neg/interop_classtags_arenot_manifests.check @@ -1,7 +1,4 @@ -interop_classtags_arenot_manifests.scala:5: error: implicit error; -!I m: Manifest[T] - No Manifest available for T. - +interop_classtags_arenot_manifests.scala:5: error: No Manifest available for T. println(manifest[T]) ^ 1 error diff --git a/test/files/neg/interop_typetags_arenot_classmanifests.check b/test/files/neg/interop_typetags_arenot_classmanifests.check index 29b66cb995bc..0925e6ffba7e 100644 --- a/test/files/neg/interop_typetags_arenot_classmanifests.check +++ b/test/files/neg/interop_typetags_arenot_classmanifests.check @@ -1,7 +1,4 @@ -interop_typetags_arenot_classmanifests.scala:6: error: implicit error; -!I e: ClassTag[T] - No ClassManifest available for T. - +interop_typetags_arenot_classmanifests.scala:6: error: No ClassManifest available for T. 
println(implicitly[ClassManifest[T]]) ^ 1 error diff --git a/test/files/neg/interop_typetags_arenot_classtags.check b/test/files/neg/interop_typetags_arenot_classtags.check index fb469c8108aa..7eaad2efd641 100644 --- a/test/files/neg/interop_typetags_arenot_classtags.check +++ b/test/files/neg/interop_typetags_arenot_classtags.check @@ -1,7 +1,4 @@ -interop_typetags_arenot_classtags.scala:6: error: implicit error; -!I ctag: ClassTag[T] - No ClassTag available for T - +interop_typetags_arenot_classtags.scala:6: error: No ClassTag available for T println(classTag[T]) ^ 1 error diff --git a/test/files/neg/leibniz-liskov.check b/test/files/neg/leibniz-liskov.check index e990ac07b197..c760861dbbf6 100644 --- a/test/files/neg/leibniz-liskov.check +++ b/test/files/neg/leibniz-liskov.check @@ -1,37 +1,19 @@ -leibniz-liskov.scala:7: error: implicit error; -!I e: A =:= B - Cannot prove that LeibnizLiskov.this.A =:= LeibnizLiskov.this.B. - +leibniz-liskov.scala:7: error: Cannot prove that LeibnizLiskov.this.A =:= LeibnizLiskov.this.B. implicitly[A =:= B] ^ -leibniz-liskov.scala:8: error: implicit error; -!I e: B =:= A - Cannot prove that LeibnizLiskov.this.B =:= LeibnizLiskov.this.A. - +leibniz-liskov.scala:8: error: Cannot prove that LeibnizLiskov.this.B =:= LeibnizLiskov.this.A. implicitly[B =:= A] ^ -leibniz-liskov.scala:11: error: implicit error; -!I e: A <:< SA - Cannot prove that LeibnizLiskov.this.A <:< LeibnizLiskov.this.SA. - +leibniz-liskov.scala:11: error: Cannot prove that LeibnizLiskov.this.A <:< LeibnizLiskov.this.SA. implicitly[A <:< SA] ^ -leibniz-liskov.scala:12: error: implicit error; -!I e: SB <:< B - Cannot prove that LeibnizLiskov.this.SB <:< LeibnizLiskov.this.B. - +leibniz-liskov.scala:12: error: Cannot prove that LeibnizLiskov.this.SB <:< LeibnizLiskov.this.B. implicitly[SB <:< B] ^ -leibniz-liskov.scala:13: error: implicit error; -!I e: SA <:< B - Cannot prove that LeibnizLiskov.this.SA <:< LeibnizLiskov.this.B. - +leibniz-liskov.scala:13: error: Cannot prove that LeibnizLiskov.this.SA <:< LeibnizLiskov.this.B. implicitly[SA <:< B] ^ -leibniz-liskov.scala:14: error: implicit error; -!I e: A <:< SB - Cannot prove that LeibnizLiskov.this.A <:< LeibnizLiskov.this.SB. - +leibniz-liskov.scala:14: error: Cannot prove that LeibnizLiskov.this.A <:< LeibnizLiskov.this.SB. implicitly[A <:< SB] ^ leibniz-liskov.scala:18: error: no type parameters for method substituteCo: (ff: F[LeibnizLiskov.this.A]): F[LeibnizLiskov.this.B] exist so that it can be applied to arguments (List[LeibnizLiskov.this.B]) @@ -58,10 +40,7 @@ leibniz-liskov.scala:19: error: type mismatch; required: F[LeibnizLiskov.this.B] aEqB.substituteContra(List(A(), A(), A())) ^ -leibniz-liskov.scala:20: error: implicit error; -!I e: xs.type <:< List[B] - Cannot prove that xs.type <:< List[LeibnizLiskov.this.B]. - +leibniz-liskov.scala:20: error: Cannot prove that xs.type <:< List[LeibnizLiskov.this.B]. 
locally { val xs = aEqB.flip.liftCo[List](List(B(), B(), B())); implicitly[xs.type <:< List[B]] } ^ leibniz-liskov.scala:21: error: no type parameters for method substituteContra: (ft: F[U]): F[T] exist so that it can be applied to arguments (List[T]) diff --git a/test/files/neg/literate_existentials.check b/test/files/neg/literate_existentials.check index a7b4eeacae2c..73b45c0af204 100644 --- a/test/files/neg/literate_existentials.check +++ b/test/files/neg/literate_existentials.check @@ -1,7 +1,4 @@ -literate_existentials.scala:189: error: implicit error; -!I e: Int <:< M - Cannot prove that Int <:< M forSome { type M <: String }. - +literate_existentials.scala:189: error: Cannot prove that Int <:< M forSome { type M <: String }. implicitly[Int <:< (M forSome { type M >: Nothing <: String })] // fails ^ 1 error diff --git a/test/files/neg/macro-cyclic.check b/test/files/neg/macro-cyclic.check index e39b8a45c044..79dadefb66c0 100644 --- a/test/files/neg/macro-cyclic.check +++ b/test/files/neg/macro-cyclic.check @@ -1,5 +1,4 @@ -Impls_Macros_1.scala:6: error: implicit error; -!I e: SourceLocation +Impls_Macros_1.scala:6: error: could not find implicit value for parameter e: SourceLocation c.universe.reify { implicitly[SourceLocation] } ^ 1 error diff --git a/test/files/neg/macro-divergence-controlled.check b/test/files/neg/macro-divergence-controlled.check index a7cdab37cc99..030a8c40ffc3 100644 --- a/test/files/neg/macro-divergence-controlled.check +++ b/test/files/neg/macro-divergence-controlled.check @@ -1,5 +1,4 @@ -Test_2.scala:2: error: implicit error; -!I e: Complex[Foo] +Test_2.scala:2: error: could not find implicit value for parameter e: Complex[Foo] println(implicitly[Complex[Foo]]) ^ 1 error diff --git a/test/files/neg/macro-reify-typetag-hktypeparams-notags.check b/test/files/neg/macro-reify-typetag-hktypeparams-notags.check index c000a798132e..ce218cdbc28e 100644 --- a/test/files/neg/macro-reify-typetag-hktypeparams-notags.check +++ b/test/files/neg/macro-reify-typetag-hktypeparams-notags.check @@ -1,13 +1,7 @@ -Test.scala:5: error: implicit error; -!I e: TypeTag[C[T]] - No TypeTag available for C[T] - +Test.scala:5: error: No TypeTag available for C[T] println(implicitly[TypeTag[C[T]]]) ^ -Test.scala:6: error: implicit error; -!I e: TypeTag[List[C[T]]] - No TypeTag available for List[C[T]] - +Test.scala:6: error: No TypeTag available for List[C[T]] println(implicitly[TypeTag[List[C[T]]]]) ^ 2 errors diff --git a/test/files/neg/macro-reify-typetag-typeparams-notags.check b/test/files/neg/macro-reify-typetag-typeparams-notags.check index 251622e82e5e..65a08a6d3e7c 100644 --- a/test/files/neg/macro-reify-typetag-typeparams-notags.check +++ b/test/files/neg/macro-reify-typetag-typeparams-notags.check @@ -1,13 +1,7 @@ -Test.scala:5: error: implicit error; -!I e: TypeTag[T] - No TypeTag available for T - +Test.scala:5: error: No TypeTag available for T println(implicitly[TypeTag[T]]) ^ -Test.scala:6: error: implicit error; -!I e: TypeTag[List[T]] - No TypeTag available for List[T] - +Test.scala:6: error: No TypeTag available for List[T] println(implicitly[TypeTag[List[T]]]) ^ 2 errors diff --git a/test/files/neg/macro-reify-typetag-useabstypetag.check b/test/files/neg/macro-reify-typetag-useabstypetag.check index 251622e82e5e..65a08a6d3e7c 100644 --- a/test/files/neg/macro-reify-typetag-useabstypetag.check +++ b/test/files/neg/macro-reify-typetag-useabstypetag.check @@ -1,13 +1,7 @@ -Test.scala:5: error: implicit error; -!I e: TypeTag[T] - No TypeTag available for T - 
+Test.scala:5: error: No TypeTag available for T println(implicitly[TypeTag[T]]) ^ -Test.scala:6: error: implicit error; -!I e: TypeTag[List[T]] - No TypeTag available for List[T] - +Test.scala:6: error: No TypeTag available for List[T] println(implicitly[TypeTag[List[T]]]) ^ 2 errors diff --git a/test/files/neg/missing-implicit.check b/test/files/neg/missing-implicit.check index 1f4703a20e94..bc043b4b2958 100644 --- a/test/files/neg/missing-implicit.check +++ b/test/files/neg/missing-implicit.check @@ -1,61 +1,31 @@ -missing-implicit.scala:23: error: implicit error; -!I e: TC[String] {type Int = } - (foo) - +missing-implicit.scala:23: error: could not find implicit value for parameter e: TC[String]{type Int} (foo) implicitly[TC[String] { type Int}] ^ -missing-implicit.scala:24: error: implicit error; -!I e: XC[String] - bar - +missing-implicit.scala:24: error: bar implicitly[XC[String]] ^ -missing-implicit.scala:25: error: implicit error; -!I e: U - (nope) - +missing-implicit.scala:25: error: could not find implicit value for parameter e: U (nope) implicitly[U] ^ -missing-implicit.scala:26: error: implicit error; -!I e: V - no way - +missing-implicit.scala:26: error: no way implicitly[V] ^ -missing-implicit.scala:31: error: implicit error; -!I v: V - no way - +missing-implicit.scala:31: error: no way f ^ -missing-implicit.scala:32: error: implicit error; -!I v: V - huh - +missing-implicit.scala:32: error: huh g ^ -missing-implicit.scala:49: error: implicit error; -!I e: F[Int] - No F of Int - +missing-implicit.scala:49: error: No F of Int implicitly[F[Int]] ^ -missing-implicit.scala:50: error: implicit error; -!I e: M[Int] - (No F of Int) - +missing-implicit.scala:50: error: could not find implicit value for parameter e: M[Int] (No F of Int) implicitly[M[Int]] ^ -missing-implicit.scala:51: error: implicit error; -!I e: AX - (No F of String) - +missing-implicit.scala:51: error: could not find implicit value for parameter e: AX (No F of String) implicitly[AX] ^ -missing-implicit.scala:52: error: implicit error; -!I e: X0 - (Missing X3 of Char and Int and String) - +missing-implicit.scala:52: error: could not find implicit value for parameter e: X0 (Missing X3 of Char and Int and String) implicitly[X0] ^ 10 errors diff --git a/test/files/neg/sortedImplicitNotFound.check b/test/files/neg/sortedImplicitNotFound.check index 28102161b260..788c9a022085 100644 --- a/test/files/neg/sortedImplicitNotFound.check +++ b/test/files/neg/sortedImplicitNotFound.check @@ -1,346 +1,80 @@ -sortedImplicitNotFound.scala:10: error: implicit error; -!I ev: Ordering[Object] - No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. - -Ordering.ordered invalid because -!I asComparable: Object => Comparable[_$2] - No implicit view available from Object => Comparable[_ >: Object]. - -⋮ -Ordering.comparatorToOrdering invalid because -!I cmp: Comparator[Object] +sortedImplicitNotFound.scala:10: error: No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. ms.map(_ => o) ^ -sortedImplicitNotFound.scala:13: error: implicit error; -!I ev: Ordering[Object] - No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. - -Ordering.ordered invalid because -!I asComparable: Object => Comparable[_$2] - No implicit view available from Object => Comparable[_ >: Object]. 
- -⋮ -Ordering.comparatorToOrdering invalid because -!I cmp: Comparator[Object] +sortedImplicitNotFound.scala:13: error: No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. ms.flatMap(_ => List(o)) ^ -sortedImplicitNotFound.scala:16: error: implicit error; -!I ev: Ordering[(Int,Object)] - No implicit Ordering[Object] found to build a SortedSet[(Int, Object)]. You may want to upcast to a Set[Int] first by calling `unsorted`. - -Ordering.ordered invalid because -!I asComparable: (Int,Object) => Comparable[_$2] - No implicit view available from (Int, Object) => Comparable[_ >: (Int, Object)]. - -⋮ -――Ordering.ordered invalid because - !I asComparable: Object => Comparable[_$2] - No implicit view available from Object => Comparable[_ >: Object]. - -――Ordering.comparatorToOrdering invalid because - !I cmp: Comparator[Object] +sortedImplicitNotFound.scala:16: error: No implicit Ordering[Object] found to build a SortedSet[(Int, Object)]. You may want to upcast to a Set[Int] first by calling `unsorted`. ms.zip(List(o)) ^ -sortedImplicitNotFound.scala:19: error: implicit error; -!I ev: Ordering[Object] - No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. - -Ordering.ordered invalid because -!I asComparable: Object => Comparable[_$2] - No implicit view available from Object => Comparable[_ >: Object]. - -⋮ -Ordering.comparatorToOrdering invalid because -!I cmp: Comparator[Object] +sortedImplicitNotFound.scala:19: error: No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. ms.collect{case _ => o} ^ -sortedImplicitNotFound.scala:24: error: implicit error; -!I ev: Ordering[Object] - No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. - -Ordering.ordered invalid because -!I asComparable: Object => Comparable[_$2] - No implicit view available from Object => Comparable[_ >: Object]. - -⋮ -Ordering.comparatorToOrdering invalid because -!I cmp: Comparator[Object] +sortedImplicitNotFound.scala:24: error: No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. is.map(_ => o) ^ -sortedImplicitNotFound.scala:27: error: implicit error; -!I ev: Ordering[Object] - No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. - -Ordering.ordered invalid because -!I asComparable: Object => Comparable[_$2] - No implicit view available from Object => Comparable[_ >: Object]. - -⋮ -Ordering.comparatorToOrdering invalid because -!I cmp: Comparator[Object] +sortedImplicitNotFound.scala:27: error: No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. is.flatMap(_ => List(o)) ^ -sortedImplicitNotFound.scala:30: error: implicit error; -!I ev: Ordering[(Int,Object)] - No implicit Ordering[Object] found to build a SortedSet[(Int, Object)]. You may want to upcast to a Set[Int] first by calling `unsorted`. - -Ordering.ordered invalid because -!I asComparable: (Int,Object) => Comparable[_$2] - No implicit view available from (Int, Object) => Comparable[_ >: (Int, Object)]. 
- -⋮ -――Ordering.ordered invalid because - !I asComparable: Object => Comparable[_$2] - No implicit view available from Object => Comparable[_ >: Object]. - -――Ordering.comparatorToOrdering invalid because - !I cmp: Comparator[Object] +sortedImplicitNotFound.scala:30: error: No implicit Ordering[Object] found to build a SortedSet[(Int, Object)]. You may want to upcast to a Set[Int] first by calling `unsorted`. is.zip(List(o)) ^ -sortedImplicitNotFound.scala:33: error: implicit error; -!I ev: Ordering[Object] - No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. - -Ordering.ordered invalid because -!I asComparable: Object => Comparable[_$2] - No implicit view available from Object => Comparable[_ >: Object]. - -⋮ -Ordering.comparatorToOrdering invalid because -!I cmp: Comparator[Object] +sortedImplicitNotFound.scala:33: error: No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. is.collect{case _ => o} ^ -sortedImplicitNotFound.scala:39: error: implicit error; -!I ev: Ordering[Object] - No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. - -Ordering.ordered invalid because -!I asComparable: Object => Comparable[_$2] - No implicit view available from Object => Comparable[_ >: Object]. - -⋮ -Ordering.comparatorToOrdering invalid because -!I cmp: Comparator[Object] +sortedImplicitNotFound.scala:39: error: No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. mb.map(_ => o) ^ -sortedImplicitNotFound.scala:43: error: implicit error; -!I ev: Ordering[Object] - No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. - -Ordering.ordered invalid because -!I asComparable: Object => Comparable[_$2] - No implicit view available from Object => Comparable[_ >: Object]. - -⋮ -Ordering.comparatorToOrdering invalid because -!I cmp: Comparator[Object] +sortedImplicitNotFound.scala:43: error: No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. mb.flatMap(_ => List(o)) ^ -sortedImplicitNotFound.scala:47: error: implicit error; -!I ev: Ordering[(Int,Object)] - No implicit Ordering[Object] found to build a SortedSet[(Int, Object)]. You may want to upcast to a Set[Int] first by calling `unsorted`. - -Ordering.ordered invalid because -!I asComparable: (Int,Object) => Comparable[_$2] - No implicit view available from (Int, Object) => Comparable[_ >: (Int, Object)]. - -⋮ -――Ordering.ordered invalid because - !I asComparable: Object => Comparable[_$2] - No implicit view available from Object => Comparable[_ >: Object]. - -――Ordering.comparatorToOrdering invalid because - !I cmp: Comparator[Object] +sortedImplicitNotFound.scala:47: error: No implicit Ordering[Object] found to build a SortedSet[(Int, Object)]. You may want to upcast to a Set[Int] first by calling `unsorted`. mb.zip(List(o)) ^ -sortedImplicitNotFound.scala:51: error: implicit error; -!I ev: Ordering[Object] - No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. - -Ordering.ordered invalid because -!I asComparable: Object => Comparable[_$2] - No implicit view available from Object => Comparable[_ >: Object]. 
- -⋮ -Ordering.comparatorToOrdering invalid because -!I cmp: Comparator[Object] +sortedImplicitNotFound.scala:51: error: No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. mb.collect{case _ => o} ^ -sortedImplicitNotFound.scala:57: error: implicit error; -!I ev: Ordering[Object] - No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. - -Ordering.ordered invalid because -!I asComparable: Object => Comparable[_$2] - No implicit view available from Object => Comparable[_ >: Object]. - -⋮ -Ordering.comparatorToOrdering invalid because -!I cmp: Comparator[Object] +sortedImplicitNotFound.scala:57: error: No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. ib.map(_ => o) ^ -sortedImplicitNotFound.scala:61: error: implicit error; -!I ev: Ordering[Object] - No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. - -Ordering.ordered invalid because -!I asComparable: Object => Comparable[_$2] - No implicit view available from Object => Comparable[_ >: Object]. - -⋮ -Ordering.comparatorToOrdering invalid because -!I cmp: Comparator[Object] +sortedImplicitNotFound.scala:61: error: No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. ib.flatMap(_ => List(o)) ^ -sortedImplicitNotFound.scala:65: error: implicit error; -!I ev: Ordering[(Int,Object)] - No implicit Ordering[Object] found to build a SortedSet[(Int, Object)]. You may want to upcast to a Set[Int] first by calling `unsorted`. - -Ordering.ordered invalid because -!I asComparable: (Int,Object) => Comparable[_$2] - No implicit view available from (Int, Object) => Comparable[_ >: (Int, Object)]. - -⋮ -――Ordering.ordered invalid because - !I asComparable: Object => Comparable[_$2] - No implicit view available from Object => Comparable[_ >: Object]. - -――Ordering.comparatorToOrdering invalid because - !I cmp: Comparator[Object] +sortedImplicitNotFound.scala:65: error: No implicit Ordering[Object] found to build a SortedSet[(Int, Object)]. You may want to upcast to a Set[Int] first by calling `unsorted`. ib.zip(List(o)) ^ -sortedImplicitNotFound.scala:69: error: implicit error; -!I ev: Ordering[Object] - No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. - -Ordering.ordered invalid because -!I asComparable: Object => Comparable[_$2] - No implicit view available from Object => Comparable[_ >: Object]. - -⋮ -Ordering.comparatorToOrdering invalid because -!I cmp: Comparator[Object] +sortedImplicitNotFound.scala:69: error: No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. ib.collect{case _ => o} ^ -sortedImplicitNotFound.scala:74: error: implicit error; -!I ev: Ordering[Object] - No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Value] first by calling `unsorted`. - -Ordering.ordered invalid because -!I asComparable: Object => Comparable[_$2] - No implicit view available from Object => Comparable[_ >: Object]. 
- -⋮ -Ordering.comparatorToOrdering invalid because -!I cmp: Comparator[Object] +sortedImplicitNotFound.scala:74: error: No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Value] first by calling `unsorted`. es.map(_ => o) ^ -sortedImplicitNotFound.scala:77: error: implicit error; -!I ev: Ordering[Object] - No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Value] first by calling `unsorted`. - -Ordering.ordered invalid because -!I asComparable: Object => Comparable[_$2] - No implicit view available from Object => Comparable[_ >: Object]. - -⋮ -Ordering.comparatorToOrdering invalid because -!I cmp: Comparator[Object] +sortedImplicitNotFound.scala:77: error: No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Value] first by calling `unsorted`. es.flatMap(_ => List(o)) ^ sortedImplicitNotFound.scala:80: error: diverging implicit expansion for type Ordering[(WeekDay.Value, Object)] starting with method orderingToOrdered in object Ordered es.zip(List(o)) // ah well...: diverging implicit expansion for type Ordering[(WeekDay.Value, Object)] ^ -sortedImplicitNotFound.scala:83: error: implicit error; -!I ev: Ordering[Object] - No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Value] first by calling `unsorted`. - -Ordering.ordered invalid because -!I asComparable: Object => Comparable[_$2] - No implicit view available from Object => Comparable[_ >: Object]. - -⋮ -Ordering.comparatorToOrdering invalid because -!I cmp: Comparator[Object] +sortedImplicitNotFound.scala:83: error: No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Value] first by calling `unsorted`. es.collect{case _ => o} ^ -sortedImplicitNotFound.scala:88: error: implicit error; -!I ordering: Ordering[Object] - No implicit Ordering[Object] found to build a SortedMap[Object, Object]. You may want to upcast to a Map[Int, Object] first by calling `unsorted`. - -Ordering.ordered invalid because -!I asComparable: Object => Comparable[_$2] - No implicit view available from Object => Comparable[_ >: Object]. - -⋮ -Ordering.comparatorToOrdering invalid because -!I cmp: Comparator[Object] +sortedImplicitNotFound.scala:88: error: No implicit Ordering[Object] found to build a SortedMap[Object, Object]. You may want to upcast to a Map[Int, Object] first by calling `unsorted`. mm.map(_ => (o, o)) ^ -sortedImplicitNotFound.scala:91: error: implicit error; -!I ordering: Ordering[Object] - No implicit Ordering[Object] found to build a SortedMap[Object, Object]. You may want to upcast to a Map[Int, Object] first by calling `unsorted`. - -Ordering.ordered invalid because -!I asComparable: Object => Comparable[_$2] - No implicit view available from Object => Comparable[_ >: Object]. - -⋮ -Ordering.comparatorToOrdering invalid because -!I cmp: Comparator[Object] +sortedImplicitNotFound.scala:91: error: No implicit Ordering[Object] found to build a SortedMap[Object, Object]. You may want to upcast to a Map[Int, Object] first by calling `unsorted`. mm.flatMap(_ => List((o, o))) ^ -sortedImplicitNotFound.scala:94: error: implicit error; -!I ordering: Ordering[Object] - No implicit Ordering[Object] found to build a SortedMap[Object, Object]. You may want to upcast to a Map[Int, Object] first by calling `unsorted`. 
- -Ordering.ordered invalid because -!I asComparable: Object => Comparable[_$2] - No implicit view available from Object => Comparable[_ >: Object]. - -⋮ -Ordering.comparatorToOrdering invalid because -!I cmp: Comparator[Object] +sortedImplicitNotFound.scala:94: error: No implicit Ordering[Object] found to build a SortedMap[Object, Object]. You may want to upcast to a Map[Int, Object] first by calling `unsorted`. mm.collect{case _ => (o, o)} ^ -sortedImplicitNotFound.scala:99: error: implicit error; -!I ordering: Ordering[Object] - No implicit Ordering[Object] found to build a SortedMap[Object, Object]. You may want to upcast to a Map[Int, Object] first by calling `unsorted`. - -Ordering.ordered invalid because -!I asComparable: Object => Comparable[_$2] - No implicit view available from Object => Comparable[_ >: Object]. - -⋮ -Ordering.comparatorToOrdering invalid because -!I cmp: Comparator[Object] +sortedImplicitNotFound.scala:99: error: No implicit Ordering[Object] found to build a SortedMap[Object, Object]. You may want to upcast to a Map[Int, Object] first by calling `unsorted`. im.map(_ => (o, o)) ^ -sortedImplicitNotFound.scala:102: error: implicit error; -!I ordering: Ordering[Object] - No implicit Ordering[Object] found to build a SortedMap[Object, Object]. You may want to upcast to a Map[Int, Object] first by calling `unsorted`. - -Ordering.ordered invalid because -!I asComparable: Object => Comparable[_$2] - No implicit view available from Object => Comparable[_ >: Object]. - -⋮ -Ordering.comparatorToOrdering invalid because -!I cmp: Comparator[Object] +sortedImplicitNotFound.scala:102: error: No implicit Ordering[Object] found to build a SortedMap[Object, Object]. You may want to upcast to a Map[Int, Object] first by calling `unsorted`. im.flatMap(_ => List((o, o))) ^ -sortedImplicitNotFound.scala:105: error: implicit error; -!I ordering: Ordering[Object] - No implicit Ordering[Object] found to build a SortedMap[Object, Object]. You may want to upcast to a Map[Int, Object] first by calling `unsorted`. - -Ordering.ordered invalid because -!I asComparable: Object => Comparable[_$2] - No implicit view available from Object => Comparable[_ >: Object]. - -⋮ -Ordering.comparatorToOrdering invalid because -!I cmp: Comparator[Object] +sortedImplicitNotFound.scala:105: error: No implicit Ordering[Object] found to build a SortedMap[Object, Object]. You may want to upcast to a Map[Int, Object] first by calling `unsorted`. 
im.collect{case _ => (o, o)} ^ 26 errors diff --git a/test/files/neg/t0226.check b/test/files/neg/t0226.check index 860b5a70bdf8..7c7391c8452a 100644 --- a/test/files/neg/t0226.check +++ b/test/files/neg/t0226.check @@ -4,8 +4,7 @@ t0226.scala:5: error: not found: type A1 t0226.scala:5: error: not found: type A1 (implicit _1: Foo[List[A1]], _2: Foo[A2]): Foo[Tuple2[List[A1], A2]] = ^ -t0226.scala:8: error: implicit error; -!I rep: Foo[((List[Char],Int),(Nil.type,Int))] +t0226.scala:8: error: could not find implicit value for parameter rep: Test.this.Foo[((List[Char], Int), (collection.immutable.Nil.type, Int))] foo(((List('b'), 3), (Nil, 4))) ^ 3 errors diff --git a/test/files/neg/t10066.check b/test/files/neg/t10066.check index 74c6fd3eb8b2..438965fc6c0e 100644 --- a/test/files/neg/t10066.check +++ b/test/files/neg/t10066.check @@ -1,9 +1,7 @@ -t10066.scala:33: error: implicit error; -!I extractor: Extractor[String] +t10066.scala:33: error: could not find implicit value for parameter extractor: dynamicrash.Extractor[String] println(storage.foo[String]) ^ -t10066.scala:37: error: implicit error; -!I extractor: Extractor[A] +t10066.scala:37: error: could not find implicit value for parameter extractor: dynamicrash.Extractor[A] println(storage.foo) ^ 2 errors diff --git a/test/files/neg/t10156.check b/test/files/neg/t10156.check index 0ab1e9f7ee0e..e0c7e828aa8c 100644 --- a/test/files/neg/t10156.check +++ b/test/files/neg/t10156.check @@ -1,5 +1,4 @@ -t10156.scala:4: error: implicit error; -!I a: A +t10156.scala:4: error: could not find implicit value for parameter a: t10156.A val z = x _ ^ 1 error diff --git a/test/files/neg/t10279.check b/test/files/neg/t10279.check index a9ea7f2840b5..a399a2b15041 100644 --- a/test/files/neg/t10279.check +++ b/test/files/neg/t10279.check @@ -1,12 +1,10 @@ -t10279.scala:5: error: implicit error; -!I s: String +t10279.scala:5: error: could not find implicit value for parameter s: String val t1 = foo(1) _ // error: no implicit string ^ t10279.scala:6: error: _ must follow method; cannot follow String val t2 = foo(1)("") _ // error: _ must follow method ^ -t10279.scala:7: error: implicit error; -!I s: String +t10279.scala:7: error: could not find implicit value for parameter s: String val t3 = foo _ // error: no implicit string ^ t10279.scala:14: error: type mismatch; @@ -14,8 +12,7 @@ t10279.scala:14: error: type mismatch; required: ? => ? val t6 = { implicit val i = 0; bar(0) _ } // error: type mismatch, found Int, required: ? => ? 
^ -t10279.scala:17: error: implicit error; -!I x: Int +t10279.scala:17: error: could not find implicit value for parameter x: Int val barSimple = fooSimple _ // error: no implicit int ^ 5 errors diff --git a/test/files/neg/t11591.check b/test/files/neg/t11591.check index 88cbe410559f..4d110a4c3ab3 100644 --- a/test/files/neg/t11591.check +++ b/test/files/neg/t11591.check @@ -1,7 +1,4 @@ -t11591.scala:8: error: implicit error; -!I e: A -――Test.mkB invalid because - !I i: Int +t11591.scala:8: error: could not find implicit value for parameter e: Test.A implicitly[A] ^ 1 error diff --git a/test/files/neg/t11643.check b/test/files/neg/t11643.check index 5b23dc3df2a5..9db82b3af825 100644 --- a/test/files/neg/t11643.check +++ b/test/files/neg/t11643.check @@ -1,9 +1,7 @@ -t11643.scala:6: error: implicit error; -!I i: Int +t11643.scala:6: error: could not find implicit value for parameter i: Int def g(j: Int) = j + f ^ -t11643.scala:7: error: implicit error; -!I i: Int +t11643.scala:7: error: could not find implicit value for parameter i: Int def k(j: Int) = { val x = j + f ; 42 } ^ 2 errors diff --git a/test/files/neg/t11823.check b/test/files/neg/t11823.check index 16f8734ab657..de9c19058768 100644 --- a/test/files/neg/t11823.check +++ b/test/files/neg/t11823.check @@ -1,9 +1,7 @@ -t11823.scala:7: error: implicit error; -!I e: Foo[String] +t11823.scala:7: error: could not find implicit value for parameter e: Test.Foo[String] val fooString: Foo[String] = implicitly ^ -t11823.scala:8: error: implicit error; -!I foo: Foo[String] +t11823.scala:8: error: could not find implicit value for parameter foo: Test.Foo[String] val barString: Bar[String] = bar ^ 2 errors diff --git a/test/files/neg/t2405.check b/test/files/neg/t2405.check index da589b404adf..c944aafcba12 100644 --- a/test/files/neg/t2405.check +++ b/test/files/neg/t2405.check @@ -1,5 +1,4 @@ -t2405.scala:8: error: implicit error; -!I e: Int +t2405.scala:8: error: could not find implicit value for parameter e: Int implicitly[Int] ^ t2405.scala:6: warning: imported `y` is permanently hidden by definition of method y diff --git a/test/files/neg/t2421b.check b/test/files/neg/t2421b.check index eadb444b2d44..7c714f1c9bd7 100644 --- a/test/files/neg/t2421b.check +++ b/test/files/neg/t2421b.check @@ -1,5 +1,4 @@ -t2421b.scala:12: error: implicit error; -!I aa: F[A] +t2421b.scala:12: error: could not find implicit value for parameter aa: Test.F[Test.A] f ^ 1 error diff --git a/test/files/neg/t2462a.check b/test/files/neg/t2462a.check index 2b3819045038..671acdc29346 100644 --- a/test/files/neg/t2462a.check +++ b/test/files/neg/t2462a.check @@ -1,7 +1,4 @@ -t2462a.scala:6: error: implicit error; -!I bf: BuildFrom[List[Int], Int, List[String]] - Cannot construct a collection of type List[String] with elements of type Int based on a collection of type List[Int]. - +t2462a.scala:6: error: Cannot construct a collection of type List[String] with elements of type Int based on a collection of type List[Int]. 
def foo(l: Lst[Int]) = l.map[Int, List[String]](x => 1) ^ 1 error diff --git a/test/files/neg/t2462c.check b/test/files/neg/t2462c.check index 594967b8904d..3b425b41730d 100644 --- a/test/files/neg/t2462c.check +++ b/test/files/neg/t2462c.check @@ -1,31 +1,16 @@ -t2462c.scala:26: error: implicit error; -!I evidence$1: C[X$Y] - No C of X$Y - +t2462c.scala:26: error: No C of X$Y f[X$Y] ^ -t2462c.scala:32: error: implicit error; -!I evidence$1: C[Foo[Int]] - No C of Foo[Int] - +t2462c.scala:32: error: No C of Foo[Int] f[Foo[Int]] ^ -t2462c.scala:35: error: implicit error; -!I theC: C[Foo[Int]] - No C of Foo[Int] - +t2462c.scala:35: error: No C of Foo[Int] g[Foo[Int]] ^ -t2462c.scala:38: error: implicit error; -!I theC: C[Foo[Int]] - I see no C[Foo[Int]] - +t2462c.scala:38: error: I see no C[Foo[Int]] h[Foo[Int]] ^ -t2462c.scala:42: error: implicit error; -!I i: Int - String List [?T0, ZZ] -> List[C[_]] Int Option[Long] -- . - +t2462c.scala:42: error: String List [?T0, ZZ] -> List[C[_]] Int Option[Long] -- . i.m[Option[Long]] ^ 5 errors diff --git a/test/files/neg/t3346b.check b/test/files/neg/t3346b.check index 28457b516ed8..cf740736a799 100644 --- a/test/files/neg/t3346b.check +++ b/test/files/neg/t3346b.check @@ -1,5 +1,4 @@ -t3346b.scala:14: error: implicit error; -!I evidence$1: TC[Any] +t3346b.scala:14: error: could not find implicit value for evidence parameter of type TC[Any] val y = foo(1) ^ 1 error diff --git a/test/files/neg/t3399.check b/test/files/neg/t3399.check index d037c16ea84e..112574b3ffe3 100644 --- a/test/files/neg/t3399.check +++ b/test/files/neg/t3399.check @@ -1,7 +1,4 @@ -t3399.scala:23: error: implicit error; -!I e: Succ[Succ[_0]] =:= Succ[_0] - Cannot prove that Nats.Add[Nats._1,Nats._1] =:= Nats._1. - +t3399.scala:23: error: Cannot prove that Nats.Add[Nats._1,Nats._1] =:= Nats._1. implicitly[ Add[_1, _1] =:= _1] ^ 1 error diff --git a/test/files/neg/t3507-old.check b/test/files/neg/t3507-old.check index 1c88543129cf..d50ebfd9c984 100644 --- a/test/files/neg/t3507-old.check +++ b/test/files/neg/t3507-old.check @@ -1,7 +1,4 @@ -t3507-old.scala:13: error: implicit error; -!I evidence$1: Manifest[c.type] - No Manifest available for _1.b.c.type. - +t3507-old.scala:13: error: No Manifest available for _1.b.c.type. 
mani/*[object _1.b.c]*/(c) // kaboom in manifestOfType / TreeGen.mkAttributedQualifier ^ 1 error diff --git a/test/files/neg/t3977.check b/test/files/neg/t3977.check index 47aff47756b1..78249b09000f 100644 --- a/test/files/neg/t3977.check +++ b/test/files/neg/t3977.check @@ -1,5 +1,4 @@ -t3977.scala:12: error: implicit error; -!I w: E +t3977.scala:12: error: could not find implicit value for parameter w: False#If[E] new NoNull ^ 1 error diff --git a/test/files/neg/t4079.check b/test/files/neg/t4079.check index 721b5487e902..286151d1154e 100644 --- a/test/files/neg/t4079.check +++ b/test/files/neg/t4079.check @@ -1,5 +1,4 @@ -t4079_2.scala:2: error: implicit error; -!I f: Functor[List[?]] +t4079_2.scala:2: error: could not find implicit value for parameter f: Functor[List] Cat.compose[List,Option].Functor ^ 1 error diff --git a/test/files/neg/t4270.check b/test/files/neg/t4270.check index add4696c201b..af56ada4fa57 100644 --- a/test/files/neg/t4270.check +++ b/test/files/neg/t4270.check @@ -1,5 +1,4 @@ -t4270.scala:5: error: implicit error; -!I e: Int +t4270.scala:5: error: could not find implicit value for parameter e: Int implicitly[Int] ^ 1 error diff --git a/test/files/neg/t4889.check b/test/files/neg/t4889.check index af65bfe69971..96e9b7528e67 100644 --- a/test/files/neg/t4889.check +++ b/test/files/neg/t4889.check @@ -1,5 +1,4 @@ -t4889.scala:19: error: implicit error; -!I ma1: MatrixAdder[Int, SparseMatrix[?]] +t4889.scala:19: error: could not find implicit value for parameter ma1: t4889.MatrixAdder[Int,[S]t4889.SparseMatrix[S]] m1.foo ^ 1 error diff --git a/test/files/neg/t550.check b/test/files/neg/t550.check index f6409def976a..e09b9cab03f7 100644 --- a/test/files/neg/t550.check +++ b/test/files/neg/t550.check @@ -1,8 +1,7 @@ t550.scala:6: error: type List takes type parameters def sum[a](xs: List)(implicit m: Monoid[a]): a = ^ -t550.scala:8: error: implicit error; -!I m: Monoid[a] +t550.scala:8: error: could not find implicit value for parameter m: Monoid[a] sum(List(1,2,3)) ^ 2 errors diff --git a/test/files/neg/t5553_2.check b/test/files/neg/t5553_2.check index dff0e5b34a7e..b26c7f634f70 100644 --- a/test/files/neg/t5553_2.check +++ b/test/files/neg/t5553_2.check @@ -23,20 +23,16 @@ t5553_2.scala:41: error: type mismatch; required: Base[T] def test10[T]: Base[T] = Foo3[T] ^ -t5553_2.scala:47: error: implicit error; -!I z: String +t5553_2.scala:47: error: could not find implicit value for parameter z: String def test13[T]: Int = Foo3[T] ^ -t5553_2.scala:48: error: implicit error; -!I z: String +t5553_2.scala:48: error: could not find implicit value for parameter z: String def test14[T]: Base[T] = Foo3[T] ^ -t5553_2.scala:49: error: implicit error; -!I z: String +t5553_2.scala:49: error: could not find implicit value for parameter z: String def test15[T]: String = Foo3[T] ^ -t5553_2.scala:50: error: implicit error; -!I z: String +t5553_2.scala:50: error: could not find implicit value for parameter z: String def test16[T] = Foo3[T] ^ t5553_2.scala:54: error: ambiguous reference to overloaded definition, diff --git a/test/files/neg/t5801.check b/test/files/neg/t5801.check index 2a18a4f4c419..7f6cb4cfe6c3 100644 --- a/test/files/neg/t5801.check +++ b/test/files/neg/t5801.check @@ -8,8 +8,7 @@ t5801.scala:4: error: not found: value sth t5801.scala:7: error: not found: value sth def bar(x: Int)(implicit y: Int): sth.Sth = null ^ -t5801.scala:8: error: implicit error; -!I y: Int +t5801.scala:8: error: could not find implicit value for parameter y: Int bar(1) ^ t5801.scala:10: error: 
not found: value sth @@ -18,8 +17,7 @@ t5801.scala:10: error: not found: value sth t5801.scala:13: error: not found: value sth def meh2(x: Int)(implicit b: Int, a: sth.Sth): Unit = {} ^ -t5801.scala:14: error: implicit error; -!I b: Int +t5801.scala:14: error: could not find implicit value for parameter b: Int meh2(1) ^ 7 errors diff --git a/test/files/neg/t5803.check b/test/files/neg/t5803.check index 3481422c9a8f..54d348450455 100644 --- a/test/files/neg/t5803.check +++ b/test/files/neg/t5803.check @@ -1,5 +1,4 @@ -t5803.scala:3: error: implicit error; -!I ev: Nothing +t5803.scala:3: error: could not find implicit value for parameter ev: Nothing new Foo(): String ^ 1 error diff --git a/test/files/neg/t6528.check b/test/files/neg/t6528.check index 92699ca9dede..1c55fe568e98 100644 --- a/test/files/neg/t6528.check +++ b/test/files/neg/t6528.check @@ -1,5 +1,4 @@ -t6528.scala:6: error: implicit error; -!I e: CoSet[U, Any] +t6528.scala:6: error: could not find implicit value for parameter e: CoSet[U,Any] implicitly[CoSet[U, Any]] ^ 1 error diff --git a/test/files/neg/t7289.check b/test/files/neg/t7289.check index 08c114b20481..05dad641b93c 100644 --- a/test/files/neg/t7289.check +++ b/test/files/neg/t7289.check @@ -1,7 +1,4 @@ -t7289.scala:8: error: implicit error; -!I e: Schtroumpf[Nil.type] -Test.schtroumpf invalid because -!I minorSchtroumpf: Schtroumpf[T] +t7289.scala:8: error: could not find implicit value for parameter e: Test.Schtroumpf[Nil.type] implicitly[Schtroumpf[Nil.type]] ^ 1 error diff --git a/test/files/neg/t7289_status_quo.check b/test/files/neg/t7289_status_quo.check index bfc5a1b3b4f0..ca3c0124f001 100644 --- a/test/files/neg/t7289_status_quo.check +++ b/test/files/neg/t7289_status_quo.check @@ -1,13 +1,7 @@ -t7289_status_quo.scala:9: error: implicit error; -!I e: Ext[List[Int]] -Test1.f invalid because -!I xi: Ext[A] +t7289_status_quo.scala:9: error: could not find implicit value for parameter e: Test1.Ext[List[Int]] implicitly[Ext[List[Int]]] // fails - not found ^ -t7289_status_quo.scala:11: error: implicit error; -!I e: Ext[List[List[List[Int]]]] -Test1.f invalid because -!I xi: Ext[A] +t7289_status_quo.scala:11: error: could not find implicit value for parameter e: Test1.Ext[List[List[List[Int]]]] implicitly[Ext[List[List[List[Int]]]]] // fails - not found ^ t7289_status_quo.scala:15: error: ambiguous implicit values: @@ -16,16 +10,13 @@ t7289_status_quo.scala:15: error: ambiguous implicit values: match expected type Test1.Ext[_ <: List[List[Int]]] implicitly[Ext[_ <: List[List[Int]]]] // fails - ambiguous ^ -t7289_status_quo.scala:20: error: implicit error; -!I e: ExtCov[List[Int]] +t7289_status_quo.scala:20: error: could not find implicit value for parameter e: Test1.ExtCov[List[Int]] implicitly[ExtCov[List[Int]]] // fails - not found ^ -t7289_status_quo.scala:21: error: implicit error; -!I e: ExtCov[List[List[Int]]] +t7289_status_quo.scala:21: error: could not find implicit value for parameter e: Test1.ExtCov[List[List[Int]]] implicitly[ExtCov[List[List[Int]]]] // fails - not found ^ -t7289_status_quo.scala:22: error: implicit error; -!I e: ExtCov[List[List[List[Int]]]] +t7289_status_quo.scala:22: error: could not find implicit value for parameter e: Test1.ExtCov[List[List[List[Int]]]] implicitly[ExtCov[List[List[List[Int]]]]] // fails - not found ^ 6 errors diff --git a/test/files/neg/t7509.check b/test/files/neg/t7509.check index d5ad4222690c..03ec8ef282fa 100644 --- a/test/files/neg/t7509.check +++ b/test/files/neg/t7509.check @@ -6,8 +6,7 @@ t7509.scala:3: 
error: type mismatch; required: R crash(42) ^ -t7509.scala:3: error: implicit error; -!I ev: R +t7509.scala:3: error: could not find implicit value for parameter ev: R crash(42) ^ 3 errors diff --git a/test/files/neg/t7686.check b/test/files/neg/t7686.check index ec52b9695a68..2374f42bb7ec 100644 --- a/test/files/neg/t7686.check +++ b/test/files/neg/t7686.check @@ -1,19 +1,10 @@ -t7686.scala:10: error: implicit error; -!I tt: TypeTag[In[_$1]] - No TypeTag available for Test.In[_] - +t7686.scala:10: error: No TypeTag available for Test.In[_] t1[In]; t2[In]; t3[In]; t4[In] ^ -t7686.scala:11: error: implicit error; -!I tt: TypeTag[Co[_$1]] - No TypeTag available for Test.Co[_] - +t7686.scala:11: error: No TypeTag available for Test.Co[_] t1[Co]; t2[Co]; t3[Co]; t4[Co] ^ -t7686.scala:12: error: implicit error; -!I tt: TypeTag[Cn[_$1]] - No TypeTag available for Test.Cn[_] - +t7686.scala:12: error: No TypeTag available for Test.Cn[_] t1[Cn]; t2[Cn]; t3[Cn]; t4[Cn] ^ 3 errors diff --git a/test/files/neg/t8104.check b/test/files/neg/t8104.check index f409bca11ec8..b781d95393dd 100644 --- a/test/files/neg/t8104.check +++ b/test/files/neg/t8104.check @@ -1,5 +1,4 @@ -Test_2.scala:20: error: implicit error; -!I e: Generic.Aux[C, (Int,Int)] +Test_2.scala:20: error: could not find implicit value for parameter e: Generic.Aux[Test.C,(Int, Int)] implicitly[Generic.Aux[C, (Int, Int)]] ^ 1 error diff --git a/test/files/neg/t8291.check b/test/files/neg/t8291.check index abd7fd042b7d..4667f890e640 100644 --- a/test/files/neg/t8291.check +++ b/test/files/neg/t8291.check @@ -1,13 +1,7 @@ -t8291.scala:5: error: implicit error; -!I e: X[Int, String] - Could not find implicit for Int or String - +t8291.scala:5: error: Could not find implicit for Int or String implicitly[X[Int, String]] ^ -t8291.scala:6: error: implicit error; -!I e: X[Int, String] - Could not find implicit for Int or String - +t8291.scala:6: error: Could not find implicit for Int or String implicitly[Z[String]] ^ 2 errors diff --git a/test/files/neg/t8372.check b/test/files/neg/t8372.check index 033f91f4e760..b89cf288c52d 100644 --- a/test/files/neg/t8372.check +++ b/test/files/neg/t8372.check @@ -1,13 +1,7 @@ -t8372.scala:7: error: implicit error; -!I ct1: ClassTag[A1] - No ClassTag available for A1 - +t8372.scala:7: error: No ClassTag available for A1 def unzip[T1, T2](a: Array[(T1, T2)]) = a.unzip ^ -t8372.scala:9: error: implicit error; -!I ct1: ClassTag[T1] - No ClassTag available for T1 - +t8372.scala:9: error: No ClassTag available for T1 def unzip3[T1, T2, T3](a: Array[(T1, T2, T3)]): (Array[T1], Array[T2], Array[T3]) = a.unzip3 ^ 2 errors diff --git a/test/files/neg/t9041.check b/test/files/neg/t9041.check index adee8b97e1f8..172d3a350cf5 100644 --- a/test/files/neg/t9041.check +++ b/test/files/neg/t9041.check @@ -1,5 +1,4 @@ -t9041.scala:11: error: implicit error; -!I cellSetter: CellSetter[BigDecimal] +t9041.scala:11: error: could not find implicit value for parameter cellSetter: CellSetter[scala.math.BigDecimal] def setCell(cell: Cell, data: math.BigDecimal): Unit = { cell.setCellValue(data) } ^ 1 error diff --git a/test/files/neg/t9717.check b/test/files/neg/t9717.check index b08553b2dbda..29ea674e98a2 100644 --- a/test/files/neg/t9717.check +++ b/test/files/neg/t9717.check @@ -4,15 +4,13 @@ t9717.scala:2: error: ambiguous implicit values: match expected type Int class B(implicit F: Int) extends A({ implicit val v: Int = 1; implicitly[Int] }) // ambiguous ^ -t9717.scala:6: error: implicit error; -!I e: Int +t9717.scala:6: error: 
could not find implicit value for parameter e: Int def this() = this(implicitly[Int]) // neg ^ t9717.scala:7: error: not found: value f def this(s: String) = this(f) // neg (`this` is not in scope!) ^ -t9717.scala:12: error: implicit error; -!I e: Int +t9717.scala:12: error: could not find implicit value for parameter e: Int def this() = { this(implicitly[Int]) } // not in scope (spec 5.3.1, scope which is in effect at the point of the enclosing class definition) ^ 4 errors diff --git a/test/files/neg/t9960.check b/test/files/neg/t9960.check index 90afd4a386e3..085665971bc4 100644 --- a/test/files/neg/t9960.check +++ b/test/files/neg/t9960.check @@ -1,12 +1,4 @@ -t9960.scala:27: error: implicit error; -!I m: - NNN.Aux[ - Reader - , - FxAppend[Fx1[Task], Fx2[Validate, Reader]] - , - Fx2[Task, Validate] - ] +t9960.scala:27: error: could not find implicit value for parameter m: NNN.Aux[NNN.Reader,NNN.FxAppend[NNN.Fx1[NNN.Task],NNN.Fx2[NNN.Validate,NNN.Reader]],NNN.Fx2[NNN.Task,NNN.Validate]] val hhhh: Eff[Fx2[Task, Validate], Unit] = runReader(gggg) ^ 1 error diff --git a/test/files/run/typetags_without_scala_reflect_typetag_lookup.check b/test/files/run/typetags_without_scala_reflect_typetag_lookup.check index 1f5806f88736..892784d1cf66 100644 --- a/test/files/run/typetags_without_scala_reflect_typetag_lookup.check +++ b/test/files/run/typetags_without_scala_reflect_typetag_lookup.check @@ -1,3 +1,2 @@ -pos: RangePosition(newSource1.scala, 455, 466, 471) implicit error; -!I evidence$1: TypeTag[Int] ERROR +pos: RangePosition(newSource1.scala, 455, 466, 471) could not find implicit value for evidence parameter of type reflect.runtime.package.universe.TypeTag[Int] ERROR diff --git a/test/files/run/typetags_without_scala_reflect_typetag_manifest_interop.check b/test/files/run/typetags_without_scala_reflect_typetag_manifest_interop.check index 5a9a160570f9..d510c578afc6 100644 --- a/test/files/run/typetags_without_scala_reflect_typetag_manifest_interop.check +++ b/test/files/run/typetags_without_scala_reflect_typetag_manifest_interop.check @@ -1,5 +1,2 @@ -pos: RangePosition(newSource1.scala, 471, 479, 482) implicit error; -!I m: Manifest[T] - No Manifest available for App.this.T. - ERROR +pos: RangePosition(newSource1.scala, 471, 479, 482) No Manifest available for App.this.T. 
ERROR From 1b27f0904c91c88b38ccdff6a71d4aaf8af2c374 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Sun, 18 Apr 2021 16:37:44 -0700 Subject: [PATCH 089/769] remove references to Bintray and JCenter --- scripts/common | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/scripts/common b/scripts/common index 2584d10574ef..d5f3f715b496 100644 --- a/scripts/common +++ b/scripts/common @@ -18,7 +18,6 @@ integrationRepoUrl=${integrationRepoUrl-"https://scala-ci.typesafe.com/artifacto # only used on jenkins sbtRepositoryConfig="$WORKSPACE/scripts/sbt-repositories-config" -jcenterCacheUrl=${jcenterCacheUrl-"https://scala-ci.typesafe.com/artifactory/jcenter/"} # used by `checkAvailability` TMP_ROOT_DIR=$(mktemp -d -t pr-scala.XXXX) @@ -92,11 +91,9 @@ function generateRepositoriesConfig() { fi cat >> "$sbtRepositoryConfig" << EOF - jcenter-cache: $jcenterCacheUrl local maven-central - typesafe-ivy-releases-boot: https://repo.lightbend.com/typesafe/ivy-releases/, [organisation]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext], bootOnly - typesafe-ivy-releases: https://dl.bintray.com/typesafe/ivy-releases/, [organisation]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext] + typesafe-ivy-releases: https://repo.lightbend.com/typesafe/ivy-releases/, [organisation]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext] sbt-plugin-releases: https://repo.scala-sbt.org/scalasbt/sbt-plugin-releases/, [organisation]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext] EOF } From c719da700c5728e4cb5efb1e4904f34c74ee00c7 Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Mon, 19 Apr 2021 17:21:17 +0200 Subject: [PATCH 090/769] Support Scala 3 wildcard and renaming imports under -Xsource:3 Instead of: import foo._ One can now write: import foo.* and instead of: import foo.{bar => baz} One can now write: import foo.{bar as baz} As well as: import foo.bar as baz This will let us deprecate the old syntax in a future release of Scala 3 (it's currently only deprecated under `-source future`). See http://dotty.epfl.ch/docs/reference/changed-features/imports.html for details but note that unlike Scala 3 this commit does not implement support for: import java as j As that would require deeper changes in the compiler. --- .../scala/tools/nsc/ast/parser/Parsers.scala | 45 ++++++++++++------- .../scala/reflect/internal/StdNames.scala | 3 ++ test/files/neg/import-future.check | 4 ++ test/files/neg/import-future.scala | 27 +++++++++++ test/files/pos/import-future.scala | 25 +++++++++++ 5 files changed, 87 insertions(+), 17 deletions(-) create mode 100644 test/files/neg/import-future.check create mode 100644 test/files/neg/import-future.scala create mode 100644 test/files/pos/import-future.scala diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index e84248e4663b..f0356c7b00eb 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -2564,19 +2564,27 @@ self => def loop(expr: Tree): Tree = { expr setPos expr.pos.makeTransparent val selectors: List[ImportSelector] = in.token match { - case USCORE => List(wildImportSelector()) // import foo.bar._; - case LBRACE => importSelectors() // import foo.bar.{ x, y, z } - case _ => - val nameOffset = in.offset - val name = ident() - if (in.token == DOT) { - // import foo.bar.ident. 
and so create a select node and recurse. - val t = atPos(start, if (name == nme.ERROR) in.offset else nameOffset)(Select(expr, name)) - in.nextToken() - return loop(t) + case USCORE => + List(wildImportSelector()) // import foo.bar._ + case IDENTIFIER if currentRun.isScala3 && in.name == raw.STAR => + List(wildImportSelector()) // import foo.bar.* + case LBRACE => + importSelectors() // import foo.bar.{ x, y, z } + case _ => + if (settings.isScala3 && lookingAhead { isRawIdent && in.name == nme.as }) + List(importSelector()) // import foo.bar as baz + else { + val nameOffset = in.offset + val name = ident() + if (in.token == DOT) { + // import foo.bar.ident. and so create a select node and recurse. + val t = atPos(start, if (name == nme.ERROR) in.offset else nameOffset)(Select(expr, name)) + in.nextToken() + return loop(t) + } + // import foo.bar.Baz; + else List(makeImportSelector(name, nameOffset)) } - // import foo.bar.Baz; - else List(makeImportSelector(name, nameOffset)) } // reaching here means we're done walking. atPos(start)(Import(expr, selectors)) @@ -2619,17 +2627,20 @@ self => val bbq = in.token == BACKQUOTED_IDENT val name = wildcardOrIdent() var renameOffset = -1 - val rename = in.token match { - case ARROW => + + val rename = + if (in.token == ARROW || (settings.isScala3 && isRawIdent && in.name == nme.as)) { in.nextToken() renameOffset = in.offset if (name == nme.WILDCARD && !bbq) syntaxError(renameOffset, "Wildcard import cannot be renamed") wildcardOrIdent() - case _ if name == nme.WILDCARD && !bbq => null - case _ => + } + else if (name == nme.WILDCARD && !bbq) null + else { renameOffset = start name - } + } + ImportSelector(name, start, rename, renameOffset) } diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala index 1906a2f3028f..fc8581847966 100644 --- a/src/reflect/scala/reflect/internal/StdNames.scala +++ b/src/reflect/scala/reflect/internal/StdNames.scala @@ -658,6 +658,9 @@ trait StdNames { val long2Long: NameType = nameType("long2Long") val boolean2Boolean: NameType = nameType("boolean2Boolean") + // Scala 3 import syntax + val as: NameType = nameType("as") + // Compiler utilized names val AnnotatedType: NameType = nameType("AnnotatedType") diff --git a/test/files/neg/import-future.check b/test/files/neg/import-future.check new file mode 100644 index 000000000000..000601f45b7d --- /dev/null +++ b/test/files/neg/import-future.check @@ -0,0 +1,4 @@ +import-future.scala:15: error: not found: value unrelated + unrelated(1) // error + ^ +1 error diff --git a/test/files/neg/import-future.scala b/test/files/neg/import-future.scala new file mode 100644 index 000000000000..288fd3d0e240 --- /dev/null +++ b/test/files/neg/import-future.scala @@ -0,0 +1,27 @@ +// scalac: -Xsource:3 +// + +class D { + def *(y: Int): Int = y + def unrelated(y: Int): Int = y +} + +object Test { + val d = new D + + def one: Int = { + import d.`*` + + unrelated(1) // error + + *(1) + } + + def two: Int = { + import d.* + + unrelated(1) + + *(1) + } +} diff --git a/test/files/pos/import-future.scala b/test/files/pos/import-future.scala new file mode 100644 index 000000000000..cfaff804af02 --- /dev/null +++ b/test/files/pos/import-future.scala @@ -0,0 +1,25 @@ +// scalac: -Xsource:3 +// + +import java.io as jio +import scala.{collection as c} + +import c.mutable as mut +import mut.ArrayBuffer as Buf + +object O { + val x: jio.IOException = ??? 
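A minimal self-contained sketch of the import forms accepted under -Xsource:3 with this change; `mylib`, `Gadget`, `nested` and `Widget` are hypothetical names used only for illustration:

    object mylib { class Gadget; object nested { def f(x: Int): Int = x } }

    object UseSite {
      import mylib.*                    // wildcard, same as `import mylib._`
      import mylib.{Gadget as Widget}   // renaming selector, same as `import mylib.{Gadget => Widget}`
      import mylib.nested.f as g        // renaming a single member without braces
      val w: Widget = new Widget
      val n: Int = g(1)
    }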
+ val y = Buf(1, 2, 3) + + type OString = String + def foo22(x: Int) = x +} + +class C { + import O.{ foo22 as foo, OString as OS } + println(foo(22)) + val s: OS = "" + + import mut.* + val ab = ArrayBuffer(1) +} From 350bbe7de2232ee2e7e42f5d7f58d5b33db3c406 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 8 Feb 2021 11:11:36 +1000 Subject: [PATCH 091/769] More accurate outer checks in patterns Avoids eliding outer checks that matter (run/t11534b.scala) and avoids emitting checks that don't (pos/t11534.scala) which avoids compiler warnings when the tested class doesn't have an outer field. The latter stops the annoying unchecked warning that appeared since a recent refactoring made `TermName` a final class. --- .../transform/patmat/MatchTreeMaking.scala | 92 +++++++++++--- test/files/neg/t7721.check | 20 ++- test/files/pos/t11534.scala | 8 ++ test/files/run/t11534b.scala | 24 ++++ test/files/run/t11534c.scala | 117 ++++++++++++++++++ 5 files changed, 243 insertions(+), 18 deletions(-) create mode 100644 test/files/pos/t11534.scala create mode 100644 test/files/run/t11534b.scala create mode 100644 test/files/run/t11534c.scala diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala index 414407141b26..2d3299112420 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala @@ -347,9 +347,6 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { def eqTest(pat: Tree, testedBinder: Symbol) = REF(testedBinder) OBJ_EQ pat override def withOuterTest(orig: Tree)(testedBinder: Symbol, expectedTp: Type): Tree = { - val expectedPrefix = expectedTp.prefix - val testedPrefix = testedBinder.info.prefix - // Check if a type is defined in a static location. Unlike `tp.isStatic` before `flatten`, // this also includes methods and (possibly nested) objects inside of methods. def definedInStaticLocation(tp: Type): Boolean = { @@ -361,20 +358,81 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { tp.typeSymbol.owner == tp.prefix.typeSymbol && isStatic(tp.prefix) } - if ((expectedPrefix eq NoPrefix) - || expectedTp.typeSymbol.isJava - || definedInStaticLocation(expectedTp) - || testedPrefix =:= expectedPrefix) orig - else gen.mkAttributedQualifierIfPossible(expectedPrefix) match { - case None => orig - case Some(expectedOuterRef) => - // ExplicitOuter replaces `Select(q, outerSym) OBJ_EQ expectedPrefix` - // by `Select(q, outerAccessor(outerSym.owner)) OBJ_EQ expectedPrefix` - // if there's an outer accessor, otherwise the condition becomes `true` - // TODO: centralize logic whether there's an outer accessor and use here? - val synthOuterGetter = expectedTp.typeSymbol.newMethod(nme.OUTER_SYNTH, newFlags = SYNTHETIC | ARTIFACT) setInfo expectedPrefix - val outerTest = (Select(codegen._asInstanceOf(testedBinder, expectedTp), synthOuterGetter)) OBJ_EQ expectedOuterRef - and(orig, outerTest) + // In `def foo(a: b.B) = a match { case _: p.P }` + // testedBinder.symbol.info = b.B + // expectedTp = p.P + + expectedTp.dealias match { + case RefinedType(Nil, _) => orig + case rt@RefinedType(parent :: rest, scope) => + // If the pattern type is refined type, emit outer tests for each component. 
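        // For intuition (hypothetical classes, not from this patch): given
        //   class Outer { class Inner };  val o1, o2 = new Outer
        // a pattern such as `case _: o1.Inner with Serializable` is decomposed here so
        // that `o1.Inner` still gets its runtime outer test (an `o2.Inner` must not
        // match it), while components whose prefix is statically known to be correct,
        // like `Serializable`, contribute no extra test.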
+ withOuterTest(withOuterTest(orig)(testedBinder, parent))(testedBinder, copyRefinedType(rt, rest, scope)) + case expectedTp => + val expectedClass = expectedTp.typeSymbol + assert(!expectedClass.isRefinementClass, orig) + // .typeSymbol dealiases, so look at the prefix of the base type at the dealiased symbol, + // not of expectedTp itself. + val expectedPrefix = expectedTp.baseType(expectedClass).prefix + + + // Given `(a: x.B) match { case _: x.P }` where P is subclass of B, is it possible + // that a value conforms to both x.B and x1.P where `x ne x1`? + // + // To answer this, we create a new prefix based on a fresh symbol and check the + // base type of TypeRef(freshPrefix, typePatternSymbol (P), args) at the binder + // symbol (B). If that is prefixed by the fresh symbol, they are statically the + // same. + // + // It is not sufficient to show that x.P is a subtype of x.B, as this + // would incorrectly elide the outer test in: + // + // class P extends p1.B + // def test(b: p1.B) = b match { case _: p1.P } + // test(new p2.P) + def prefixAligns: Boolean = { + expectedTp match { + case TypeRef(pre, _, _) if !pre.isStable => // e.g. _: Outer#Inner + false + case TypeRef(pre, sym, args) => + val testedBinderClass = testedBinder.info.upperBound.typeSymbol + val testedBinderType = testedBinder.info.baseType(testedBinderClass) + + val testedPrefixIsExpectedTypePrefix = pre =:= testedBinderType.prefix + val testedPrefixAndExpectedPrefixAreStaticallyIdentical: Boolean = { + val freshPrefix = pre match { + case ThisType(thissym) => + ThisType(thissym.cloneSymbol(thissym.owner)) + case _ => + val preSym = pre.termSymbol + val freshPreSym = preSym.cloneSymbol(preSym.owner).setInfo(preSym.info) + singleType(pre.prefix, freshPreSym) + } + val expectedTpFromFreshPrefix = TypeRef(freshPrefix, sym, args) + val baseTypeFromFreshPrefix = expectedTpFromFreshPrefix.baseType(testedBinderClass) + freshPrefix eq baseTypeFromFreshPrefix.prefix + } + testedPrefixAndExpectedPrefixAreStaticallyIdentical && testedPrefixIsExpectedTypePrefix + case _ => + false + } + } + + if ((expectedPrefix eq NoPrefix) + || expectedTp.typeSymbol.isJava + || definedInStaticLocation(expectedTp) + || testedBinder.info <:< expectedTp + || prefixAligns) orig + else gen.mkAttributedQualifierIfPossible(expectedPrefix) match { + case None => orig + case Some(expectedOuterRef) => + // ExplicitOuter replaces `Select(q, outerSym) OBJ_EQ expectedPrefix` + // by `Select(q, outerAccessor(outerSym.owner)) OBJ_EQ expectedPrefix` + // if there's an outer accessor, otherwise the condition becomes `true` + // TODO: centralize logic whether there's an outer accessor and use here? + val synthOuterGetter = expectedTp.typeSymbol.newMethod(nme.OUTER_SYNTH, newFlags = SYNTHETIC | ARTIFACT) setInfo expectedPrefix + val outerTest = (Select(codegen._asInstanceOf(testedBinder, expectedTp), synthOuterGetter)) OBJ_EQ expectedOuterRef + and(orig, outerTest) + } } } } diff --git a/test/files/neg/t7721.check b/test/files/neg/t7721.check index 04ef4858356c..2fa50df39c8d 100644 --- a/test/files/neg/t7721.check +++ b/test/files/neg/t7721.check @@ -22,6 +22,24 @@ t7721.scala:49: warning: abstract type pattern B.this.Foo is unchecked since it t7721.scala:49: warning: abstract type pattern B.this.Bar is unchecked since it is eliminated by erasure case x: Foo with Bar with Concrete => x.bippy + x.barry + x.dingo + x.conco + x.bongo ^ +t7721.scala:13: warning: The outer reference in this type test cannot be checked at run time. 
+ case x: Foo with Concrete => x.bippy + x.conco + ^ +t7721.scala:17: warning: The outer reference in this type test cannot be checked at run time. + case x: Concrete with Foo => x.bippy + x.conco + ^ +t7721.scala:21: warning: The outer reference in this type test cannot be checked at run time. + case x: Foo with Bar => x.bippy + x.barry + ^ +t7721.scala:41: warning: The outer reference in this type test cannot be checked at run time. + case x: Foo with Concrete => x.bippy + x.dingo + x.conco + ^ +t7721.scala:45: warning: The outer reference in this type test cannot be checked at run time. + case x: Concrete with Foo => x.bippy + x.dingo + x.conco + ^ +t7721.scala:49: warning: The outer reference in this type test cannot be checked at run time. + case x: Foo with Bar with Concrete => x.bippy + x.barry + x.dingo + x.conco + x.bongo + ^ error: No warnings can be incurred under -Werror. -8 warnings +14 warnings 1 error diff --git a/test/files/pos/t11534.scala b/test/files/pos/t11534.scala new file mode 100644 index 000000000000..bab4bd956d87 --- /dev/null +++ b/test/files/pos/t11534.scala @@ -0,0 +1,8 @@ +// scalac: -Werror +object Test1 { + val g: scala.tools.nsc.Global = ??? + import g._ + def test(sym: Symbol) = sym.name match { + case _: TermName => + } +} diff --git a/test/files/run/t11534b.scala b/test/files/run/t11534b.scala new file mode 100644 index 000000000000..75e835bed9a3 --- /dev/null +++ b/test/files/run/t11534b.scala @@ -0,0 +1,24 @@ +object Test { + case class O(i: Int) { + class A + class B extends A { + def bOuter = O.this + } + trait C { + def cOuter = O.this + } + class D extends o2.B with C + } + val o1 = new O(1); + val o2 = new O(2); + def pat1(a: Test.o1.C) = a match { + case b: Test.o1.B => + assert(b.bOuter eq Test.o1, + s"expected ${o1} as outer of value conforming to pattern `b: Test.o1.B`, but got ${b.bOuter}") + case _ => + + } + def main(args: Array[String]): Unit = { + pat1(new o1.D) + } +} diff --git a/test/files/run/t11534c.scala b/test/files/run/t11534c.scala new file mode 100644 index 000000000000..4fb201c64b4d --- /dev/null +++ b/test/files/run/t11534c.scala @@ -0,0 +1,117 @@ +// scalac: -unchecked +import scala.util.Try + +object Test { + class O(val i: Int) { + class A { + val aOuter = i + } + + class B1 extends A { + val b1Outer = i + } + } + class M(i: Int) extends O(i) { + class B2 extends m2.A { + val b2Outer = i + } + + def pat1(a: M.this.A) = a match { + case b: M.this.B1 => // can elide outer check, (a : m1.A) && (a : O#B1) implies (a : m1.B1) + assertOuter(m1.i, b.b1Outer) + true + case _ => + false + } + def pat2(a: m2.A) = a match { + case b: M.this.B2 => // needs runtime outer check + assertOuter(m1.i, b.b2Outer) + true + case _ => + false + } + def pat3(a: M.this.B1) = a match { + case b: M.this.A => // can elide outer check, (a : m1.B1) && (a : O#A) implies (a : m1.B1) + assertOuter(m1.i, b.aOuter) + true + case _ => + false + } + def pat4(a: M.this.B2) = a match { + case b: m2.A => // can elide outer check, (a : m1.B2) implies (a : m2.A) + assertOuter(m2.i, b.aOuter) + true + case _ => + false + } + } + + val m1 = new M(1); + val m2 = new M(2); + + def pat1(a: m1.A) = a match { + case b: m1.B1 => // can elide outer check, (a : m1.A) && (a : O#B1) implies (a : m1.B1) + assertOuter(m1.i, b.b1Outer) + true + case _ => + false + } + def pat2(a: m2.A) = a match { + case b: m1.B2 => // needs runtime outer check + assertOuter(m1.i, b.b2Outer) + true + case _ => + false + } + def pat3(a: m1.B1) = a match { + case b: m1.A => // can elide outer 
check, (a : m1.B1) && (a : O#A) implies (a : m1.B1) + assertOuter(m1.i, b.aOuter) + true + case _ => + false + } + def pat4(a: m1.B2) = a match { + case b: m2.A => // can elide outer check, (a : m1.B2) implies (a : m2.A) + assertOuter(m2.i, b.aOuter) + true + case _ => + false + } + + def pat5(a: M#B2) = a match { + case b: m2.A => // can elide outer check, (a : A#B2) implies (a : m2.A) + assertOuter(m2.i, b.aOuter) + true + case _ => + false + } + def assertOuter(expected: Int, actual: Int): Unit = { + if (expected != actual) throw WrongOuter(expected, actual) + } + case class WrongOuter(expected: Int, actual: Int) extends RuntimeException(s"expected: $expected, actual: $actual") + + def main(args: Array[String]): Unit = { + assert(pat1(new m1.B1)) + assert(m1.pat1(new m1.B1)) + assert(Try(pat1((new m2.B1).asInstanceOf[m1.B1])).failed.get == WrongOuter(m1.i, m2.i)) + assert(Try(m1.pat1((new m2.B1).asInstanceOf[m1.B1])).failed.get == WrongOuter(m1.i, m2.i)) + + assert(!pat2(new m2.B2)) + assert(!m1.pat2(new m2.B2)) + assert(pat2(new m1.B2)) + assert(m1.pat2(new m1.B2)) + + assert(pat3(new m1.B1)) + assert(m1.pat3(new m1.B1)) + assert(Try(pat3((new m2.B1).asInstanceOf[m1.B1])).failed.get == WrongOuter(m1.i, m2.i)) + assert(Try(m1.pat3((new m2.B1).asInstanceOf[m1.B1])).failed.get == WrongOuter(m1.i, m2.i)) + + assert(pat4(new m1.B2)) + assert(m1.pat4(new m1.B2)) + assert(pat4((new m2.B2).asInstanceOf[m1.B2])) + assert(m1.pat4((new m2.B2).asInstanceOf[m1.B2])) + + assert(pat5(new m1.B2)) + assert(pat5(new m2.B2)) + } +} From 9fcddd35ae3a4ce48a2c5a8765cea5d0557b9801 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Tue, 20 Apr 2021 16:10:39 +0200 Subject: [PATCH 092/769] remove faulty assertion in backend --- .../scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala | 9 ++------- test/files/pos/t12225.scala | 6 ++++++ 2 files changed, 8 insertions(+), 7 deletions(-) create mode 100644 test/files/pos/t12225.scala diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala index 149c29a96ac2..ff76ec0dca3e 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala @@ -91,9 +91,9 @@ abstract class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { assert(classSym != NoSymbol, "Cannot create ClassBType from NoSymbol") assert(classSym.isClass, s"Cannot create ClassBType from non-class symbol $classSym") + // note: classSym can be scala.Array, see https://github.com/scala/bug/issues/12225#issuecomment-729687859 if (global.settings.debug) { - // OPT these assertions have too much performance overhead to run unconditionally - assertClassNotArrayNotPrimitive(classSym) + // OPT this assertion has too much performance overhead to run unconditionally assert(!primitiveTypeToBType.contains(classSym) || isCompilingPrimitive, s"Cannot create ClassBType for primitive class symbol $classSym") } @@ -221,11 +221,6 @@ abstract class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { assert(sym != definitions.ArrayClass || isCompilingArray, sym) } - def assertClassNotArrayNotPrimitive(sym: Symbol): Unit = { - assertClassNotArray(sym) - assert(!primitiveTypeToBType.contains(sym) || isCompilingPrimitive, sym) - } - def implementedInterfaces(classSym: Symbol): List[Symbol] = { def isInterfaceOrTrait(sym: Symbol) = sym.isInterface || sym.isTrait diff --git a/test/files/pos/t12225.scala b/test/files/pos/t12225.scala new file mode 
100644
index 000000000000..baae67d36bf8
--- /dev/null
+++ b/test/files/pos/t12225.scala
@@ -0,0 +1,6 @@
+// scalac: -Ydebug
+object Test {
+ def foo(arr: Array[Int]): Unit = {
+ val Array(x, y) = arr
+ }
+}
From 24bd2d570c128e0a473df3dba346fd3a95dd3c95 Mon Sep 17 00:00:00 2001
From: Guillaume Martres
Date: Tue, 20 Apr 2021 15:40:58 +0200
Subject: [PATCH 093/769] Support Scala 3 vararg splice syntax under -Xsource:3
Instead of:
foo(s: _*)
One can now write:
foo(s*)
And instead of:
case Seq(elems @ _*) =>
One can now write:
case Seq(elems*) =>
See https://dotty.epfl.ch/docs/reference/changed-features/vararg-splices.html for details.
---
 .../scala/tools/nsc/ast/parser/Parsers.scala | 26 ++++++++++++++++---
 test/files/pos/varargs-future.scala | 22 ++++++++++++++++
 2 files changed, 45 insertions(+), 3 deletions(-)
 create mode 100644 test/files/pos/varargs-future.scala
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index e84248e4663b..4daccf7ea78a 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -922,6 +922,16 @@ self =>
 mkApply(Ident(op.encode), stripParens(left) :: arguments)
 }
+ /** Is current ident a `*`, and is it followed by a `)` or `, )`? */
+ def followingIsScala3Vararg(): Boolean =
+ currentRun.isScala3 && isRawStar && lookingAhead {
+ in.token == RPAREN ||
+ in.token == COMMA && {
+ in.nextToken()
+ in.token == RPAREN
+ }
+ }
+
 /* --------- OPERAND/OPERATOR STACK --------------------------------------- */
 /** Modes for infix types. */
@@ -1716,7 +1726,7 @@ self =>
 val base = opstack
 @tailrec
- def loop(top: Tree): Tree = if (!isIdent) top else {
+ def loop(top: Tree): Tree = if (!isIdent || followingIsScala3Vararg()) top else {
 pushOpInfo(reduceExprStack(base, top))
 newLineOptWhenFollowing(isExprIntroToken)
 if (isExprIntro)
@@ -1727,7 +1737,12 @@ self =>
 else finishPostfixOp(start, base, popOpInfo())
 }
- reduceExprStack(base, loop(prefixExpr()))
+ val expr = reduceExprStack(base, loop(prefixExpr()))
+ if (followingIsScala3Vararg())
+ atPos(expr.pos.start) {
+ Typed(expr, atPos(in.skipToken()) { Ident(tpnme.WILDCARD_STAR) })
+ }
+ else expr
 }
 /** {{{
@@ -2080,7 +2095,12 @@ self =>
 if (isCloseDelim) atPos(top.pos.start, in.prev.offset)(Star(stripParens(top)))
 else EmptyTree
 )
- case _ => EmptyTree
+ case Ident(name) if isSequenceOK && followingIsScala3Vararg() =>
+ atPos(top.pos.start) {
+ Bind(name, atPos(in.skipToken()) { Star(Ident(nme.WILDCARD)) })
+ }
+ case _ =>
+ EmptyTree
 }
 @tailrec
 def loop(top: Tree): Tree = reducePatternStack(base, top) match {
diff --git a/test/files/pos/varargs-future.scala b/test/files/pos/varargs-future.scala
new file mode 100644
index 000000000000..e8c9057e564b
--- /dev/null
+++ b/test/files/pos/varargs-future.scala
@@ -0,0 +1,22 @@
+// scalac: -Xsource:3
+//
+
+class Test {
+ def foo(xs: Int*): Seq[Int] = xs
+
+ val s: Seq[Int] = Seq(1, 2, 3)
+ foo(s*)
+
+ // not very useful, but supported by Scala 3 (and matches what works with `: _*` syntax)
+ foo(
+ s*,
+ )
+
+ s match {
+ case Seq(elems*) => println(elems)
+ }
+
+ s match {
+ case Seq(x, rest*) => println(rest)
+ }
+}
From 8da523c79614a5456588dcabbe64d17bfd9777c6 Mon Sep 17 00:00:00 2001
From: Guillaume Martres
Date: Thu, 1 Apr 2021 17:03:48 +0200
Subject: [PATCH 094/769] Allow soft keywords `open` and `infix` under -Xsource:3
Since everything is open and can be used infix by default in Scala 2, these keywords are no-op, but they're useful for
cross-compiling with a future version of Scala 3 where they will be required in some cases (with Scala 3.0 they're only required to avoid warnings under `-source future`). See https://dotty.epfl.ch/docs/reference/changed-features/operators.html and http://dotty.epfl.ch/docs/reference/other-new-features/open-classes.html for details. --- .../scala/tools/nsc/ast/parser/Parsers.scala | 47 ++++++++++++++----- .../scala/tools/nsc/ast/parser/Scanners.scala | 2 + .../scala/reflect/internal/StdNames.scala | 4 ++ test/files/neg/open-infix-future.check | 22 +++++++++ test/files/neg/open-infix-future.scala | 17 +++++++ test/files/pos/open-infix-future.scala | 36 ++++++++++++++ 6 files changed, 117 insertions(+), 11 deletions(-) create mode 100644 test/files/neg/open-infix-future.check create mode 100644 test/files/neg/open-infix-future.scala create mode 100644 test/files/pos/open-infix-future.scala diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 358c3188206b..016575d5bda4 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -674,6 +674,24 @@ self => case _ => false } + def isSoftModifier: Boolean = + currentRun.isScala3 && in.token == IDENTIFIER && softModifierNames.contains(in.name) + + /** Is the current token a soft modifier in a position where such a modifier is allowed? */ + def isValidSoftModifier: Boolean = + isSoftModifier && { + val mod = in.name + lookingAhead { + while (in.token == NEWLINE || isModifier || isSoftModifier) in.nextToken() + + in.token match { + case CLASS | CASECLASS => true + case DEF | TRAIT | TYPE => mod == nme.infix + case _ => false + } + } + } + def isAnnotation: Boolean = in.token == AT def isLocalModifier: Boolean = in.token match { @@ -719,12 +737,13 @@ self => } def isLiteral = isLiteralToken(in.token) - def isExprIntroToken(token: Token): Boolean = isLiteralToken(token) || (token match { - case IDENTIFIER | BACKQUOTED_IDENT | - THIS | SUPER | IF | FOR | NEW | USCORE | TRY | WHILE | - DO | RETURN | THROW | LPAREN | LBRACE | XMLSTART => true - case _ => false - }) + def isExprIntroToken(token: Token): Boolean = + !isValidSoftModifier && (isLiteralToken(token) || (token match { + case IDENTIFIER | BACKQUOTED_IDENT | + THIS | SUPER | IF | FOR | NEW | USCORE | TRY | WHILE | + DO | RETURN | THROW | LPAREN | LBRACE | XMLSTART => true + case _ => false + })) def isExprIntro: Boolean = isExprIntroToken(in.token) @@ -2265,8 +2284,11 @@ self => */ def accessModifierOpt(): Modifiers = normalizeModifiers { in.token match { - case m @ (PRIVATE | PROTECTED) => in.nextToken() ; accessQualifierOpt(Modifiers(flagTokens(m))) - case _ => NoMods + case m @ (PRIVATE | PROTECTED) => + in.nextToken() + accessQualifierOpt(Modifiers(flagTokens(m))) + case _ => + NoMods } } @@ -2288,7 +2310,10 @@ self => in.nextToken() loop(mods) case _ => - mods + if (isValidSoftModifier) { + in.nextToken() + loop(mods) + } else mods } loop(NoMods) } @@ -3221,7 +3246,7 @@ self => case IMPORT => in.flushDoc() importClause() - case _ if isAnnotation || isTemplateIntro || isModifier => + case _ if isAnnotation || isTemplateIntro || isModifier || isValidSoftModifier => joinComment(topLevelTmplDef :: Nil) } @@ -3271,7 +3296,7 @@ self => case IMPORT => in.flushDoc() importClause() - case _ if isDefIntro || isModifier || isAnnotation => + case _ if isDefIntro || isModifier || isAnnotation || isValidSoftModifier => joinComment(nonLocalDefOrDcl) case _ if 
isExprIntro => in.flushDoc() diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala index 2ba2f1c87b97..9d1f7b55a91e 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala @@ -1438,6 +1438,8 @@ trait Scanners extends ScannersCommon { final val token2name = (allKeywords map (_.swap)).toMap + final val softModifierNames = Set(nme.open, nme.infix) + // Token representation ---------------------------------------------------- /** Returns the string representation of given token. */ diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala index fc8581847966..66dee512f7bd 100644 --- a/src/reflect/scala/reflect/internal/StdNames.scala +++ b/src/reflect/scala/reflect/internal/StdNames.scala @@ -661,6 +661,10 @@ trait StdNames { // Scala 3 import syntax val as: NameType = nameType("as") + // Scala 3 soft keywords + val infix: NameType = nameType("infix") + val open: NameType = nameType("open") + // Compiler utilized names val AnnotatedType: NameType = nameType("AnnotatedType") diff --git a/test/files/neg/open-infix-future.check b/test/files/neg/open-infix-future.check new file mode 100644 index 000000000000..15515fc2ef50 --- /dev/null +++ b/test/files/neg/open-infix-future.check @@ -0,0 +1,22 @@ +open-infix-future.scala:4: error: expected class or object definition +open trait A // error +^ +open-infix-future.scala:5: error: expected class or object definition +open object B // error +^ +open-infix-future.scala:8: error: ';' expected but 'val' found. + infix val a: Int = 1 // error + ^ +open-infix-future.scala:9: error: ';' expected but 'var' found. + infix var b: Int = 1 // error + ^ +open-infix-future.scala:11: error: ';' expected but 'type' found. + open type D // error + ^ +open-infix-future.scala:14: error: illegal start of statement + open class E // error + ^ +open-infix-future.scala:15: error: ';' expected but 'def' found. 
+ open def bla(y: Int) = y // error + ^ +7 errors diff --git a/test/files/neg/open-infix-future.scala b/test/files/neg/open-infix-future.scala new file mode 100644 index 000000000000..2a250f3b006e --- /dev/null +++ b/test/files/neg/open-infix-future.scala @@ -0,0 +1,17 @@ +// scalac: -Xsource:3 +// + +open trait A // error +open object B // error + +class C { + infix val a: Int = 1 // error + infix var b: Int = 1 // error + + open type D // error + + def foo: Unit = { + open class E // error + open def bla(y: Int) = y // error + } +} diff --git a/test/files/pos/open-infix-future.scala b/test/files/pos/open-infix-future.scala new file mode 100644 index 000000000000..8fee778d40cb --- /dev/null +++ b/test/files/pos/open-infix-future.scala @@ -0,0 +1,36 @@ +// scalac: -Xsource:3 +// + +open class A +infix class B[T, S] + +open infix class C[T, S] +open infix case class CC[T, S](x: Int) +infix open class D[T, S] +infix trait DT[T, S] + +open +infix +private +class E + +class F { + open infix class C1[T, S] + infix type X + + infix def foo(x: Int): Int = x +} + +object G { + open infix class C2[T, S] +} + +object Test { + val infix: Int = 1 + infix + 1 + val open: Int => Int = x => x + open(1) + open { + 2 + } +} From 1de75d61fa8b9bc613e11f8e5be6398e9dfc3caf Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Mon, 19 Apr 2021 14:04:07 +0200 Subject: [PATCH 095/769] Handle a few JDK deprecations --- src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala | 8 +++----- src/library/scala/reflect/package.scala | 8 ++++++-- .../reflect/internal/util/AbstractFileClassLoader.scala | 3 ++- src/testkit/scala/tools/testkit/AssertUtil.scala | 5 +++-- 4 files changed, 14 insertions(+), 10 deletions(-) diff --git a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala index cccd4326c375..ae55c09c3387 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala @@ -15,16 +15,14 @@ package tools.nsc package ast import scala.language.implicitConversions - import java.awt.{List => _, _} import java.awt.event._ import java.io.{StringWriter, Writer} import javax.swing._ import javax.swing.event.TreeModelListener import javax.swing.tree._ - import java.util.concurrent.CountDownLatch -import scala.annotation.tailrec +import scala.annotation.{nowarn, tailrec} /** * Tree browsers can show the AST in a graphical and interactive @@ -217,8 +215,8 @@ abstract class TreeBrowsers { } class ASTMenuBar extends JMenuBar { - val menuKey = Toolkit.getDefaultToolkit().getMenuShortcutKeyMask() - val shiftKey = InputEvent.SHIFT_MASK + val menuKey = Toolkit.getDefaultToolkit().getMenuShortcutKeyMask(): @nowarn("cat=deprecation") // deprecated since JDK 10, replacement only available in 10+ + val shiftKey = InputEvent.SHIFT_DOWN_MASK val jmFile = new JMenu("File") // val jmiSaveImage = new JMenuItem( // new AbstractAction("Save Tree Image") { diff --git a/src/library/scala/reflect/package.scala b/src/library/scala/reflect/package.scala index 33faadc783ad..67551c7f6e80 100644 --- a/src/library/scala/reflect/package.scala +++ b/src/library/scala/reflect/package.scala @@ -12,7 +12,8 @@ package scala -import java.lang.reflect.{ AccessibleObject => jAccessibleObject } +import java.lang.reflect.{AccessibleObject => jAccessibleObject} +import scala.annotation.nowarn package object reflect { @@ -54,7 +55,10 @@ package object reflect { * attempt, it is caught and discarded. 
*/ def ensureAccessible[T <: jAccessibleObject](m: T): T = { - if (!m.isAccessible) { + // This calls `setAccessible` unnecessarily, because `isAccessible` is only `true` if `setAccessible(true)` + // was called before, not if the reflected object is inherently accessible. + // TODO: replace by `canAccess` once we're on JDK 9+ + if (!m.isAccessible: @nowarn("cat=deprecation")) { try m setAccessible true catch { case _: SecurityException => } // does nothing } diff --git a/src/reflect/scala/reflect/internal/util/AbstractFileClassLoader.scala b/src/reflect/scala/reflect/internal/util/AbstractFileClassLoader.scala index 04591dc0fa6c..2c50d5cf9443 100644 --- a/src/reflect/scala/reflect/internal/util/AbstractFileClassLoader.scala +++ b/src/reflect/scala/reflect/internal/util/AbstractFileClassLoader.scala @@ -109,8 +109,9 @@ class AbstractFileClassLoader(val root: AbstractFile, parent: ClassLoader) throw new UnsupportedOperationException() } + // TODO: `getPackage` is deprecated in JDK 9+ - what should be overridden instead? override def getPackage(name: String): Package = findAbstractDir(name) match { - case null => super.getPackage(name) + case null => super.getPackage(name): @nowarn("cat=deprecation") case file => packages.getOrElseUpdate(name, { val ctor = classOf[Package].getDeclaredConstructor(classOf[String], classOf[String], classOf[String], classOf[String], classOf[String], classOf[String], classOf[String], classOf[URL], classOf[ClassLoader]) ctor.setAccessible(true) diff --git a/src/testkit/scala/tools/testkit/AssertUtil.scala b/src/testkit/scala/tools/testkit/AssertUtil.scala index 824adefe107b..4b7083d83e2c 100644 --- a/src/testkit/scala/tools/testkit/AssertUtil.scala +++ b/src/testkit/scala/tools/testkit/AssertUtil.scala @@ -28,6 +28,7 @@ import java.util.concurrent.atomic.AtomicReference import java.lang.ref._ import java.lang.reflect.{Array => _, _} import java.util.IdentityHashMap +import scala.annotation.nowarn /** This module contains additional higher-level assert statements * that are ultimately based on junit.Assert primitives. 
@@ -166,7 +167,7 @@ object AssertUtil { def assertZeroNetThreads(body: => Unit): Unit = { val group = new ThreadGroup("junit") try assertZeroNetThreads(group)(body) - finally group.destroy() + finally group.destroy(): @nowarn("cat=deprecation") // deprecated since JDK 16, will be removed } def assertZeroNetThreads[A](group: ThreadGroup)(body: => A): Try[A] = { val testDone = new CountDownLatch(1) @@ -294,7 +295,7 @@ class NoTrace[A](body: => A) extends Runnable { case Success(a) => result = Some(a) case Failure(e) => synchronized { uncaught += ((Thread.currentThread, e)) } } - finally group.destroy() + finally group.destroy(): @nowarn("cat=deprecation") // deprecated since JDK 16, will be removed } private[testkit] lazy val errors: List[(Thread, Throwable)] = synchronized(uncaught.toList) From 728aed2584c1cf6f2dd4ef97712e9b5e72e2e78e Mon Sep 17 00:00:00 2001 From: Tom Grigg Date: Wed, 10 Feb 2021 23:42:32 -0800 Subject: [PATCH 096/769] bump sbt to 1.3.13 Co-authored-by: Seth Tisue --- project/build.properties | 2 +- scripts/common | 2 +- src/intellij/scala.ipr.SAMPLE | 36 ++++++++++++------------ test/benchmarks/project/build.properties | 2 +- 4 files changed, 21 insertions(+), 21 deletions(-) diff --git a/project/build.properties b/project/build.properties index a919a9b5f46b..0837f7a132de 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.3.8 +sbt.version=1.3.13 diff --git a/scripts/common b/scripts/common index 23c1c334b59d..4f869dfe6eca 100644 --- a/scripts/common +++ b/scripts/common @@ -15,7 +15,7 @@ rm -rf "$WORKSPACE/resolutionScratch_" mkdir -p "$WORKSPACE/resolutionScratch_" SBT_CMD=${SBT_CMD-sbt} -SBT_CMD="$SBT_CMD -sbt-version 1.3.8" +SBT_CMD="$SBT_CMD -sbt-version 1.3.13" # repo to publish builds integrationRepoUrl=${integrationRepoUrl-"https://scala-ci.typesafe.com/artifactory/scala-integration/"} diff --git a/src/intellij/scala.ipr.SAMPLE b/src/intellij/scala.ipr.SAMPLE index 836d9eb6d8df..d96540ba59a8 100644 --- a/src/intellij/scala.ipr.SAMPLE +++ b/src/intellij/scala.ipr.SAMPLE @@ -458,26 +458,26 @@ - + - - - - - - - - - - + + + + + + + + + + - - - - - - - + + + + + + + diff --git a/test/benchmarks/project/build.properties b/test/benchmarks/project/build.properties index a919a9b5f46b..0837f7a132de 100644 --- a/test/benchmarks/project/build.properties +++ b/test/benchmarks/project/build.properties @@ -1 +1 @@ -sbt.version=1.3.8 +sbt.version=1.3.13 From a1dcdcfc88e42a17b88c50957daeaa67ace33e92 Mon Sep 17 00:00:00 2001 From: Tom Grigg Date: Wed, 10 Feb 2021 23:53:49 -0800 Subject: [PATCH 097/769] GitHub Actions: build and test on Windows Co-authored-by: Seth Tisue --- .gitattributes | 3 ++ .github/workflows/ci.yml | 60 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 63 insertions(+) create mode 100644 .github/workflows/ci.yml diff --git a/.gitattributes b/.gitattributes index da4421cb78ed..99eca173f23e 100644 --- a/.gitattributes +++ b/.gitattributes @@ -21,6 +21,9 @@ text eol=lf *.txt eol=lf *.xml eol=lf +# Some sbt launcher scripts can't handle CR in .jvmopts +.jvmopts eol=lf + # Windows-specific files get windows endings *.bat eol=crlf *.cmd eol=crlf diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 000000000000..51ca62cefcca --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,60 @@ +name: Scala Merge CI + +on: + push: + branches: ['2.*.x'] + +defaults: + run: + shell: bash + +jobs: + build_and_test: + name: Windows + runs-on: windows-latest + strategy: + fail-fast: false + 
steps: + - run: git config --global core.autocrlf false + - name: Checkout + uses: actions/checkout@v2 + + # Note that we don't use olafurpg/setup-scala; it wouldn't buy us anything + # over setup-java. (We don't want csbt or xsbt; we prefer the standard + # sbt launch script, which comes preinstalled on Windows (and Ubuntu).) + - name: Setup Java + uses: actions/setup-java@v2 + with: + distribution: adopt + java-version: 8 + + - name: Cache + uses: actions/cache@v2 + with: + path: | + ~/.sbt + ~/.ivy2/cache + ~/.cache/coursier + key: ${{ runner.os }}-sbt-cache-v2-${{ hashFiles('**/*.sbt') }}-${{ hashFiles('project/build.properties') }} + + - name: Setup + run: | + source scripts/common + java -version + javac -version + generateRepositoriesConfig + # Pass these environment vars to subsequent steps + echo "SBT=sbt -Dsbt.override.build.repos=true -Dsbt.repository.config=${sbtRepositoryConfig}" >> $GITHUB_ENV + echo "COURSIER_HOME=$HOME/.coursier" >> "$GITHUB_ENV" + echo "COURSIER_CACHE=$HOME/.cache/coursier/v1" >> "$GITHUB_ENV" + + - name: Build + run: | + source scripts/common + $SBT -warn setupPublishCore generateBuildCharacterPropertiesFile publishLocal + + - name: Test + run: | + source scripts/common + parseScalaProperties buildcharacter.properties + $SBT -Dstarr.version=$maven_version_number -warn setupValidateTest testAll From 6aed5b053a1d5806dfb530b9629f0e3a68089d1d Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Tue, 20 Apr 2021 19:08:30 -0700 Subject: [PATCH 098/769] remove obsolete CI scripts --- scripts/jobs/integrate/ide | 35 ---------------------------------- scripts/jobs/integrate/windows | 22 --------------------- 2 files changed, 57 deletions(-) delete mode 100755 scripts/jobs/integrate/ide delete mode 100755 scripts/jobs/integrate/windows diff --git a/scripts/jobs/integrate/ide b/scripts/jobs/integrate/ide deleted file mode 100755 index 1dc7b43139e8..000000000000 --- a/scripts/jobs/integrate/ide +++ /dev/null @@ -1,35 +0,0 @@ -#!/bin/bash -e -# requires checkout: root is a scala checkout with which to integrate (actually, only required file is versions.properties, as documented below) -# requires env: scalaVersion (specifies binary already built from above checkout), WORKSPACE (provided by jenkins), repo_ref (HEAD of the scala checkout), -# requires files: $WORKSPACE/versions.properties (from checkout -- defines version numbers for modules used to build scala for dbuild...) - -echo "IDE integration not yet available on 2.12.x. Punting." 
-exit 0 - -# TODO: remove when integration is up and running -if [ "woele$_scabot_last" != "woele1" ]; then echo "Scabot didn't mark this as last commit -- skipping."; exit 0; fi - -baseDir=${WORKSPACE-`pwd`} -uberBuildUrl=${uberBuildUrl-"https://github.com/scala-ide/uber-build.git"} -uberBuildConfig=${uberBuildConfig-"validator.conf"} # TODO: backport to 2.10.x: uberBuildConfig="validator-2.10.conf" - -uberBuildDir="$WORKSPACE/uber-build/" - -cd $WORKSPACE -if [[ -d $uberBuildDir ]]; then - ( cd $uberBuildDir && git fetch $uberBuildUrl HEAD && git checkout -f FETCH_HEAD && git clean -fxd ) -else - git clone $uberBuildUrl -fi - -echo "maven.version.number=$scalaVersion" >> versions.properties - -# pass prRepoUrl in, which uber-build passes along to dbuild (in sbt-builds-for-ide) -# the "-P pr-scala" maven arg accomplishes the same thing for maven (directly used in uber-build) -BASEDIR="$WORKSPACE" prRepoUrl="$prRepoUrl" IDE_M2_REPO="$prRepoUrl" MAVEN_ARGS="-P pr-scala"\ - $uberBuildDir/uber-build.sh $uberBuildDir/config/$uberBuildConfig $repo_ref $scalaVersion - -# uber-build puts its local repo under target/m2repo -# wipe the org/scala-lang part, which otherwise just keeps -# growing and growing due to the -$sha-SNAPSHOT approach -[[ -d $WORKSPACE/target/m2repo/org/scala-lang ]] && rm -rf $WORKSPACE/target/m2repo/org/scala-lang diff --git a/scripts/jobs/integrate/windows b/scripts/jobs/integrate/windows deleted file mode 100755 index 964b70383c09..000000000000 --- a/scripts/jobs/integrate/windows +++ /dev/null @@ -1,22 +0,0 @@ -#!/bin/bash - -source scripts/common - -java -version -javac -version - -generateRepositoriesConfig - -# it may not be necessary to set both COURSIER_HOME and sbt.coursier.home, -# but at least for now, doing it just in case; see discussion at -# https://github.com/scala/scala-dev/issues/666 -export COURSIER_HOME=$WORKSPACE/.coursier - -SBT="java $JAVA_OPTS -Dsbt.ivy.home=$WORKSPACE/.ivy2 -Dsbt.coursier.home=$WORKSPACE/.coursier -jar $sbtLauncher -Dsbt.override.build.repos=true -Dsbt.repository.config=$sbtRepositoryConfig" - -# Build locker with STARR -$SBT -warn "setupPublishCore" generateBuildCharacterPropertiesFile publishLocal - -# Build quick and run the tests -parseScalaProperties buildcharacter.properties -$SBT -Dstarr.version=$maven_version_number -warn "setupValidateTest" testAll From 0db7e9b650765392b57941d1981477f98075f091 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Mon, 19 Apr 2021 21:57:25 +0200 Subject: [PATCH 099/769] allow reflective access to java.lang in tests --- build.sbt | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/build.sbt b/build.sbt index 174358a206f9..0e192e14a183 100644 --- a/build.sbt +++ b/build.sbt @@ -691,6 +691,10 @@ lazy val testkit = configureAsSubproject(project) ) ) +// Jigsaw: reflective access between modules (`setAccessible(true)`) requires an `opens` directive. +// This is enforced by error (not just by warning) since JDK 16. In our tests we use reflective access +// from the unnamed package (the classpath) to JDK modules in testing utilities like `assertNotReachable`. 
+val addOpensForTesting = "-XX:+IgnoreUnrecognizedVMOptions" +: Seq("java.util.concurrent.atomic", "java.lang", "java.lang.reflect", "java.net").map(p => s"--add-opens=java.base/$p=ALL-UNNAMED") lazy val junit = project.in(file("test") / "junit") .dependsOn(testkit, compiler, replFrontend, scaladoc) @@ -700,7 +704,7 @@ lazy val junit = project.in(file("test") / "junit") .settings(publish / skip := true) .settings( Test / fork := true, - Test / javaOptions += "-Xss1M", + Test / javaOptions ++= "-Xss1M" +: addOpensForTesting, (Test / forkOptions) := (Test / forkOptions).value.withWorkingDirectory((ThisBuild / baseDirectory).value), (Test / testOnly / forkOptions) := (Test / testOnly / forkOptions).value.withWorkingDirectory((ThisBuild / baseDirectory).value), Compile / scalacOptions ++= Seq( @@ -743,7 +747,7 @@ lazy val scalacheck = project.in(file("test") / "scalacheck") Test / fork := true, // Instead of forking above, it should be possible to set: // Test / classLoaderLayeringStrategy := ClassLoaderLayeringStrategy.Flat, - Test / javaOptions += "-Xss1M", + Test / javaOptions ++= "-Xss1M" +: addOpensForTesting, Test / testOptions += Tests.Argument( // Full stack trace on failure: "-verbosity", "2" @@ -788,7 +792,7 @@ def osgiTestProject(p: Project, framework: ModuleID) = p Test / Keys.test := (Test / Keys.test).dependsOn(Compile / packageBin).value, Test / Keys.testOnly := (Test / Keys.testOnly).dependsOn(Compile / packageBin).evaluated, testOptions += Tests.Argument(TestFrameworks.JUnit, "-a", "-v", "-q"), - Test / javaOptions += "-Dscala.bundle.dir=" + (ThisBuild / buildDirectory).value / "osgi", + Test / javaOptions ++= ("-Dscala.bundle.dir=" + (ThisBuild / buildDirectory).value / "osgi") +: addOpensForTesting, Test / Keys.test / forkOptions := (Test / Keys.test / forkOptions).value.withWorkingDirectory((ThisBuild / baseDirectory).value), Test / unmanagedSourceDirectories := List((ThisBuild / baseDirectory).value / "test" / "osgi" / "src"), Compile / unmanagedResourceDirectories := (Test / unmanagedSourceDirectories).value, @@ -839,10 +843,10 @@ lazy val test = project IntegrationTest / sources := Nil, IntegrationTest / fork := true, Compile / scalacOptions += "-Yvalidate-pos:parser,typer", - IntegrationTest / javaOptions ++= List("-Xmx2G", "-Dpartest.exec.in.process=true", "-Dfile.encoding=UTF-8", "-Duser.language=en", "-Duser.country=US"), + IntegrationTest / javaOptions ++= List("-Xmx2G", "-Dpartest.exec.in.process=true", "-Dfile.encoding=UTF-8", "-Duser.language=en", "-Duser.country=US") ++ addOpensForTesting, IntegrationTest / testOptions += Tests.Argument("-Dfile.encoding=UTF-8", "-Duser.language=en", "-Duser.country=US"), testFrameworks += new TestFramework("scala.tools.partest.sbt.Framework"), - IntegrationTest / testOptions += Tests.Argument("-Dpartest.java_opts=-Xmx1024M -Xms64M"), + IntegrationTest / testOptions += Tests.Argument(s"-Dpartest.java_opts=-Xmx1024M -Xms64M ${addOpensForTesting.mkString(" ")}"), IntegrationTest / testOptions += Tests.Argument("-Dpartest.scalac_opts=" + (Compile / scalacOptions).value.mkString(" ")), (IntegrationTest / forkOptions) := (IntegrationTest / forkOptions).value.withWorkingDirectory((ThisBuild / baseDirectory).value), IntegrationTest / testOptions += { From 00513cdc3ab19add9f2afb780a0e5eac1b4a4080 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Thu, 22 Apr 2021 10:52:19 +0200 Subject: [PATCH 100/769] Fix tests for JDK 16 --- build.sbt | 4 +++- test/files/neg/macro-invalidret.check | 4 ++-- test/files/presentation/infix-completion.check | 
8 ++++---- test/files/presentation/infix-completion2.check | 8 ++++---- .../run/reflection-magicsymbols-invoke.check | 4 ++-- test/files/run/repl-trim-stack-trace.check | 4 ++-- test/files/run/t3613.scala | 17 ++++++++--------- test/files/run/t6344.check | 12 ++++++------ test/files/run/t7741a.check | 3 --- .../files/run/t7741a/GroovyInterface$1Dump.java | 2 +- test/files/run/t9529.check | 2 +- 11 files changed, 33 insertions(+), 35 deletions(-) delete mode 100644 test/files/run/t7741a.check diff --git a/build.sbt b/build.sbt index 0e192e14a183..cbf59c39444b 100644 --- a/build.sbt +++ b/build.sbt @@ -694,7 +694,9 @@ lazy val testkit = configureAsSubproject(project) // Jigsaw: reflective access between modules (`setAccessible(true)`) requires an `opens` directive. // This is enforced by error (not just by warning) since JDK 16. In our tests we use reflective access // from the unnamed package (the classpath) to JDK modules in testing utilities like `assertNotReachable`. -val addOpensForTesting = "-XX:+IgnoreUnrecognizedVMOptions" +: Seq("java.util.concurrent.atomic", "java.lang", "java.lang.reflect", "java.net").map(p => s"--add-opens=java.base/$p=ALL-UNNAMED") +// `add-exports=jdk.jdeps/com.sun.tools.javap` is tests that use `:javap` in the REPL, see scala/bug#12378 +val addOpensForTesting = "-XX:+IgnoreUnrecognizedVMOptions" +: "--add-exports=jdk.jdeps/com.sun.tools.javap=ALL-UNNAMED" +: + Seq("java.util.concurrent.atomic", "java.lang", "java.lang.reflect", "java.net").map(p => s"--add-opens=java.base/$p=ALL-UNNAMED") lazy val junit = project.in(file("test") / "junit") .dependsOn(testkit, compiler, replFrontend, scaladoc) diff --git a/test/files/neg/macro-invalidret.check b/test/files/neg/macro-invalidret.check index ceba1b88c927..11097f429909 100644 --- a/test/files/neg/macro-invalidret.check +++ b/test/files/neg/macro-invalidret.check @@ -39,9 +39,9 @@ type mismatch for return type: reflect.runtime.universe.Literal does not conform def bar2: Int = macro Impls.foo2 ^ Macros_Test_2.scala:33: error: exception during macro expansion: -#partest !java15 +#partest !java15+ java.lang.NullPointerException -#partest java15 +#partest java15+ java.lang.NullPointerException: Cannot throw exception because "null" is null #partest at Impls$.foo3(Impls_1.scala:7) diff --git a/test/files/presentation/infix-completion.check b/test/files/presentation/infix-completion.check index 9d0723e882c4..a6549c83911b 100644 --- a/test/files/presentation/infix-completion.check +++ b/test/files/presentation/infix-completion.check @@ -3,9 +3,9 @@ reload: Snippet.scala askTypeCompletion at Snippet.scala(1,34) ================================================================================ [response] askTypeCompletion at (1,34) -#partest !java15 +#partest !java15+ retrieved 203 members -#partest java15 +#partest java15+ retrieved 205 members #partest [inaccessible] protected def num: Fractional[Double] @@ -123,7 +123,7 @@ def compareTo(x$1: Double): Int def compareTo(x$1: Float): Int def compareTo(x$1: Integer): Int def compareTo(x$1: Long): Int -#partest java15 +#partest java15+ def describeConstable(): java.util.Optional[Double] #partest def doubleValue(): Double @@ -145,7 +145,7 @@ def isNegInfinity: Boolean def isPosInfinity: Boolean def isValidLong: Boolean def longValue(): Long -#partest java15 +#partest java15+ def resolveConstantDesc(x$1: java.lang.invoke.MethodHandles.Lookup): Double #partest def round: Long diff --git a/test/files/presentation/infix-completion2.check 
b/test/files/presentation/infix-completion2.check index 9d0723e882c4..a6549c83911b 100644 --- a/test/files/presentation/infix-completion2.check +++ b/test/files/presentation/infix-completion2.check @@ -3,9 +3,9 @@ reload: Snippet.scala askTypeCompletion at Snippet.scala(1,34) ================================================================================ [response] askTypeCompletion at (1,34) -#partest !java15 +#partest !java15+ retrieved 203 members -#partest java15 +#partest java15+ retrieved 205 members #partest [inaccessible] protected def num: Fractional[Double] @@ -123,7 +123,7 @@ def compareTo(x$1: Double): Int def compareTo(x$1: Float): Int def compareTo(x$1: Integer): Int def compareTo(x$1: Long): Int -#partest java15 +#partest java15+ def describeConstable(): java.util.Optional[Double] #partest def doubleValue(): Double @@ -145,7 +145,7 @@ def isNegInfinity: Boolean def isPosInfinity: Boolean def isValidLong: Boolean def longValue(): Long -#partest java15 +#partest java15+ def resolveConstantDesc(x$1: java.lang.invoke.MethodHandles.Lookup): Double #partest def round: Long diff --git a/test/files/run/reflection-magicsymbols-invoke.check b/test/files/run/reflection-magicsymbols-invoke.check index 7300a52e3068..6759edfecff3 100644 --- a/test/files/run/reflection-magicsymbols-invoke.check +++ b/test/files/run/reflection-magicsymbols-invoke.check @@ -64,10 +64,10 @@ testing Object.finalize: () testing Object.getClass: class java.lang.String testing Object.hashCode: 50 testing Object.ne: false -#partest !java15 +#partest !java15+ testing Object.notify: class java.lang.IllegalMonitorStateException: null testing Object.notifyAll: class java.lang.IllegalMonitorStateException: null -#partest java15 +#partest java15+ testing Object.notify: class java.lang.IllegalMonitorStateException: current thread is not owner testing Object.notifyAll: class java.lang.IllegalMonitorStateException: current thread is not owner #partest diff --git a/test/files/run/repl-trim-stack-trace.check b/test/files/run/repl-trim-stack-trace.check index 53609d85dcc5..ee27e0c4cec9 100644 --- a/test/files/run/repl-trim-stack-trace.check +++ b/test/files/run/repl-trim-stack-trace.check @@ -24,9 +24,9 @@ java.lang.Exception ... ??? elided scala> null.asInstanceOf -#partest !java15 +#partest !java15+ java.lang.NullPointerException -#partest java15 +#partest java15+ java.lang.NullPointerException: Cannot throw exception because the return value of "res3()" is null #partest at .lzycompute(:8) diff --git a/test/files/run/t3613.scala b/test/files/run/t3613.scala index 1293f62c0fd4..d8a6a862c925 100644 --- a/test/files/run/t3613.scala +++ b/test/files/run/t3613.scala @@ -1,15 +1,14 @@ class Boopy { - private val s = new Schnuck - def observer : PartialFunction[ Any, Unit ] = s.observer + private val s = new Schnuck + def observer : PartialFunction[ Any, Unit ] = s.observer - private class Schnuck extends javax.swing.AbstractListModel { - model => - val observer : PartialFunction[ Any, Unit ] = { - case "Boopy" => fireIntervalAdded( model, 0, 1 ) - } - def getSize = 0 - def getElementAt( idx: Int ) = ??? 
+ private class Schnuck extends javax.swing.AbstractListModel[AnyRef] { model => + val observer : PartialFunction[ Any, Unit ] = { + case "Boopy" => fireIntervalAdded( model, 0, 1 ) } + def getSize = 0 + def getElementAt(idx: Int): AnyRef = null + } } diff --git a/test/files/run/t6344.check b/test/files/run/t6344.check index d994d81c7dc5..03f2468145d1 100644 --- a/test/files/run/t6344.check +++ b/test/files/run/t6344.check @@ -4,9 +4,9 @@ public int C0.v1(int) public int C0.v3() public int C0.v3() public int C0.v4(int,scala.collection.immutable.List) -#partest !java15 +#partest !java15+ public int C0.v4(int,scala.collection.immutable.List>) -#partest java15 +#partest java15+ public int C0.v4(int,scala.collection.immutable.List>) #partest public scala.collection.immutable.List C0.v2() @@ -18,9 +18,9 @@ public java.lang.Object C1.v1(java.lang.Object) public java.lang.Object C1.v3() public java.lang.Object C1.v3() public java.lang.Object C1.v4(java.lang.Object,scala.collection.immutable.List) -#partest !java15 +#partest !java15+ public java.lang.Object C1.v4(java.lang.Object,scala.collection.immutable.List>) -#partest java15 +#partest java15+ public java.lang.Object C1.v4(java.lang.Object,scala.collection.immutable.List>) #partest public scala.collection.immutable.List C1.v2() @@ -32,9 +32,9 @@ public java.lang.String C2.v1(java.lang.String) public java.lang.String C2.v3() public java.lang.String C2.v3() public java.lang.String C2.v4(java.lang.String,scala.collection.immutable.List) -#partest !java15 +#partest !java15+ public java.lang.String C2.v4(java.lang.String,scala.collection.immutable.List>) -#partest java15 +#partest java15+ public java.lang.String C2.v4(java.lang.String,scala.collection.immutable.List>) #partest public scala.collection.immutable.List C2.v2() diff --git a/test/files/run/t7741a.check b/test/files/run/t7741a.check deleted file mode 100644 index e835f0ce738c..000000000000 --- a/test/files/run/t7741a.check +++ /dev/null @@ -1,3 +0,0 @@ -#partest !java8 -Note: t7741a/GroovyInterface$1Dump.java uses or overrides a deprecated API. -Note: Recompile with -Xlint:deprecation for details. 
diff --git a/test/files/run/t7741a/GroovyInterface$1Dump.java b/test/files/run/t7741a/GroovyInterface$1Dump.java index 0c0eab3f1b6d..cc187f353ed4 100644 --- a/test/files/run/t7741a/GroovyInterface$1Dump.java +++ b/test/files/run/t7741a/GroovyInterface$1Dump.java @@ -175,7 +175,7 @@ public static byte[] dump () throws Exception { { mv = cw.visitMethod(ACC_PRIVATE + ACC_STATIC + ACC_SYNTHETIC, "$createCallSiteArray", "()Lorg/codehaus/groovy/runtime/callsite/CallSiteArray;", null, null); mv.visitCode(); - mv.visitLdcInsn(new Integer(0)); + mv.visitLdcInsn(Integer.valueOf(0)); mv.visitTypeInsn(ANEWARRAY, "java/lang/String"); mv.visitVarInsn(ASTORE, 0); mv.visitTypeInsn(NEW, "org/codehaus/groovy/runtime/callsite/CallSiteArray"); diff --git a/test/files/run/t9529.check b/test/files/run/t9529.check index f1c433ddaced..38ad198f56ba 100644 --- a/test/files/run/t9529.check +++ b/test/files/run/t9529.check @@ -32,7 +32,7 @@ u: List(@anns.Ann_0$Container(value={@anns.Ann_0(name="u", value="you"), @anns.A List(@anns.Ann_0$Container(value={@anns.Ann_0(name="", value="constructor"), @anns.Ann_0(name="", value="initializer")})) -#partest java15 +#partest java15+ A: List() B: List(@java.lang.Deprecated(forRemoval=false, since="")) C: List(@anns.Ann_0(name="C", value="see")) From 6c18216269ad8cf5f23dc1a43b26db9a807ffcb5 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Wed, 21 Apr 2021 17:38:45 +0200 Subject: [PATCH 101/769] mima filter for CharSequence.isEmpty mixin forwarder --- project/MimaFilters.scala | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/project/MimaFilters.scala b/project/MimaFilters.scala index 71d9d7c65c03..0b35213fffec 100644 --- a/project/MimaFilters.scala +++ b/project/MimaFilters.scala @@ -33,6 +33,12 @@ object MimaFilters extends AutoPlugin { ProblemFilters.exclude[MissingTypesProblem]("scala.annotation.implicitAmbiguous"), ProblemFilters.exclude[MissingTypesProblem]("scala.annotation.implicitNotFound"), ProblemFilters.exclude[MissingTypesProblem]("scala.annotation.migration"), + + // when building on a recent JDK, classes implementing `CharSequence` get a mixin forwarder for + // the `isEmpty` default method that was added in JDK 15 + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.Predef#SeqCharSequence.isEmpty"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.Predef#ArrayCharSequence.isEmpty"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.runtime.ArrayCharSequence.isEmpty"), ) override val buildSettings = Seq( From a307af51c84eac75a1be8f2a37fba350d6eb98a6 Mon Sep 17 00:00:00 2001 From: Anatolii Kmetiuk Date: Thu, 22 Apr 2021 11:49:44 +0200 Subject: [PATCH 102/769] Upgrade Dotty to 3.0.0-RC3 --- project/DottySupport.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/DottySupport.scala b/project/DottySupport.scala index 369fa420d31a..94c29eed0701 100644 --- a/project/DottySupport.scala +++ b/project/DottySupport.scala @@ -22,7 +22,7 @@ object TastySupport { * Dotty in .travis.yml. 
*/ object DottySupport { - val dottyVersion = "3.0.0-RC2" + val dottyVersion = "3.0.0-RC3" val compileWithDotty: Boolean = Option(System.getProperty("scala.build.compileWithDotty")).map(_.toBoolean).getOrElse(false) lazy val commonSettings = Seq( From 5edc7af81236126583f06d9a1328c52ea839dbf3 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Mon, 19 Apr 2021 12:53:37 +0200 Subject: [PATCH 103/769] Build and test mergely on JDK 16 (on Travis) --- .travis.yml | 254 +++++++++++++++++++++++++++++----------------------- 1 file changed, 141 insertions(+), 113 deletions(-) diff --git a/.travis.yml b/.travis.yml index 994d9c446ecc..bc80e7ca1f48 100644 --- a/.travis.yml +++ b/.travis.yml @@ -8,120 +8,148 @@ stages: - build - test +templates: # this has no effect on travis, it's just a place to put our templates + pr-jdk8: &pr-jdk8 + if: type = pull_request OR repo != scala/scala + + cron-jdk16: &cron-jdk16 + if: type = cron AND repo = scala/scala + env: ADOPTOPENJDK=16 + + build-for-testing: &build-for-testing + # pull request validation (w/ bootstrap) + # differs from the build that publishes releases / integration builds: + # - not using bash script setup, but just the underlying sbt calls + # - publishing locally rather than to Artifactory + # the bootstrap above is older historically; this way of doing it is newer + # and also simpler. we should aim to reduce/eliminate the duplication. + stage: build + name: build, publishLocal, build again + script: + - set -e + - sbt setupPublishCore generateBuildCharacterPropertiesFile headerCheck publishLocal + - STARR=$(sed -n 's/^maven\.version\.number=//p' buildcharacter.properties) && echo $STARR + - sbt -Dstarr.version=$STARR setupValidateTest compile + workspaces: + create: + name: bootstrapped + paths: + # so new STARR will be available + - "buildcharacter.properties" + - "$HOME/.ivy2/local/org.scala-lang" + # so build products built using new STARR are kept + - "target" + - "project/target" + - "project/project/target" + - "project/project/project/target" + - "dist" + - "build" + + test1: &test1 + stage: test + name: tests (junit, scalacheck, et al) + workspaces: + use: bootstrapped + script: + - set -e + - STARR=$(sed -n 's/^maven\.version\.number=//p' buildcharacter.properties) && echo $STARR + - sbt -Dstarr.version=$STARR setupValidateTest Test/compile testAll1 + + test2: &test2 + stage: test + name: tests (partest) + workspaces: + use: bootstrapped + script: + - set -e + - STARR=$(sed -n 's/^maven\.version\.number=//p' buildcharacter.properties) && echo $STARR + - sbt -Dstarr.version=$STARR setupValidateTest testAll2 + jobs: - include: - - stage: build - if: type != pull_request AND repo = scala/scala - name: bootstrap and publish - script: - # see comment in `bootstrap_fun` for details on the procedure - # env available in each stage - # - by travis config (see below): secret env vars - # - by `common` script: WORKSPACE, IVY2_DIR, SBT_CMD, integrationRepoUrl - # - by `bootstrap_fun`: publishPrivateTask, ... - - set -e - - (cd admin && ./init.sh) - - source scripts/common - - source scripts/bootstrap_fun - - determineScalaVersion - - removeExistingBuilds $integrationRepoUrl - - if [ ! 
-z "$STARR_REF" ]; then buildStarr; fi - - buildLocker - - buildQuick - - triggerScalaDist - - # pull request validation (w/ bootstrap) - # differs from the bootstrap above by: - # - not using bash script setup, but just the underlying sbt calls - # - publishing locally rather than to Artifactory - # the bootstrap above is older historically; this way of doing it is newer - # and also simpler. we should aim to reduce/eliminate the duplication. - - stage: build - name: build, publishLocal, build again - if: type = pull_request OR repo != scala/scala - script: - - set -e - - sbt setupPublishCore generateBuildCharacterPropertiesFile headerCheck publishLocal - - STARR=$(sed -n 's/^maven\.version\.number=//p' buildcharacter.properties) && echo $STARR - - sbt -Dstarr.version=$STARR setupValidateTest compile - workspaces: - create: - name: bootstrapped - paths: - # so new STARR will be available - - "buildcharacter.properties" - - "$HOME/.ivy2/local/org.scala-lang" - # so build products built using new STARR are kept - - "target" - - "project/target" - - "project/project/target" - - "project/project/project/target" - - "dist" - - "build" - - - stage: test - name: tests (junit, scalacheck, et al) - if: type = pull_request OR repo != scala/scala - workspaces: - use: bootstrapped - script: - - set -e - - STARR=$(sed -n 's/^maven\.version\.number=//p' buildcharacter.properties) && echo $STARR - - sbt -Dstarr.version=$STARR setupValidateTest Test/compile testAll1 - - - name: tests (partest) - if: type = pull_request OR repo != scala/scala - workspaces: - use: bootstrapped - script: - - set -e - - STARR=$(sed -n 's/^maven\.version\.number=//p' buildcharacter.properties) && echo $STARR - - sbt -Dstarr.version=$STARR setupValidateTest testAll2 - - - name: ensure standard library is buildable by Scala 3 - if: type = pull_request OR repo != scala/scala - workspaces: - use: bootstrapped - script: - - set -e - - STARR=$(sed -n 's/^maven\.version\.number=//p' buildcharacter.properties) && echo $STARR - - sbt -Dscala.build.compileWithDotty=true library/compile - - - stage: test - name: build benchmarks (bootstrapped) - if: type = pull_request OR repo != scala/scala - workspaces: - use: bootstrapped - script: - - set -e - - STARR=$(sed -n 's/^maven\.version\.number=//p' buildcharacter.properties) && echo $STARR - - sbt bench/Jmh/compile - - - stage: build - name: language spec (Jekyll) - # wkhtmltopdf requires libssl1.1, which we can't install on xenial - dist: bionic - language: ruby - install: - - ruby -v - - gem install bundler - - bundler --version - - bundle install - # cribbed from https://github.com/SebastiaanKlippert/go-wkhtmltopdf/blob/master/.travis.yml - - sudo apt-get update - - sudo apt-get install -y build-essential xorg xfonts-75dpi libpng16-16 libssl1.1 - - wget --quiet "https://github.com/wkhtmltopdf/packaging/releases/download/0.12.6-1/wkhtmltox_0.12.6-1.bionic_amd64.deb" - - sudo dpkg -i "wkhtmltox_0.12.6-1.bionic_amd64.deb" - - rm "wkhtmltox_0.12.6-1.bionic_amd64.deb" - script: - - set -e - - (cd admin && ./init.sh) - - bundle exec jekyll build -s spec/ -d build/spec - - export JEKYLL_ENV=spec-pdf - - bundle exec jekyll build -s spec/ -d build/spec-pdf - - ./scripts/generate-spec-pdf.sh - after_success: - - ./scripts/travis-publish-spec.sh + include: + - stage: build + if: (type = push OR type = api) AND repo = scala/scala # api for manually triggered release builds + name: publish (bootstrapped) to scala-integration or sonatype + script: + # see comment in `bootstrap_fun` for details on the 
procedure + # env available in each stage + # - by travis config (see below): secret env vars + # - by `common` script: WORKSPACE, IVY2_DIR, SBT_CMD, integrationRepoUrl + # - by `bootstrap_fun`: publishPrivateTask, ... + - set -e + - (cd admin && ./init.sh) + - source scripts/common + - source scripts/bootstrap_fun + - determineScalaVersion + - removeExistingBuilds $integrationRepoUrl + - if [ ! -z "$STARR_REF" ]; then buildStarr; fi + - buildLocker + - buildQuick + - triggerScalaDist + + - <<: *build-for-testing + <<: *pr-jdk8 + + - <<: *test1 + <<: *pr-jdk8 + + - <<: *test2 + <<: *pr-jdk8 + + - <<: *build-for-testing + <<: *cron-jdk16 + + - <<: *test1 + <<: *cron-jdk16 + + - <<: *test2 + <<: *cron-jdk16 + + - stage: test + name: build library with Scala 3 + if: type = pull_request OR repo != scala/scala + workspaces: + use: bootstrapped + script: + - set -e + - STARR=$(sed -n 's/^maven\.version\.number=//p' buildcharacter.properties) && echo $STARR + - sbt -Dscala.build.compileWithDotty=true library/compile + + - name: build benchmarks + if: type = pull_request OR repo != scala/scala + workspaces: + use: bootstrapped + script: + - set -e + - STARR=$(sed -n 's/^maven\.version\.number=//p' buildcharacter.properties) && echo $STARR + - sbt bench/Jmh/compile + + - stage: build + if: type = pull_request OR type = push + name: language spec + # wkhtmltopdf requires libssl1.1, which we can't install on xenial + dist: bionic + language: ruby + install: + - ruby -v + - gem install bundler + - bundler --version + - bundle install + # cribbed from https://github.com/SebastiaanKlippert/go-wkhtmltopdf/blob/master/.travis.yml + - sudo apt-get update + - sudo apt-get install -y build-essential xorg xfonts-75dpi libpng16-16 libssl1.1 + - wget --quiet "https://github.com/wkhtmltopdf/packaging/releases/download/0.12.6-1/wkhtmltox_0.12.6-1.bionic_amd64.deb" + - sudo dpkg -i "wkhtmltox_0.12.6-1.bionic_amd64.deb" + - rm "wkhtmltox_0.12.6-1.bionic_amd64.deb" + script: + - set -e + - (cd admin && ./init.sh) + - bundle exec jekyll build -s spec/ -d build/spec + - export JEKYLL_ENV=spec-pdf + - bundle exec jekyll build -s spec/ -d build/spec-pdf + - ./scripts/generate-spec-pdf.sh + after_success: + - ./scripts/travis-publish-spec.sh env: global: From 53d8e098750c6e774a7fec6a41b5d67dea3c24c1 Mon Sep 17 00:00:00 2001 From: Philippus Date: Thu, 22 Apr 2021 16:10:01 +0200 Subject: [PATCH 104/769] Add null check for getURLs-method --- .../reflect/runtime/ReflectionUtils.scala | 2 +- .../runtime/ReflectionUtilsShowTest.scala | 19 +++++++++++++++++++ 2 files changed, 20 insertions(+), 1 deletion(-) create mode 100644 test/junit/scala/reflect/runtime/ReflectionUtilsShowTest.scala diff --git a/src/reflect/scala/reflect/runtime/ReflectionUtils.scala b/src/reflect/scala/reflect/runtime/ReflectionUtils.scala index 3ecfd00a65c8..1b6060466ed4 100644 --- a/src/reflect/scala/reflect/runtime/ReflectionUtils.scala +++ b/src/reflect/scala/reflect/runtime/ReflectionUtils.scala @@ -47,7 +47,7 @@ object ReflectionUtils { isAbstractFileClassLoader(clazz.getSuperclass) } def inferClasspath(cl: ClassLoader): String = cl match { - case cl: java.net.URLClassLoader => + case cl: java.net.URLClassLoader if cl.getURLs != null => (cl.getURLs mkString ",") case cl if cl != null && isAbstractFileClassLoader(cl.getClass) => cl.asInstanceOf[{val root: scala.reflect.io.AbstractFile}].root.canonicalPath diff --git a/test/junit/scala/reflect/runtime/ReflectionUtilsShowTest.scala b/test/junit/scala/reflect/runtime/ReflectionUtilsShowTest.scala new 
file mode 100644 index 000000000000..d7e90be1af42 --- /dev/null +++ b/test/junit/scala/reflect/runtime/ReflectionUtilsShowTest.scala @@ -0,0 +1,19 @@ +package scala.reflect.runtime + +import java.net.{URL, URLClassLoader} + +import org.junit.Test +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 + +@RunWith(classOf[JUnit4]) +class ReflectionUtilsShowTest { + @Test def testGetUrlsCanReturnNull(): Unit = { + val sut = new MyClassLoader(Array.empty[URL]) + assert(ReflectionUtils.show(sut).contains("")) + } +} + +class MyClassLoader(urls: Array[URL]) extends URLClassLoader(urls) { + override def getURLs: Array[URL] = null +} From 21b185029e123d7944cb4d5ee4e4c2e0b91b53a7 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Wed, 21 Apr 2021 10:53:54 -0700 Subject: [PATCH 105/769] GitHub Actions: Windows CI: align with Travis-CI, use simpler bootstrap --- .github/workflows/ci.yml | 19 +++---------------- 1 file changed, 3 insertions(+), 16 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 51ca62cefcca..70647980f2e2 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -37,24 +37,11 @@ jobs: ~/.cache/coursier key: ${{ runner.os }}-sbt-cache-v2-${{ hashFiles('**/*.sbt') }}-${{ hashFiles('project/build.properties') }} - - name: Setup - run: | - source scripts/common - java -version - javac -version - generateRepositoriesConfig - # Pass these environment vars to subsequent steps - echo "SBT=sbt -Dsbt.override.build.repos=true -Dsbt.repository.config=${sbtRepositoryConfig}" >> $GITHUB_ENV - echo "COURSIER_HOME=$HOME/.coursier" >> "$GITHUB_ENV" - echo "COURSIER_CACHE=$HOME/.cache/coursier/v1" >> "$GITHUB_ENV" - - name: Build run: | - source scripts/common - $SBT -warn setupPublishCore generateBuildCharacterPropertiesFile publishLocal + sbt setupPublishCore generateBuildCharacterPropertiesFile headerCheck publishLocal - name: Test run: | - source scripts/common - parseScalaProperties buildcharacter.properties - $SBT -Dstarr.version=$maven_version_number -warn setupValidateTest testAll + STARR=`cat buildcharacter.properties | grep ^maven.version.number | cut -d= -f2` && echo $STARR + sbt -Dstarr.version=$STARR setupValidateTest test:compile info testAll From 7ea03477ea8ecc653675befd3c31c4a99eb7df04 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Mon, 19 Apr 2021 15:42:01 +0100 Subject: [PATCH 106/769] Rewrite TypeApply's type arguments to use explicit outer references --- .../tools/nsc/transform/ExplicitOuter.scala | 25 +++++++++++++++++++ test/files/run/t12312.scala | 25 +++++++++++++++++++ 2 files changed, 50 insertions(+) create mode 100644 test/files/run/t12312.scala diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala index bb2778927539..a271dcbc57cf 100644 --- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala +++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala @@ -488,6 +488,31 @@ abstract class ExplicitOuter extends InfoTransform transform(treeCopy.Apply(tree, treeCopy.Select(eqsel, outerSelect, eq), args)) } + // (t12312) C.this.a().X().isInstanceOf[C.this.a.X.type]() --> + // D.this.$outer().a().X().isInstanceOf[D.this.$outer.a.X.type]() + case TypeApply(fun, targs) => + val rewriteTypeToExplicitOuter = new TypeMap { typeMap => + def apply(tp: Type) = tp map { + case ThisType(sym) if sym != currentClass && !(sym.hasModuleFlag && sym.isStatic) => + var cls = currentClass + var tpe = cls.thisType + do { + tpe = singleType(tpe, 
outerAccessor(cls))
+ cls = cls.outerClass
+ } while (cls != NoSymbol && sym != cls)
+ tpe.mapOver(typeMap)
+ case tp => tp.mapOver(typeMap)
+ }
+ }
+ val fun2 = transform(fun)
+ val targs2 = targs.mapConserve { targ0 =>
+ val targ = transform(targ0)
+ val targTp = targ.tpe
+ val targTp2 = rewriteTypeToExplicitOuter(targTp.dealias)
+ if (targTp eq targTp2) targ else TypeTree(targTp2).setOriginal(targ)
+ }
+ treeCopy.TypeApply(tree, fun2, targs2)
+
 case _ =>
 val x = super.transform(tree)
 if (x.tpe eq null) x
diff --git a/test/files/run/t12312.scala b/test/files/run/t12312.scala
new file mode 100644
index 000000000000..3ad1c4542b75
--- /dev/null
+++ b/test/files/run/t12312.scala
@@ -0,0 +1,25 @@
+class A { object X }
+
+class C {
+ val a, b = new A; import a.X
+ class D {
+ def isInstanceOf_aX(z: AnyRef) = z.isInstanceOf[X.type]
+ class E {
+ def isInstanceOf_aX(z: AnyRef) = z.isInstanceOf[X.type]
+ }
+ }
+}
+
+object Test extends C {
+ def main(args: Array[String]): Unit = {
+ val d = new D()
+ assert(d.isInstanceOf_aX(a.X))
+ assert(!d.isInstanceOf_aX(b.X))
+ assert(!d.isInstanceOf_aX(new Object))
+
+ val e = new d.E()
+ assert(e.isInstanceOf_aX(a.X))
+ assert(!e.isInstanceOf_aX(b.X))
+ assert(!e.isInstanceOf_aX(new Object))
+ }
+}
From fee1f61aa115e92b6bdedb6817eaa3ed2fb1a596 Mon Sep 17 00:00:00 2001
From: Ikko Ashimine
Date: Fri, 23 Apr 2021 01:46:56 +0900
Subject: [PATCH 107/769] Fix typo in ExprBuilder.scala
containg -> containing
---
 src/compiler/scala/tools/nsc/transform/async/ExprBuilder.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/src/compiler/scala/tools/nsc/transform/async/ExprBuilder.scala b/src/compiler/scala/tools/nsc/transform/async/ExprBuilder.scala
index 9761bf0ed6dd..6cecc2487382 100644
--- a/src/compiler/scala/tools/nsc/transform/async/ExprBuilder.scala
+++ b/src/compiler/scala/tools/nsc/transform/async/ExprBuilder.scala
@@ -303,7 +303,7 @@ trait ExprBuilder extends TransformUtils with AsyncAnalysis {
 buildStateAndOpenNextState(afterLabelState, style = StateTransitionStyle.None)
 }
 } else if (containsAwait(rhs)) {
- // A while loop containg an await. We assuming that the the backward branch is reachable across the async
+ // A while loop containing an await. We assuming that the the backward branch is reachable across the async
// // In theory we could avoid creating this state in code like: From 0fbe2d5c1b8d79b6978fed56834fd3ce02503a45 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Thu, 22 Apr 2021 16:27:01 +0200 Subject: [PATCH 108/769] Cache -Xsource comparisons in currentRun --- src/compiler/scala/tools/nsc/Global.scala | 5 +++++ src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 2 +- src/compiler/scala/tools/nsc/ast/parser/Scanners.scala | 2 +- src/compiler/scala/tools/nsc/transform/UnCurry.scala | 2 +- .../scala/tools/nsc/typechecker/ContextErrors.scala | 2 +- src/compiler/scala/tools/nsc/typechecker/Contexts.scala | 2 +- src/compiler/scala/tools/nsc/typechecker/Implicits.scala | 8 ++++---- src/compiler/scala/tools/nsc/typechecker/Namers.scala | 2 +- .../scala/tools/nsc/typechecker/NamesDefaults.scala | 4 ++-- src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 4 ++-- src/compiler/scala/tools/nsc/typechecker/Typers.scala | 4 ++-- src/compiler/scala/tools/nsc/typechecker/Unapplies.scala | 2 +- 12 files changed, 22 insertions(+), 17 deletions(-) diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 74a9454a80b5..15352aa947d7 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -1142,6 +1142,11 @@ class Global(var currentSettings: Settings, reporter0: Reporter) val profiler: Profiler = Profiler(settings) keepPhaseStack = settings.log.isSetByUser + // We hit these checks regularly. They shouldn't change inside the same run, so cache the comparisons here. + val isScala211: Boolean = settings.isScala211 + val isScala212: Boolean = settings.isScala212 + val isScala213: Boolean = settings.isScala213 + // used in sbt def uncheckedWarnings: List[(Position, String)] = reporting.uncheckedWarnings // used in sbt diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 650dc1722ba6..f3c08f93737f 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -2298,7 +2298,7 @@ self => if (vds.isEmpty) syntaxError(start, s"case classes must have a parameter list; try 'case class $name()' or 'case object $name'") else if (vds.head.nonEmpty && vds.head.head.mods.isImplicit) { - if (settings.isScala213) + if (currentRun.isScala213) syntaxError(start, s"case classes must have a non-implicit parameter list; try 'case class $name()$elliptical'") else { deprecationWarning(start, s"case classes should have a non-implicit parameter list; adapting to 'case class $name()$elliptical'", "2.12.2") diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala index 26abb5b837f6..d9eeca9c056a 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala @@ -610,7 +610,7 @@ trait Scanners extends ScannersCommon { val isEmptyCharLit = (ch == '\'') getLitChar() if (ch == '\'') { - if (isEmptyCharLit && settings.isScala213) + if (isEmptyCharLit && currentRun.isScala213) syntaxError("empty character literal (use '\\'' for single quote)") else { if (isEmptyCharLit) diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala index 305b723752f5..a90d9aa701eb 100644 --- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala +++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala @@ -435,7 +435,7 @@ 
abstract class UnCurry extends InfoTransform (sym ne null) && sym.elisionLevel.exists { level => if (sym.isMethod) level < settings.elidebelow.value else { - if (settings.isScala213) reporter.error(sym.pos, s"${sym.name}: Only methods can be marked @elidable!") + if (currentRun.isScala213) reporter.error(sym.pos, s"${sym.name}: Only methods can be marked @elidable!") false } } diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index 0fea82c35a95..700f154a4bfd 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -114,7 +114,7 @@ trait ContextErrors { def issueTypeError(err: AbsTypeError)(implicit context: Context) { context.issue(err) } def typeErrorMsg(context: Context, found: Type, req: Type) = - if (context.openImplicits.nonEmpty && !settings.XlogImplicits.value && settings.isScala213) + if (context.openImplicits.nonEmpty && !settings.XlogImplicits.value && currentRun.isScala213) // OPT: avoid error string creation for errors that won't see the light of day, but predicate // this on -Xsource:2.13 for bug compatibility with https://github.com/scala/scala/pull/7147#issuecomment-418233611 "type mismatch" diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index bb6f19138449..e22983a712a7 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -866,7 +866,7 @@ trait Contexts { self: Analyzer => isAccessible(sym, pre) && !(imported && { val e = scope.lookupEntry(name) - (e ne null) && (e.owner == scope) && (!settings.isScala212 || e.sym.exists) + (e ne null) && (e.owner == scope) && (!currentRun.isScala212 || e.sym.exists) }) /** Do something with the symbols with name `name` imported via the import in `imp`, diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index bfb3446874fc..7309cf5d9f3c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -1113,7 +1113,7 @@ trait Implicits { if(isView || wildPtNotInstantiable || matchesPtInst(firstPending)) typedImplicit(firstPending, ptChecked = true, isLocalToCallsite) else SearchFailure - if (typedFirstPending.isFailure && settings.isScala213) + if (typedFirstPending.isFailure && currentRun.isScala213) undoLog.undoTo(mark) // Don't accumulate constraints from typechecking or type error message creation for failed candidates // Pass the errors to `DivergentImplicitRecovery` so that it can note @@ -1214,7 +1214,7 @@ trait Implicits { * bound, the implicits infos which are members of these companion objects. */ private def companionImplicitMap(tp: Type): InfoMap = { - val isScala213 = settings.isScala213 + val isScala213 = currentRun.isScala213 /* Populate implicit info map by traversing all parts of type `tp`. * Parameters as for `getParts`. 
@@ -1626,9 +1626,9 @@ trait Implicits { val outSym = out.typeSymbol val fail = - if (out.annotations.isEmpty && (outSym == ObjectClass || (settings.isScala211 && outSym == AnyValClass))) + if (out.annotations.isEmpty && (outSym == ObjectClass || (currentRun.isScala211 && outSym == AnyValClass))) maybeInvalidConversionError(s"the result type of an implicit conversion must be more specific than $out") - else if (settings.isScala211 && in.annotations.isEmpty && in.typeSymbol == NullClass) + else if (currentRun.isScala211 && in.annotations.isEmpty && in.typeSymbol == NullClass) maybeInvalidConversionError("an expression of type Null is ineligible for implicit conversion") else false diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 60de89831879..5cad833c0bcc 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -1707,7 +1707,7 @@ trait Namers extends MethodSynthesis { val valOwner = owner.owner // there's no overriding outside of classes, and we didn't use to do this in 2.11, so provide opt-out - if (!settings.isScala212 || !valOwner.isClass) WildcardType + if (!currentRun.isScala212 || !valOwner.isClass) WildcardType else { // normalize to getter so that we correctly consider a val overriding a def // (a val's name ends in a " ", so can't compare to def) diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala index 37ccf6bd58e8..822583029945 100644 --- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala +++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala @@ -609,7 +609,7 @@ trait NamesDefaults { self: Analyzer => case _ => false } params indexWhere (p => matchesName(p, name, argIndex)) match { - case -1 if positionalAllowed && !settings.isScala213 => + case -1 if positionalAllowed && !currentRun.isScala213 => if (isVariableInScope(context0, name)) { // only issue the deprecation warning if `name` is in scope, this avoids the warning when mis-spelling a parameter name. 
context0.deprecationWarning( @@ -629,7 +629,7 @@ trait NamesDefaults { self: Analyzer => case AssignOrNamedArg(Ident(oName), _) if oName != name => oName } DoubleParamNamesDefaultError(arg, name, existingArgIndex+1, otherName) - case paramPos if !settings.isScala213 && !invokesDefault && isAmbiguousAssignment(typer, params(paramPos), arg) => + case paramPos if !currentRun.isScala213 && !invokesDefault && isAmbiguousAssignment(typer, params(paramPos), arg) => AmbiguousReferenceInNamesDefaultError(arg, name) case paramPos if paramPos != argIndex => positionalAllowed = false // named arg is not in original parameter order: require names after this diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index 0b02e96a58ae..122d85d7f2eb 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -143,7 +143,7 @@ abstract class RefChecks extends Transform { case _ => false } val haveDefaults = methods filter ( - if (settings.isScala211) + if (currentRun.isScala211) (sym => mexists(sym.info.paramss)(_.hasDefault) && !nme.isProtectedAccessorName(sym.name)) else (sym => hasDefaultParam(sym.info) && !nme.isProtectedAccessorName(sym.name)) @@ -1486,7 +1486,7 @@ abstract class RefChecks extends Transform { if (!sym.isMethod || sym.isAccessor || sym.isLazy || sym.isDeferred) reporter.error(sym.pos, s"${sym.name}: Only methods can be marked @elidable.") } - if (settings.isScala213) checkIsElisible(tree.symbol) + if (currentRun.isScala213) checkIsElisible(tree.symbol) tree match { case m: MemberDef => diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 0a88e8e1a56c..a0bc729890eb 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -1893,7 +1893,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val impl2 = finishMethodSynthesis(impl1, clazz, context) - if (settings.isScala211 && mdef.symbol == PredefModule) + if (currentRun.isScala211 && mdef.symbol == PredefModule) ensurePredefParentsAreInSameSourceFile(impl2) treeCopy.ModuleDef(mdef, typedMods, mdef.name, impl2) setType NoType @@ -3472,7 +3472,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // and lubbing the argument types (we treat SAM and FunctionN types equally, but non-function arguments // do not receive special treatment: they are typed under WildcardType.) 
val altArgPts = - if (settings.isScala212 && args.exists(treeInfo.isFunctionMissingParamType)) + if (currentRun.isScala212 && args.exists(treeInfo.isFunctionMissingParamType)) try alts.map(alt => formalTypes(alt.info.paramTypes, argslen).map(ft => (ft, alt))).transpose // do least amount of work up front catch { case _: IllegalArgumentException => args.map(_ => Nil) } // fail safe in case formalTypes fails to align to argslen else args.map(_ => Nil) // will type under argPt == WildcardType diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala index e4862d6872f6..200a92bfdeaf 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala @@ -159,7 +159,7 @@ trait Unapplies extends ast.TreeDSL { case _ => nme.unapply } val cparams = List(ValDef(Modifiers(PARAM | SYNTHETIC), unapplyParamName, classType(cdef, tparams), EmptyTree)) - val resultType = if (!settings.isScala212) TypeTree() else { // fix for scala/bug#6541 under -Xsource:2.12 + val resultType = if (!currentRun.isScala212) TypeTree() else { // fix for scala/bug#6541 under -Xsource:2.12 def repeatedToSeq(tp: Tree) = tp match { case AppliedTypeTree(Select(_, tpnme.REPEATED_PARAM_CLASS_NAME), tps) => AppliedTypeTree(gen.rootScalaDot(tpnme.Seq), tps) case _ => tp From 6dccef67eb68518c170e0e7001ca2681a00bfc63 Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Thu, 22 Apr 2021 16:29:03 +0200 Subject: [PATCH 109/769] Add infrastructure for -Xsource:3 support --- src/compiler/scala/tools/nsc/Global.scala | 1 + src/compiler/scala/tools/nsc/settings/ScalaSettings.scala | 2 ++ 2 files changed, 3 insertions(+) diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 15352aa947d7..f2f10792e7d6 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -1146,6 +1146,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) val isScala211: Boolean = settings.isScala211 val isScala212: Boolean = settings.isScala212 val isScala213: Boolean = settings.isScala213 + val isScala3: Boolean = settings.isScala3 // used in sbt def uncheckedWarnings: List[(Position, String)] = reporting.uncheckedWarnings diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index e2598d1c0b6c..46e9497cebc5 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -106,6 +106,8 @@ trait ScalaSettings extends StandardScalaSettings with Warnings { _: MutableSett def isScala212: Boolean = source.value >= version212 private[this] val version213 = ScalaVersion("2.13.0") def isScala213: Boolean = source.value >= version213 + private[this] val version3 = ScalaVersion("3.0.0") + def isScala3: Boolean = source.value >= version3 /** * -X "Advanced" settings From 02251e9ad56aa0555b363419d1f9c72de5eb2291 Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Tue, 30 Mar 2021 14:15:08 +0200 Subject: [PATCH 110/769] Support `case` in pattern bindings under -Xsource:3 Just like in Scala 3.0, adding this keyword doesn't change anything, but it will be required in future versions of Scala 3 for non-exhaustive patterns in a for comprehension. 
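For example (mirroring the tests added below), a non-exhaustive pattern
in a for comprehension can now be marked with `case`:

  for {
    case Some(x) <- List(Some(1), None)
  } yield x

and it behaves exactly as it does without the `case` keyword.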
We would like to start issuing warnings by default in Scala 3 for code which does not use `case` in those situations, but to not hamper cross-compilation we need Scala 2 to also support that keyword. For details, see: https://dotty.epfl.ch/docs/reference/changed-features/pattern-bindings.html --- .../scala/tools/nsc/ast/parser/Parsers.scala | 8 ++++++- .../scala/tools/nsc/ast/parser/Scanners.scala | 10 ++++++++ .../neg/for-comprehension-case-future.check | 7 ++++++ .../neg/for-comprehension-case-future.scala | 24 +++++++++++++++++++ test/files/neg/for-comprehension-case.check | 13 ++++++++++ test/files/neg/for-comprehension-case.scala | 14 +++++++++++ 6 files changed, 75 insertions(+), 1 deletion(-) create mode 100644 test/files/neg/for-comprehension-case-future.check create mode 100644 test/files/neg/for-comprehension-case-future.scala create mode 100644 test/files/neg/for-comprehension-case.check create mode 100644 test/files/neg/for-comprehension-case.scala diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index f3c08f93737f..99df44e35b4e 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -1860,6 +1860,12 @@ self => */ def generator(eqOK: Boolean, allowNestedIf: Boolean = true): List[Tree] = { val start = in.offset + val hasCase = in.token == CASE + if (hasCase) { + if (!currentRun.isScala3) syntaxError(in.offset, s"`case` keyword in for comprehension requires the -Xsource:3 flag.") + in.skipCASE() + } + val hasVal = in.token == VAL if (hasVal) in.nextToken() @@ -1873,7 +1879,7 @@ self => else syntaxError(in.offset, "val in for comprehension must be followed by assignment") } - if (hasEq && eqOK) in.nextToken() + if (hasEq && eqOK && !hasCase) in.nextToken() else accept(LARROW) val rhs = expr() diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala index d9eeca9c056a..27114358a972 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala @@ -331,6 +331,16 @@ trait Scanners extends ScannersCommon { } } + /** Advance beyond a case token without marking the CASE in sepRegions. + * This method should be called to skip beyond CASE tokens that are + * not part of matches, i.e. no ARROW is expected after them. + */ + def skipCASE(): Unit = { + assert(token == CASE, s"Internal error: skipCASE() called on non-case token $token") + nextToken() + sepRegions = sepRegions.tail + } + /** Produce next token, filling TokenData fields of Scanner. */ def nextToken(): Unit = { diff --git a/test/files/neg/for-comprehension-case-future.check b/test/files/neg/for-comprehension-case-future.check new file mode 100644 index 000000000000..02dab922e0d4 --- /dev/null +++ b/test/files/neg/for-comprehension-case-future.check @@ -0,0 +1,7 @@ +for-comprehension-case-future.scala:22: error: '<-' expected but '=' found. 
+ case y = x + 1 + ^ +for-comprehension-case-future.scala:23: error: illegal start of simple expression + } yield x + y + ^ +two errors found diff --git a/test/files/neg/for-comprehension-case-future.scala b/test/files/neg/for-comprehension-case-future.scala new file mode 100644 index 000000000000..05602e537759 --- /dev/null +++ b/test/files/neg/for-comprehension-case-future.scala @@ -0,0 +1,24 @@ +// scalac: -Xsource:3 +// +class A { + // ok + val a = + for { + case Some(x) <- List(Some(1), None) + y = x + 1 + } yield x + y + + // ok + val b = + for { + Some(x) <- List(Some(1), None) + Some(y) <- List(None, Some(2)) + } yield x+y + + // fail + val c = + for { + case Some(x) <- List(Some(1), None) + case y = x + 1 + } yield x + y +} diff --git a/test/files/neg/for-comprehension-case.check b/test/files/neg/for-comprehension-case.check new file mode 100644 index 000000000000..b1f2eb0849c5 --- /dev/null +++ b/test/files/neg/for-comprehension-case.check @@ -0,0 +1,13 @@ +for-comprehension-case.scala:5: error: `case` keyword in for comprehension requires the -Xsource:3 flag. + case Some(x) <- List(Some(1), None) + ^ +for-comprehension-case.scala:12: error: `case` keyword in for comprehension requires the -Xsource:3 flag. + case y = x + 1 + ^ +for-comprehension-case.scala:12: error: '<-' expected but '=' found. + case y = x + 1 + ^ +for-comprehension-case.scala:13: error: illegal start of simple expression + } yield x+y + ^ +four errors found diff --git a/test/files/neg/for-comprehension-case.scala b/test/files/neg/for-comprehension-case.scala new file mode 100644 index 000000000000..55e8d44a40e3 --- /dev/null +++ b/test/files/neg/for-comprehension-case.scala @@ -0,0 +1,14 @@ +class A { + // fail + val a = + for { + case Some(x) <- List(Some(1), None) + } yield x + + // fail + val b = + for { + Some(x) <- List(Some(1), None) + case y = x + 1 + } yield x+y +} From ff0801318c8501370a6e8f0197d7b4f6a3999f7f Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Wed, 31 Mar 2021 15:16:54 +0200 Subject: [PATCH 111/769] Support `?` as wildcard marker under -Xsource:3 Like in Scala 3.0, this allows `?` to be used as a type argument in all situations where `_` could be used as a wildcard previously. This should allow us to deprecate the use of `_` as a wildcard in Scala 3 to be able to eventually repurpose it as explained in http://dotty.epfl.ch/docs/reference/changed-features/wildcards.html This is a source-breaking change since a type named `?` is legal in Scala 2 (but not in Scala 3 unless -source 3.0-migration is used). `?` also has a special meaning when the kind-projector plugin is used, but that usage has been deprecated in favor of `*` for a while now. 
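For example (as exercised by the new pos test), one can now write

  val xs: List[?] = List(1, 2, 3)
  val ys: Map[? <: AnyRef, ? >: Null] = Map()

where `?` plays the role previously written with `_`, as in `List[_]`
and `Map[_ <: AnyRef, _ >: Null]`.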
--- .../scala/tools/nsc/ast/parser/Parsers.scala | 32 +++++++++++-------- .../scala/reflect/internal/StdNames.scala | 1 + test/files/pos/wildcards-future.scala | 21 ++++++++++++ 3 files changed, 40 insertions(+), 14 deletions(-) create mode 100644 test/files/pos/wildcards-future.scala diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 99df44e35b4e..9b7203aeb3db 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -704,6 +704,10 @@ self => def isRawBar = isRawIdent && in.name == raw.BAR def isRawIdent = in.token == IDENTIFIER + def isWildcardType = + in.token == USCORE || + currentRun.isScala3 && isRawIdent && in.name == raw.QMARK + def isIdent = in.token == IDENTIFIER || in.token == BACKQUOTED_IDENT def isMacro = in.token == IDENTIFIER && in.name == nme.MACROkw @@ -1042,12 +1046,14 @@ self => val start = in.offset simpleTypeRest(in.token match { case LPAREN => atPos(start)(makeSafeTupleType(inParens(types()), start)) - case USCORE => wildcardType(in.skipToken()) case _ => - path(thisOK = false, typeOK = true) match { - case r @ SingletonTypeTree(_) => r - case r => convertToTypeId(r) - } + if (isWildcardType) + wildcardType(in.skipToken()) + else + path(thisOK = false, typeOK = true) match { + case r @ SingletonTypeTree(_) => r + case r => convertToTypeId(r) + } }) } @@ -1915,18 +1921,16 @@ self => def functionArgType(): Tree = argType() def argType(): Tree = { val start = in.offset - in.token match { - case USCORE => + if (isWildcardType) { in.nextToken() if (in.token == SUBTYPE || in.token == SUPERTYPE) wildcardType(start) else atPos(start) { Bind(tpnme.WILDCARD, EmptyTree) } - case _ => - typ() match { - case Ident(name: TypeName) if nme.isVariableName(name) => - atPos(start) { Bind(name, EmptyTree) } - case t => t - } - } + } else + typ() match { + case Ident(name: TypeName) if nme.isVariableName(name) => + atPos(start) { Bind(name, EmptyTree) } + case t => t + } } /** {{{ diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala index ab988783bd4e..ff23a9ee88cf 100644 --- a/src/reflect/scala/reflect/internal/StdNames.scala +++ b/src/reflect/scala/reflect/internal/StdNames.scala @@ -924,6 +924,7 @@ trait StdNames { final val PLUS : NameType = "+" final val STAR : NameType = "*" final val TILDE: NameType = "~" + final val QMARK: NameType = "?" final val isUnary: Set[Name] = Set(MINUS, PLUS, TILDE, BANG) } diff --git a/test/files/pos/wildcards-future.scala b/test/files/pos/wildcards-future.scala new file mode 100644 index 000000000000..928cab3648b0 --- /dev/null +++ b/test/files/pos/wildcards-future.scala @@ -0,0 +1,21 @@ +// scalac: -Xsource:3 +// +object Test { + val xs: List[?] = List(1, 2, 3) + val ys: Map[? <: AnyRef, ? >: Null] = Map() + + def foo(x: Any) = x match { + case x: List[?] => x + case _ => x + } + + // Only allowed in Scala 3 under -source 3.0-migration + type ? 
= Int + + val xs2: List[`?`] = List(1) + val xs3: List[Int] = xs2 + + def foo2(x: List[`?`]): List[Int] = x match { + case x: List[`?`] => x + } +} From 4aba41ff36e7e5920acaad3f2bcc0e62689a3427 Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Mon, 19 Apr 2021 17:21:17 +0200 Subject: [PATCH 112/769] Support Scala 3 wildcard and renaming imports under -Xsource:3 Instead of: import foo._ One can now write: import foo.* and instead of: import foo.{bar => baz} One can now write: import foo.{bar as baz} As well as: import foo.bar as baz This will let us deprecate the old syntax in a future release of Scala 3 (it's currently only deprecated under `-source future`). See http://dotty.epfl.ch/docs/reference/changed-features/imports.html for details but note that unlike Scala 3 this commit does not implement support for: import java as j As that would require deeper changes in the compiler. --- .../scala/tools/nsc/ast/parser/Parsers.scala | 51 ++++++++++++------- .../scala/reflect/internal/StdNames.scala | 3 ++ test/files/neg/import-future.check | 4 ++ test/files/neg/import-future.scala | 27 ++++++++++ test/files/pos/import-future.scala | 25 +++++++++ 5 files changed, 92 insertions(+), 18 deletions(-) create mode 100644 test/files/neg/import-future.check create mode 100644 test/files/neg/import-future.scala create mode 100644 test/files/pos/import-future.scala diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 9b7203aeb3db..ed40ba59dadf 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -2513,19 +2513,27 @@ self => def loop(expr: Tree): Tree = { expr setPos expr.pos.makeTransparent val selectors: List[ImportSelector] = in.token match { - case USCORE => List(importSelector()) // import foo.bar._; - case LBRACE => importSelectors() // import foo.bar.{ x, y, z } - case _ => - val nameOffset = in.offset - val name = ident() - if (in.token == DOT) { - // import foo.bar.ident. and so create a select node and recurse. - val t = atPos(start, if (name == nme.ERROR) in.offset else nameOffset)(Select(expr, name)) - in.nextToken() - return loop(t) + case USCORE => + List(importSelector()) // import foo.bar._ + case IDENTIFIER if currentRun.isScala3 && in.name == raw.STAR => + List(importSelector()) // import foo.bar.* + case LBRACE => + importSelectors() // import foo.bar.{ x, y, z } + case _ => + if (settings.isScala3 && lookingAhead { isRawIdent && in.name == nme.as }) + List(importSelector()) // import foo.bar as baz + else { + val nameOffset = in.offset + val name = ident() + if (in.token == DOT) { + // import foo.bar.ident. and so create a select node and recurse. + val t = atPos(start, if (name == nme.ERROR) in.offset else nameOffset)(Select(expr, name)) + in.nextToken() + return loop(t) + } + // import foo.bar.Baz; + else List(makeImportSelector(name, nameOffset)) } - // import foo.bar.Baz; - else List(makeImportSelector(name, nameOffset)) } // reaching here means we're done walking. 
atPos(start)(Import(expr, selectors)) @@ -2568,18 +2576,25 @@ self => */ def importSelector(): ImportSelector = { val start = in.offset - val name = wildcardOrIdent() + val name = + if (currentRun.isScala3 && isRawIdent && in.name == raw.STAR) { + in.nextToken() + nme.WILDCARD + } + else wildcardOrIdent() var renameOffset = -1 - val rename = in.token match { - case ARROW => + val rename = + if (in.token == ARROW || (currentRun.isScala3 && isRawIdent && in.name == nme.as)) { in.nextToken() renameOffset = in.offset wildcardOrIdent() - case _ if name == nme.WILDCARD => null - case _ => + } + else if (name == nme.WILDCARD) null + else { renameOffset = start name - } + } + ImportSelector(name, start, rename, renameOffset) } diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala index ff23a9ee88cf..fc919570a77b 100644 --- a/src/reflect/scala/reflect/internal/StdNames.scala +++ b/src/reflect/scala/reflect/internal/StdNames.scala @@ -627,6 +627,9 @@ trait StdNames { val long2Long: NameType = "long2Long" val boolean2Boolean: NameType = "boolean2Boolean" + // Scala 3 import syntax + val as: NameType = "as" + // Compiler utilized names val AnnotatedType: NameType = "AnnotatedType" diff --git a/test/files/neg/import-future.check b/test/files/neg/import-future.check new file mode 100644 index 000000000000..282b1ae95e4c --- /dev/null +++ b/test/files/neg/import-future.check @@ -0,0 +1,4 @@ +import-future.scala:15: error: not found: value unrelated + unrelated(1) // error + ^ +one error found diff --git a/test/files/neg/import-future.scala b/test/files/neg/import-future.scala new file mode 100644 index 000000000000..288fd3d0e240 --- /dev/null +++ b/test/files/neg/import-future.scala @@ -0,0 +1,27 @@ +// scalac: -Xsource:3 +// + +class D { + def *(y: Int): Int = y + def unrelated(y: Int): Int = y +} + +object Test { + val d = new D + + def one: Int = { + import d.`*` + + unrelated(1) // error + + *(1) + } + + def two: Int = { + import d.* + + unrelated(1) + + *(1) + } +} diff --git a/test/files/pos/import-future.scala b/test/files/pos/import-future.scala new file mode 100644 index 000000000000..cfaff804af02 --- /dev/null +++ b/test/files/pos/import-future.scala @@ -0,0 +1,25 @@ +// scalac: -Xsource:3 +// + +import java.io as jio +import scala.{collection as c} + +import c.mutable as mut +import mut.ArrayBuffer as Buf + +object O { + val x: jio.IOException = ??? + val y = Buf(1, 2, 3) + + type OString = String + def foo22(x: Int) = x +} + +class C { + import O.{ foo22 as foo, OString as OS } + println(foo(22)) + val s: OS = "" + + import mut.* + val ab = ArrayBuffer(1) +} From d79e333347de402671b6ad34709267b28ff83999 Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Tue, 20 Apr 2021 15:40:58 +0200 Subject: [PATCH 113/769] Support Scala 3 vararg splice syntax under -Xsource:3 Instead of: foo(s: _*) One can now write: foo(s*) And instead of: case Seq(elems @ _*) => One can now write: case Seq(elems*) => See https://dotty.epfl.ch/docs/reference/changed-features/vararg-splices.html for details. 
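A self-contained example (taken from the new pos test):

  def foo(xs: Int*): Seq[Int] = xs

  val s: Seq[Int] = Seq(1, 2, 3)
  foo(s*)

  s match {
    case Seq(x, rest*) => println(rest)
  }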
--- .../scala/tools/nsc/ast/parser/Parsers.scala | 26 ++++++++++++++++--- test/files/pos/varargs-future.scala | 22 ++++++++++++++++ 2 files changed, 45 insertions(+), 3 deletions(-) create mode 100644 test/files/pos/varargs-future.scala diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index ed40ba59dadf..a7882771d8bb 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -874,6 +874,16 @@ self => } } + /** Is current ident a `*`, and is it followed by a `)` or `, )`? */ + def followingIsScala3Vararg(): Boolean = + currentRun.isScala3 && isRawStar && lookingAhead { + in.token == RPAREN || + in.token == COMMA && { + in.nextToken() + in.token == RPAREN + } + } + /* --------- OPERAND/OPERATOR STACK --------------------------------------- */ /** Modes for infix types. */ @@ -1654,7 +1664,7 @@ self => val start = in.offset val base = opstack - def loop(top: Tree): Tree = if (!isIdent) top else { + def loop(top: Tree): Tree = if (!isIdent || followingIsScala3Vararg()) top else { pushOpInfo(reduceExprStack(base, top)) newLineOptWhenFollowing(isExprIntroToken) if (isExprIntro) @@ -1665,7 +1675,12 @@ self => else finishPostfixOp(start, base, popOpInfo()) } - reduceExprStack(base, loop(prefixExpr())) + val expr = reduceExprStack(base, loop(prefixExpr())) + if (followingIsScala3Vararg()) + atPos(expr.pos.start) { + Typed(expr, atPos(in.skipToken()) { Ident(tpnme.WILDCARD_STAR) }) + } + else expr } /** {{{ @@ -2020,7 +2035,12 @@ self => if (isCloseDelim) atPos(top.pos.start, in.prev.offset)(Star(stripParens(top))) else EmptyTree ) - case _ => EmptyTree + case Ident(name) if isSequenceOK && followingIsScala3Vararg() => + atPos(top.pos.start) { + Bind(name, atPos(in.skipToken()) { Star(Ident(nme.WILDCARD)) }) + } + case _ => + EmptyTree } def loop(top: Tree): Tree = reducePatternStack(base, top) match { case next if isIdent && !isRawBar => pushOpInfo(next) ; loop(simplePattern(badPattern3)) diff --git a/test/files/pos/varargs-future.scala b/test/files/pos/varargs-future.scala new file mode 100644 index 000000000000..e8c9057e564b --- /dev/null +++ b/test/files/pos/varargs-future.scala @@ -0,0 +1,22 @@ +// scalac: -Xsource:3 +// + +class Test { + def foo(xs: Int*): Seq[Int] = xs + + val s: Seq[Int] = Seq(1, 2, 3) + foo(s*) + + // not very useful, but supported by Scala 3 (and matches what works with `: _*` syntax) + foo( + s*, + ) + + s match { + case Seq(elems*) => println(elems) + } + + s match { + case Seq(x, rest*) => println(rest) + } +} From 97ccdff639db32955d533582ee23975e58a0323e Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Thu, 1 Apr 2021 17:03:48 +0200 Subject: [PATCH 114/769] Allow soft keywords `open` and `infix` under -Xsource:3 Since everything is open and can be used infix by default in Scala 2, these keywords are no-op, but they're useful for cross-compiling with a future version of Scala 3 where they will be required in some cases (with Scala 3.0 they're only required to avoid warnings under `-source future`). See https://dotty.epfl.ch/docs/reference/changed-features/operators.html and http://dotty.epfl.ch/docs/reference/other-new-features/open-classes.html for details. 
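For example (from the new pos test), the following parses under
-Xsource:3 and behaves exactly as it would without the soft modifiers:

  open class A
  infix class B[T, S]

  class F {
    infix type X
    infix def foo(x: Int): Int = x
  }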
--- .../scala/tools/nsc/ast/parser/Parsers.scala | 32 ++++++++++++++--- .../scala/tools/nsc/ast/parser/Scanners.scala | 2 ++ .../scala/reflect/internal/StdNames.scala | 4 +++ test/files/neg/open-infix-future.check | 22 ++++++++++++ test/files/neg/open-infix-future.scala | 17 +++++++++ test/files/pos/open-infix-future.scala | 36 +++++++++++++++++++ 6 files changed, 108 insertions(+), 5 deletions(-) create mode 100644 test/files/neg/open-infix-future.check create mode 100644 test/files/neg/open-infix-future.scala create mode 100644 test/files/pos/open-infix-future.scala diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index a7882771d8bb..479fad69a2aa 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -673,6 +673,24 @@ self => case _ => false } + def isSoftModifier: Boolean = + currentRun.isScala3 && in.token == IDENTIFIER && softModifierNames.contains(in.name) + + /** Is the current token a soft modifier in a position where such a modifier is allowed? */ + def isValidSoftModifier: Boolean = + isSoftModifier && { + val mod = in.name + lookingAhead { + while (in.token == NEWLINE || isModifier || isSoftModifier) in.nextToken() + + in.token match { + case CLASS | CASECLASS => true + case DEF | TRAIT | TYPE => mod == nme.infix + case _ => false + } + } + } + def isAnnotation: Boolean = in.token == AT def isLocalModifier: Boolean = in.token match { @@ -727,12 +745,13 @@ self => def isSimpleExprIntro: Boolean = isExprIntroToken(in.token) - def isExprIntroToken(token: Token): Boolean = isLiteralToken(token) || (token match { + def isExprIntroToken(token: Token): Boolean = + !isValidSoftModifier && (isLiteralToken(token) || (token match { case IDENTIFIER | BACKQUOTED_IDENT | THIS | SUPER | IF | FOR | NEW | USCORE | TRY | WHILE | DO | RETURN | THROW | LPAREN | LBRACE | XMLSTART => true case _ => false - }) + })) def isExprIntro: Boolean = isExprIntroToken(in.token) @@ -2243,7 +2262,10 @@ self => in.nextToken() loop(mods) case _ => - mods + if (isValidSoftModifier) { + in.nextToken() + loop(mods) + } else mods } loop(NoMods) } @@ -3162,7 +3184,7 @@ self => case IMPORT => in.flushDoc importClause() - case _ if isAnnotation || isTemplateIntro || isModifier => + case _ if isAnnotation || isTemplateIntro || isModifier || isValidSoftModifier => joinComment(topLevelTmplDef :: Nil) } @@ -3212,7 +3234,7 @@ self => case IMPORT => in.flushDoc importClause() - case _ if isDefIntro || isModifier || isAnnotation => + case _ if isDefIntro || isModifier || isAnnotation || isValidSoftModifier => joinComment(nonLocalDefOrDcl) case _ if isExprIntro => in.flushDoc diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala index 27114358a972..408f74be55f2 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala @@ -1292,6 +1292,8 @@ trait Scanners extends ScannersCommon { final val token2name = (allKeywords map (_.swap)).toMap + final val softModifierNames = Set(nme.open, nme.infix) + // Token representation ---------------------------------------------------- /** Returns the string representation of given token. 
*/ diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala index fc919570a77b..b7e4e901fbd5 100644 --- a/src/reflect/scala/reflect/internal/StdNames.scala +++ b/src/reflect/scala/reflect/internal/StdNames.scala @@ -630,6 +630,10 @@ trait StdNames { // Scala 3 import syntax val as: NameType = "as" + // Scala 3 soft keywords + val infix: NameType = "infix" + val open: NameType = "open" + // Compiler utilized names val AnnotatedType: NameType = "AnnotatedType" diff --git a/test/files/neg/open-infix-future.check b/test/files/neg/open-infix-future.check new file mode 100644 index 000000000000..b39489cabad0 --- /dev/null +++ b/test/files/neg/open-infix-future.check @@ -0,0 +1,22 @@ +open-infix-future.scala:4: error: expected class or object definition +open trait A // error +^ +open-infix-future.scala:5: error: expected class or object definition +open object B // error +^ +open-infix-future.scala:8: error: ';' expected but 'val' found. + infix val a: Int = 1 // error + ^ +open-infix-future.scala:9: error: ';' expected but 'var' found. + infix var b: Int = 1 // error + ^ +open-infix-future.scala:11: error: ';' expected but 'type' found. + open type D // error + ^ +open-infix-future.scala:14: error: illegal start of statement + open class E // error + ^ +open-infix-future.scala:15: error: ';' expected but 'def' found. + open def bla(y: Int) = y // error + ^ +7 errors found diff --git a/test/files/neg/open-infix-future.scala b/test/files/neg/open-infix-future.scala new file mode 100644 index 000000000000..2a250f3b006e --- /dev/null +++ b/test/files/neg/open-infix-future.scala @@ -0,0 +1,17 @@ +// scalac: -Xsource:3 +// + +open trait A // error +open object B // error + +class C { + infix val a: Int = 1 // error + infix var b: Int = 1 // error + + open type D // error + + def foo: Unit = { + open class E // error + open def bla(y: Int) = y // error + } +} diff --git a/test/files/pos/open-infix-future.scala b/test/files/pos/open-infix-future.scala new file mode 100644 index 000000000000..8fee778d40cb --- /dev/null +++ b/test/files/pos/open-infix-future.scala @@ -0,0 +1,36 @@ +// scalac: -Xsource:3 +// + +open class A +infix class B[T, S] + +open infix class C[T, S] +open infix case class CC[T, S](x: Int) +infix open class D[T, S] +infix trait DT[T, S] + +open +infix +private +class E + +class F { + open infix class C1[T, S] + infix type X + + infix def foo(x: Int): Int = x +} + +object G { + open infix class C2[T, S] +} + +object Test { + val infix: Int = 1 + infix + 1 + val open: Int => Int = x => x + open(1) + open { + 2 + } +} From db2d31314eb37d3c83108a6622c9fae6f6c2c5a7 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Fri, 23 Apr 2021 11:39:49 +0200 Subject: [PATCH 115/769] add travis notifications to our slack --- .travis.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.travis.yml b/.travis.yml index bc80e7ca1f48..3cd3bd0f46a2 100644 --- a/.travis.yml +++ b/.travis.yml @@ -167,4 +167,10 @@ cache: - $HOME/.rvm notifications: + slack: + rooms: + - typesafe:WoewGgHil2FkdGzJyV3phAhj + if: type = cron OR type = push + on_success: never + on_failure: change webhooks: https://scala-ci.typesafe.com/benchq/webhooks/travis From 36eda20ac31fe3b797fc6cdc6b746a1044a3409b Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Thu, 22 Apr 2021 15:27:05 +0200 Subject: [PATCH 116/769] Partest tests can require a java version range ... using a `// javaVersion: N / N+ / N - M` comment in the test soruce. 
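For example, a test that must only run on Java 11 or newer declares

  // javaVersion: 11+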
The test is skipped if the java version is outside the range. --- CONTRIBUTING.md | 23 +++++++++-- .../scala/tools/partest/ConsoleLog.scala | 1 + .../scala/tools/partest/TestState.scala | 2 +- .../tools/partest/nest/AbstractRunner.scala | 3 +- .../scala/tools/partest/nest/Runner.scala | 39 ++++++++++++++++--- 5 files changed, 57 insertions(+), 11 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 78db0a59d6d8..59c9675e690d 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -115,8 +115,25 @@ To run a single negative test from sbt shell: root> partest --verbose test/files/neg/delayed-init-ref.scala ``` -To specify compiler flags such as `-Werror -Xlint`, you can add a comment -at the top of your source file of the form: `// scalac: -Werror -Xlint`. +A test can be either a single `.scala` file or a directory containing multiple `.scala` and `.java` files. +For testing separate compilation, files can be grouped using `_N` suffixes in the filename. For example, a test +with files (`A.scala`, `B_1.scala`, `C_1.java`, `Test_2.scala`) does: +``` +scalac A.scala -d out +scalac -cp out B_1.scala C_1.java -d out +javac -cp out C_1.java -d out +scalac -cp out Test_2.scala -d out +scala -cp out Test +``` + +**Flags** + - To specify compiler flags such as `-Werror -Xlint`, you can add a comment at the top of your source file of the form: `// scalac: -Werror -Xlint`. + - Similarly, a `// javac: ` comment in a Java source file passes flags to the Java compiler. + - A `// filter: ` comment eliminates output lines that match the filter before comparing to the `.check` file. + - A `// java: ` comment makes a `run` test execute in a separate JVM and passes the additional flags to the `java` command. + - A `// javaVersion ` comment makes partest skip the test if the java version is outside the requested range (e.g. `8`, `15+`, `9 - 11`) + +**Common Usage** To test that no warnings are emitted while compiling a `pos` test, use `-Werror`. That will fail a `pos` test if there are warnings. Note that `pos` tests do not have `.check` files. @@ -171,7 +188,7 @@ See `--help` for more info: root> partest --help ``` -Partests are compiled by the `quick` compiler (and `run` partests executed with the `quick` library), +Partests are compiled by the bootstrapped `quick` compiler (and `run` partests executed with the `quick` library), and therefore: * if you're working on the compiler, you must write a partest, or a `BytecodeTesting` JUnit test which invokes the compiler programmatically; however diff --git a/src/partest/scala/tools/partest/ConsoleLog.scala b/src/partest/scala/tools/partest/ConsoleLog.scala index 89feccd1ef78..5064f0fd5bfd 100644 --- a/src/partest/scala/tools/partest/ConsoleLog.scala +++ b/src/partest/scala/tools/partest/ConsoleLog.scala @@ -65,6 +65,7 @@ class ConsoleLog(colorEnabled: Boolean) { def echoWarning(msg: String) = echo(bold(red(msg))) def printDot(): Unit = printProgress(".") + def printS(): Unit = printProgress(_warning + "s" +_default) def printEx(): Unit = printProgress(_failure + "X" + _default) private def printProgress(icon: String): Unit = synchronized { if (dotCount >= DotWidth) { diff --git a/src/partest/scala/tools/partest/TestState.scala b/src/partest/scala/tools/partest/TestState.scala index 8867ffe72c8d..3b6dc49444ac 100644 --- a/src/partest/scala/tools/partest/TestState.scala +++ b/src/partest/scala/tools/partest/TestState.scala @@ -30,7 +30,7 @@ sealed abstract class TestState { def shortStatus = if (isOk) "ok" else "!!" 
- final def andAlso(next: => TestState): TestState = if (isOk) next else this + final def andAlso(next: => TestState): TestState = if (isOk && !isSkipped) next else this override def toString = status } diff --git a/src/partest/scala/tools/partest/nest/AbstractRunner.scala b/src/partest/scala/tools/partest/nest/AbstractRunner.scala index a38ca75e18ed..7f6dd9a5b794 100644 --- a/src/partest/scala/tools/partest/nest/AbstractRunner.scala +++ b/src/partest/scala/tools/partest/nest/AbstractRunner.scala @@ -99,7 +99,8 @@ class AbstractRunner(val config: RunnerSpec.Config, protected final val testSour diffed ::: logged } if (terse) { - if (state.isOk) { printDot() ; Nil } + if (state.isSkipped) { printS(); Nil } + else if (state.isOk) { printDot() ; Nil } else { printEx() ; statusLine(state, durationMs) :: errInfo } } else { echo(statusLine(state, durationMs)) diff --git a/src/partest/scala/tools/partest/nest/Runner.scala b/src/partest/scala/tools/partest/nest/Runner.scala index 8be3bd69d9af..67c3071c9877 100644 --- a/src/partest/scala/tools/partest/nest/Runner.scala +++ b/src/partest/scala/tools/partest/nest/Runner.scala @@ -514,9 +514,35 @@ class Runner(val testInfo: TestInfo, val suiteRunner: AbstractRunner) { def description = mkScalacString() lazy val result = { pushTranscript(description) ; attemptCompile(fs) } } + case class SkipRound(fs: List[File], state: TestState) extends CompileRound { + def description: String = state.status + lazy val result = { pushTranscript(description); state } + } + + def compilationRounds(file: File): List[CompileRound] = { + import scala.util.Properties.javaSpecVersion + val Range = """(\d+)(?:(\+)|(?: *\- *(\d+)))?""".r + lazy val currentJavaVersion = javaSpecVersion.stripPrefix("1.").toInt + val allFiles = sources(file) + val skipStates = toolArgsFor(allFiles)("javaVersion", split = false).flatMap({ + case v @ Range(from, plus, to) => + val ok = + if (plus == null) + if (to == null) currentJavaVersion == from.toInt + else from.toInt <= currentJavaVersion && currentJavaVersion <= to.toInt + else + currentJavaVersion >= from.toInt + if (ok) None + else Some(genSkip(s"skipped on Java $javaSpecVersion, only running on $v")) + case v => + Some(genFail(s"invalid javaVersion range in test comment: $v")) + }) + skipStates.headOption match { + case Some(state) => List(SkipRound(List(file), state)) + case _ => groupedFiles(allFiles).flatMap(mixedCompileGroup) + } + } - def compilationRounds(file: File): List[CompileRound] = - groupedFiles(sources(file)).map(mixedCompileGroup).flatten def mixedCompileGroup(allFiles: List[File]): List[CompileRound] = { val (scalaFiles, javaFiles) = allFiles partition (_.isScala) val round1 = if (scalaFiles.isEmpty) None else Some(ScalaAndJava(allFiles)) @@ -533,17 +559,18 @@ class Runner(val testInfo: TestInfo, val suiteRunner: AbstractRunner) { // pass if it checks and didn't crash the compiler // or, OK, we'll let you crash the compiler with a FatalError if you supply a check file def checked(r: CompileRound) = r.result match { + case s: Skip => s case crash @ Crash(_, t, _) if !checkFile.canRead || !t.isInstanceOf[FatalError] => crash - case dnc @ _ => diffIsOk + case _ => diffIsOk } - compilationRounds(testFile).find(!_.result.isOk).map(checked).getOrElse(genFail("expected compilation failure")) + compilationRounds(testFile).find(r => !r.result.isOk || r.result.isSkipped).map(checked).getOrElse(genFail("expected compilation failure")) } // run compilation until failure, evaluate `andAlso` on success def runTestCommon(andAlso: => 
TestState = genPass()): TestState = runInContext { // DirectCompiler already says compilation failed - val res = compilationRounds(testFile).find(!_.result.isOk).map(_.result).getOrElse(genPass()) + val res = compilationRounds(testFile).find(r => !r.result.isOk || r.result.isSkipped).map(_.result).getOrElse(genPass()) res andAlso andAlso } @@ -639,7 +666,7 @@ class Runner(val testInfo: TestInfo, val suiteRunner: AbstractRunner) { } private def runRunTest(): TestState = { - val argsFile = testFile changeExtension "javaopts" + val argsFile = testFile changeExtension "javaopts" // TODO: use `toolArgsFor` instead of a separate file val javaopts = readOptionsFile(argsFile) val execInProcess = PartestDefaults.execInProcess && javaopts.isEmpty && !Set("specialized", "instrumented").contains(testFile.getParentFile.getName) def exec() = if (execInProcess) execTestInProcess(outDir, logFile) else execTest(outDir, logFile) From 3c559d7994cb35ff85a205a93b573e34df7d1dd7 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Thu, 22 Apr 2021 15:32:40 +0200 Subject: [PATCH 117/769] test case for issue 9530 --- test/files/run/t12348.scala | 9 +++++++++ 1 file changed, 9 insertions(+) create mode 100644 test/files/run/t12348.scala diff --git a/test/files/run/t12348.scala b/test/files/run/t12348.scala new file mode 100644 index 000000000000..fdbb4d9465df --- /dev/null +++ b/test/files/run/t12348.scala @@ -0,0 +1,9 @@ +// javaVersion: 11+ + +object Test { + def main(args: Array[String]): Unit = { + val a = new Array[Object](1) + val h = java.lang.invoke.MethodHandles.arrayElementVarHandle(a.getClass) + val r = h.setVolatile(a, 0, "foo") // important: no expected type + } +} From 3baff01900e965cfee2823c190ca53e82d32ce65 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Thu, 22 Apr 2021 16:34:16 +0200 Subject: [PATCH 118/769] make some pre-jdk-8 tests more direct --- test/files/pos/sammy_java8/F.java | 6 +++++ test/files/pos/sammy_java8/Test.scala | 4 ++++ test/files/pos/t7398/Iterator.java | 10 ++++++++ test/files/pos/t7398/Test.scala | 5 ++++ test/files/pos/t8852/Interface.java | 5 ++++ test/files/pos/t8852/Test.scala | 5 ++++ test/files/run/sammy_java8.scala | 32 ------------------------- test/files/run/t7398.scala | 26 -------------------- test/files/run/t7825.scala | 34 --------------------------- test/files/run/t8852a.scala | 34 --------------------------- 10 files changed, 35 insertions(+), 126 deletions(-) create mode 100644 test/files/pos/sammy_java8/F.java create mode 100644 test/files/pos/sammy_java8/Test.scala create mode 100644 test/files/pos/t7398/Iterator.java create mode 100644 test/files/pos/t7398/Test.scala create mode 100644 test/files/pos/t8852/Interface.java create mode 100644 test/files/pos/t8852/Test.scala delete mode 100644 test/files/run/sammy_java8.scala delete mode 100644 test/files/run/t7398.scala delete mode 100644 test/files/run/t7825.scala delete mode 100644 test/files/run/t8852a.scala diff --git a/test/files/pos/sammy_java8/F.java b/test/files/pos/sammy_java8/F.java new file mode 100644 index 000000000000..5dac57a1e2ae --- /dev/null +++ b/test/files/pos/sammy_java8/F.java @@ -0,0 +1,6 @@ +public interface F { + U apply(T t); + default void yadayada() { + throw new UnsupportedOperationException("yadayada"); + } +} diff --git a/test/files/pos/sammy_java8/Test.scala b/test/files/pos/sammy_java8/Test.scala new file mode 100644 index 000000000000..61fcf4f0ce4f --- /dev/null +++ b/test/files/pos/sammy_java8/Test.scala @@ -0,0 +1,4 @@ +class T { + def app[T, U](x: T)(f: F[T, U]): U = f(x) + 
app(1)(x => List(x)) +} diff --git a/test/files/pos/t7398/Iterator.java b/test/files/pos/t7398/Iterator.java new file mode 100644 index 000000000000..75b5a8b303b7 --- /dev/null +++ b/test/files/pos/t7398/Iterator.java @@ -0,0 +1,10 @@ +public interface Iterator { + boolean hasNext(); + E next(); + default void remove() { + throw new UnsupportedOperationException("remove"); + } + default void forEachRemaining(java.util.function.Consumer action) { + throw new UnsupportedOperationException("forEachRemaining"); + } +} diff --git a/test/files/pos/t7398/Test.scala b/test/files/pos/t7398/Test.scala new file mode 100644 index 000000000000..2068acaa6dc7 --- /dev/null +++ b/test/files/pos/t7398/Test.scala @@ -0,0 +1,5 @@ +class Test extends Iterator[String] { + def hasNext = true + def next() = "" + def test = this.remove() +} diff --git a/test/files/pos/t8852/Interface.java b/test/files/pos/t8852/Interface.java new file mode 100644 index 000000000000..7b35f3b12f1e --- /dev/null +++ b/test/files/pos/t8852/Interface.java @@ -0,0 +1,5 @@ +public interface Interface { + public static int staticMethod() { + return 42; + } +} diff --git a/test/files/pos/t8852/Test.scala b/test/files/pos/t8852/Test.scala new file mode 100644 index 000000000000..acd36ec2a5a0 --- /dev/null +++ b/test/files/pos/t8852/Test.scala @@ -0,0 +1,5 @@ +object Test { + val x: Int = Interface.staticMethod() +} + +class C extends Interface // expect no errors about unimplemented members. diff --git a/test/files/run/sammy_java8.scala b/test/files/run/sammy_java8.scala deleted file mode 100644 index 39118486eddd..000000000000 --- a/test/files/run/sammy_java8.scala +++ /dev/null @@ -1,32 +0,0 @@ -import scala.tools.partest._ - -// java8 version of sammy_poly.scala -object Test extends CompilerTest { - import global._ - - override lazy val units: List[CompilationUnit] = { - // This test itself does not depend on JDK8. - javaCompilationUnits(global)(samSource) ++ - compilationUnits(global)(useSamSource) - } - - private def samSource = """ -// trait F[T, U] { def apply(x: T): U } -public interface F { - U apply(T t); - default void yadayada() { - throw new UnsupportedOperationException("yadayada"); - } -} - """ - - private def useSamSource = """ -class T { - def app[T, U](x: T)(f: F[T, U]): U = f(x) - app(1)(x => List(x)) -} - """ - - // We're only checking we can compile it. - def check(source: String, unit: global.CompilationUnit): Unit = () -} diff --git a/test/files/run/t7398.scala b/test/files/run/t7398.scala deleted file mode 100644 index 4b4685076810..000000000000 --- a/test/files/run/t7398.scala +++ /dev/null @@ -1,26 +0,0 @@ -import scala.tools.partest._ - -object Test extends CompilerTest { - import global._ - - override lazy val units: List[CompilationUnit] = { - // This test itself does not depend on JDK8. - javaCompilationUnits(global)(defaultMethodSource) - } - - private def defaultMethodSource = """ -public interface Iterator { - boolean hasNext(); - E next(); - default void remove() { - throw new UnsupportedOperationException("remove"); - } - default void forEachRemaining(Consumer action) { - throw new UnsupportedOperationException("forEachRemaining"); - } -} - """ - - // We're only checking we can compile it. 
- def check(source: String, unit: global.CompilationUnit): Unit = () -} diff --git a/test/files/run/t7825.scala b/test/files/run/t7825.scala deleted file mode 100644 index 65ca06fdfc09..000000000000 --- a/test/files/run/t7825.scala +++ /dev/null @@ -1,34 +0,0 @@ -import scala.tools.partest._ - -object Test extends CompilerTest { - import global._ - - override lazy val units: List[CompilationUnit] = { - // We can test this on JDK6. - javaCompilationUnits(global)(defaultMethodSource) ++ compilationUnits(global)(scalaExtendsDefault) - } - - private def defaultMethodSource = """ -public interface Iterator { - boolean hasNext(); - E next(); - default void remove() { - throw new UnsupportedOperationException("remove"); - } -} - """ - - private def scalaExtendsDefault = """ -object Test { - object X extends Iterator[String] { - def hasNext = true - def next = "!" - } -} - """ - - // We're only checking we that the Scala compilation unit passes refchecks - // No further checks are needed here. - def check(source: String, unit: global.CompilationUnit): Unit = { - } -} diff --git a/test/files/run/t8852a.scala b/test/files/run/t8852a.scala deleted file mode 100644 index cbff8ab75b91..000000000000 --- a/test/files/run/t8852a.scala +++ /dev/null @@ -1,34 +0,0 @@ -import scala.tools.partest._ - -// Test that static methods in Java interfaces (new in Java 8) -// are callable from jointly compiler Scala code. -object Test extends CompilerTest { - import global._ - - override lazy val units: List[CompilationUnit] = { - // This test itself does not depend on JDK8. - javaCompilationUnits(global)(staticMethodInInterface) ++ - compilationUnits(global)(scalaClient) - } - - private def staticMethodInInterface = """ -public interface Interface { - public static int staticMethod() { - return 42; - } -} - - """ - - private def scalaClient = """ -object Test { - val x: Int = Interface.staticMethod() -} - -class C extends Interface // expect no errors about unimplemented members. - - """ - - // We're only checking we can compile it. 
- def check(source: String, unit: global.CompilationUnit): Unit = () -} From 5603e832c79d641ea2df376ca4f0c451cd941280 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Fri, 23 Apr 2021 16:50:07 +0200 Subject: [PATCH 119/769] Partest: use a `// java: -flags` comment instead of .javaopts file --- .../scala/tools/partest/nest/Runner.scala | 27 ++++++------------- src/partest/scala/tools/partest/package.scala | 14 ---------- test/files/jvm/methvsfield.javaopts | 1 - test/files/jvm/methvsfield/Test_2.scala | 1 + test/files/jvm/natives.javaopts | 1 - test/files/jvm/natives.scala | 2 ++ test/files/jvm/t1600.javaopts | 1 - test/files/jvm/t1600.scala | 1 + test/files/jvm/t8689.javaopts | 1 - test/files/jvm/t8689.scala | 1 + test/files/run/bridges.javaopts | 1 - test/files/run/bridges.scala | 2 ++ .../run/lambda-serialization-gc.javaopts | 1 - test/files/run/lambda-serialization-gc.scala | 2 ++ test/files/run/reflection-mem-glbs.javaopts | 1 - test/files/run/reflection-mem-glbs.scala | 2 ++ test/files/run/reflection-mem-tags.javaopts | 1 - test/files/run/reflection-mem-tags.scala | 2 ++ test/files/run/reify_copypaste1.javaopts | 1 - test/files/run/reify_copypaste1.scala | 2 ++ test/files/run/shutdownhooks.javaopts | 1 - test/files/run/shutdownhooks.scala | 2 ++ test/files/run/stream-gc.javaopts | 1 - test/files/run/stream-gc.scala | 2 ++ test/files/run/t2318.javaopts | 1 - test/files/run/t2318.scala | 1 + test/files/run/t4415.scala | 2 +- test/files/run/t6411a.javaopts | 1 - test/files/run/t6411a.scala | 1 + test/files/run/t6488.javaopts | 1 - test/files/run/t6488.scala | 2 ++ test/files/run/t7634.javaopts | 1 - test/files/run/t7634.scala | 2 ++ test/files/run/t7805-repl-i.javaopts | 1 - test/files/run/t7805-repl-i.scala | 2 ++ test/files/run/t8266-octal-interp.javaopts | 1 - test/files/run/t8928.javaopts | 1 - test/files/run/t8928/Test_1.scala | 1 + test/files/run/type-tag-leak.javaopts | 1 - test/files/run/type-tag-leak.scala | 2 ++ 40 files changed, 39 insertions(+), 53 deletions(-) delete mode 100644 test/files/jvm/methvsfield.javaopts delete mode 100644 test/files/jvm/natives.javaopts delete mode 100644 test/files/jvm/t1600.javaopts delete mode 100644 test/files/jvm/t8689.javaopts delete mode 100644 test/files/run/bridges.javaopts delete mode 100644 test/files/run/lambda-serialization-gc.javaopts delete mode 100644 test/files/run/reflection-mem-glbs.javaopts delete mode 100644 test/files/run/reflection-mem-tags.javaopts delete mode 100644 test/files/run/reify_copypaste1.javaopts delete mode 100644 test/files/run/shutdownhooks.javaopts delete mode 100644 test/files/run/stream-gc.javaopts delete mode 100644 test/files/run/t2318.javaopts delete mode 100644 test/files/run/t6411a.javaopts delete mode 100644 test/files/run/t6488.javaopts delete mode 100644 test/files/run/t7634.javaopts delete mode 100644 test/files/run/t7805-repl-i.javaopts delete mode 100644 test/files/run/t8266-octal-interp.javaopts delete mode 100644 test/files/run/t8928.javaopts delete mode 100644 test/files/run/type-tag-leak.javaopts diff --git a/src/partest/scala/tools/partest/nest/Runner.scala b/src/partest/scala/tools/partest/nest/Runner.scala index 67c3071c9877..906b021771b1 100644 --- a/src/partest/scala/tools/partest/nest/Runner.scala +++ b/src/partest/scala/tools/partest/nest/Runner.scala @@ -130,25 +130,15 @@ class Runner(val testInfo: TestInfo, val suiteRunner: AbstractRunner) { /** Fail the action. 
*/ def nextTestActionFailing(reason: String): TestState = nextTestActionExpectTrue(reason, false) - private def assembleTestCommand(outDir: File, logFile: File): List[String] = { - // check whether there is a ".javaopts" file - val argsFile = testFile changeExtension "javaopts" - val javaopts = readOptionsFile(argsFile) + private def assembleTestCommand(outDir: File, javaopts: List[String]): List[String] = { if (javaopts.nonEmpty) - suiteRunner.verbose(s"Found javaopts file '$argsFile', using options: '${javaopts.mkString(",")}'") - - // Note! As this currently functions, suiteRunner.javaOpts must precede argString - // because when an option is repeated to java only the last one wins. - // That means until now all the .javaopts files were being ignored because - // they all attempt to change options which are also defined in - // partest.java_opts, leading to debug output like: - // - // debug: Found javaopts file 'files/shootout/message.scala-2.javaopts', using options: '-Xss32k' - // debug: java -Xss32k -Xss2m -Xms256M -Xmx1024M -classpath [...] + suiteRunner.verbose(s"Using java options: '${javaopts.mkString(",")}'") + val propertyOpts = propertyOptions(fork = true).map { case (k, v) => s"-D$k=$v" } val classpath = joinPaths(extraClasspath ++ testClassPath) + // `javaopts` last; for repeated arguments, the last one wins javaCmdPath +: ( (suiteRunner.javaOpts.split(' ') ++ extraJavaOptions ++ javaopts).filter(_ != "").toList ++ Seq( "-classpath", @@ -224,8 +214,8 @@ class Runner(val testInfo: TestInfo, val suiteRunner: AbstractRunner) { } } - private def execTest(outDir: File, logFile: File): TestState = { - val cmd = assembleTestCommand(outDir, logFile) + private def execTest(outDir: File, logFile: File, javaopts: List[String]): TestState = { + val cmd = assembleTestCommand(outDir, javaopts) pushTranscript((cmd mkString s" \\$EOL ") + " > " + logFile.getName) nextTestAction(runCommand(cmd, logFile)) { @@ -666,10 +656,9 @@ class Runner(val testInfo: TestInfo, val suiteRunner: AbstractRunner) { } private def runRunTest(): TestState = { - val argsFile = testFile changeExtension "javaopts" // TODO: use `toolArgsFor` instead of a separate file - val javaopts = readOptionsFile(argsFile) + val javaopts = toolArgs("java") val execInProcess = PartestDefaults.execInProcess && javaopts.isEmpty && !Set("specialized", "instrumented").contains(testFile.getParentFile.getName) - def exec() = if (execInProcess) execTestInProcess(outDir, logFile) else execTest(outDir, logFile) + def exec() = if (execInProcess) execTestInProcess(outDir, logFile) else execTest(outDir, logFile, javaopts) def noexec() = genSkip("no-exec: tests compiled but not run") runTestCommon(if (suiteRunner.config.optNoExec) noexec() else exec().andAlso(diffIsOk)) } diff --git a/src/partest/scala/tools/partest/package.scala b/src/partest/scala/tools/partest/package.scala index b4ba200511e5..d3e5f070eed9 100644 --- a/src/partest/scala/tools/partest/package.scala +++ b/src/partest/scala/tools/partest/package.scala @@ -129,8 +129,6 @@ package object partest { def fileSeparator = java.io.File.separator def pathSeparator = java.io.File.pathSeparator - def words(s: String): List[String] = (s.trim split "\\s+").toList - def timed[T](body: => T): (T, Long) = { val t1 = System.currentTimeMillis val result = body @@ -143,18 +141,6 @@ package object partest { def basename(name: String): String = Path(name).stripExtension - /** In order to allow for spaces in flags/options, this - * parses .flags, .javaopts, javacopts etc files as follows: - * If 
it is exactly one line, it is split (naively) on spaces. - * If it contains more than one line, each line is its own - * token, spaces and all. - */ - def readOptionsFile(file: File): List[String] = - file.fileLines match { - case x :: Nil => words(x) - case xs => xs - } - def findProgram(name: String): Option[File] = { val pathDirs = sys.env("PATH") match { case null => List("/usr/local/bin", "/usr/bin", "/bin") diff --git a/test/files/jvm/methvsfield.javaopts b/test/files/jvm/methvsfield.javaopts deleted file mode 100644 index 9740f07b079b..000000000000 --- a/test/files/jvm/methvsfield.javaopts +++ /dev/null @@ -1 +0,0 @@ --Dneeds.forked.jvm \ No newline at end of file diff --git a/test/files/jvm/methvsfield/Test_2.scala b/test/files/jvm/methvsfield/Test_2.scala index 5389836be277..b9ad46ac7426 100644 --- a/test/files/jvm/methvsfield/Test_2.scala +++ b/test/files/jvm/methvsfield/Test_2.scala @@ -1,3 +1,4 @@ +// java: -Dneeds.forked.jvm // bug #1062 object Test extends App { println((new MethVsField_1).three) diff --git a/test/files/jvm/natives.javaopts b/test/files/jvm/natives.javaopts deleted file mode 100644 index 57b2283c7fb3..000000000000 --- a/test/files/jvm/natives.javaopts +++ /dev/null @@ -1 +0,0 @@ --Dneeds.to.fork \ No newline at end of file diff --git a/test/files/jvm/natives.scala b/test/files/jvm/natives.scala index 2d19f3cbfda0..15a8b298f343 100644 --- a/test/files/jvm/natives.scala +++ b/test/files/jvm/natives.scala @@ -1,3 +1,5 @@ +// java: -Dneeds.to.fork + object Test { //println("java.library.path=" + System.getProperty("java.library.path")) diff --git a/test/files/jvm/t1600.javaopts b/test/files/jvm/t1600.javaopts deleted file mode 100644 index f4038254ba29..000000000000 --- a/test/files/jvm/t1600.javaopts +++ /dev/null @@ -1 +0,0 @@ --Dneeds.forked.jvm.maybe.because.context.classloader \ No newline at end of file diff --git a/test/files/jvm/t1600.scala b/test/files/jvm/t1600.scala index b434862adb1e..da04a5f7c923 100644 --- a/test/files/jvm/t1600.scala +++ b/test/files/jvm/t1600.scala @@ -1,3 +1,4 @@ +// java: -Dneeds.forked.jvm.maybe.because.context.classloader /** * Checks that serialization of hash-based collections works correctly if the hashCode diff --git a/test/files/jvm/t8689.javaopts b/test/files/jvm/t8689.javaopts deleted file mode 100644 index 9740f07b079b..000000000000 --- a/test/files/jvm/t8689.javaopts +++ /dev/null @@ -1 +0,0 @@ --Dneeds.forked.jvm \ No newline at end of file diff --git a/test/files/jvm/t8689.scala b/test/files/jvm/t8689.scala index 3ee20d711a92..2eeb12a12cf1 100644 --- a/test/files/jvm/t8689.scala +++ b/test/files/jvm/t8689.scala @@ -1,3 +1,4 @@ +// java: -Dneeds.forked.jvm object Test { def main(args: Array[String]): Unit = { import scala.concurrent._ diff --git a/test/files/run/bridges.javaopts b/test/files/run/bridges.javaopts deleted file mode 100644 index 3a63111bf2fd..000000000000 --- a/test/files/run/bridges.javaopts +++ /dev/null @@ -1 +0,0 @@ --Xss128M diff --git a/test/files/run/bridges.scala b/test/files/run/bridges.scala index 53494500a4d5..de641f03f6b5 100644 --- a/test/files/run/bridges.scala +++ b/test/files/run/bridges.scala @@ -1,3 +1,5 @@ +// java: -Xss128M + //############################################################################ // Test bridge methods //############################################################################ diff --git a/test/files/run/lambda-serialization-gc.javaopts b/test/files/run/lambda-serialization-gc.javaopts deleted file mode 100644 index 9ecdb8a4dafd..000000000000 --- 
a/test/files/run/lambda-serialization-gc.javaopts +++ /dev/null @@ -1 +0,0 @@ --Xmx512m \ No newline at end of file diff --git a/test/files/run/lambda-serialization-gc.scala b/test/files/run/lambda-serialization-gc.scala index 9a179d4ed5c1..529a32146302 100644 --- a/test/files/run/lambda-serialization-gc.scala +++ b/test/files/run/lambda-serialization-gc.scala @@ -1,3 +1,5 @@ +// java: -Xmx512m + import java.io._ import java.net.URLClassLoader diff --git a/test/files/run/reflection-mem-glbs.javaopts b/test/files/run/reflection-mem-glbs.javaopts deleted file mode 100644 index 9ecdb8a4dafd..000000000000 --- a/test/files/run/reflection-mem-glbs.javaopts +++ /dev/null @@ -1 +0,0 @@ --Xmx512m \ No newline at end of file diff --git a/test/files/run/reflection-mem-glbs.scala b/test/files/run/reflection-mem-glbs.scala index 2a76f1db86b0..790a445cc6d7 100644 --- a/test/files/run/reflection-mem-glbs.scala +++ b/test/files/run/reflection-mem-glbs.scala @@ -1,3 +1,5 @@ +// java: -Xmx512m + import scala.tools.partest.MemoryTest trait A { type T <: A } diff --git a/test/files/run/reflection-mem-tags.javaopts b/test/files/run/reflection-mem-tags.javaopts deleted file mode 100644 index 9ecdb8a4dafd..000000000000 --- a/test/files/run/reflection-mem-tags.javaopts +++ /dev/null @@ -1 +0,0 @@ --Xmx512m \ No newline at end of file diff --git a/test/files/run/reflection-mem-tags.scala b/test/files/run/reflection-mem-tags.scala index 6ea3c34c86bf..0ae1b9406afb 100644 --- a/test/files/run/reflection-mem-tags.scala +++ b/test/files/run/reflection-mem-tags.scala @@ -1,3 +1,5 @@ +// java: -Xmx512m + import scala.tools.partest.MemoryTest trait A { type T <: A } diff --git a/test/files/run/reify_copypaste1.javaopts b/test/files/run/reify_copypaste1.javaopts deleted file mode 100644 index 9740f07b079b..000000000000 --- a/test/files/run/reify_copypaste1.javaopts +++ /dev/null @@ -1 +0,0 @@ --Dneeds.forked.jvm \ No newline at end of file diff --git a/test/files/run/reify_copypaste1.scala b/test/files/run/reify_copypaste1.scala index 12cc7dfe19d1..16b6ffed21c7 100644 --- a/test/files/run/reify_copypaste1.scala +++ b/test/files/run/reify_copypaste1.scala @@ -1,3 +1,5 @@ +// java: -Dneeds.forked.jvm + import scala.reflect.runtime._ import scala.reflect.runtime.universe._ import scala.reflect.runtime.universe.definitions._ diff --git a/test/files/run/shutdownhooks.javaopts b/test/files/run/shutdownhooks.javaopts deleted file mode 100644 index 9740f07b079b..000000000000 --- a/test/files/run/shutdownhooks.javaopts +++ /dev/null @@ -1 +0,0 @@ --Dneeds.forked.jvm \ No newline at end of file diff --git a/test/files/run/shutdownhooks.scala b/test/files/run/shutdownhooks.scala index 518243598f97..1d22ea78380d 100644 --- a/test/files/run/shutdownhooks.scala +++ b/test/files/run/shutdownhooks.scala @@ -1,3 +1,5 @@ +// java: -Dneeds.forked.jvm + object Test { scala.sys.addShutdownHook { // sleep is added here so main#shutdown happens before this hook. 
diff --git a/test/files/run/stream-gc.javaopts b/test/files/run/stream-gc.javaopts deleted file mode 100644 index 58ba19b41eff..000000000000 --- a/test/files/run/stream-gc.javaopts +++ /dev/null @@ -1 +0,0 @@ --Xmx5M -Xms5M diff --git a/test/files/run/stream-gc.scala b/test/files/run/stream-gc.scala index 699ab621de0b..18d8b972c00c 100644 --- a/test/files/run/stream-gc.scala +++ b/test/files/run/stream-gc.scala @@ -1,3 +1,5 @@ +// java: -Xmx5M -Xms5M + import scala.collection.immutable._ object Test extends App { diff --git a/test/files/run/t2318.javaopts b/test/files/run/t2318.javaopts deleted file mode 100644 index 8bf493ce91e6..000000000000 --- a/test/files/run/t2318.javaopts +++ /dev/null @@ -1 +0,0 @@ --Ddummy=fresh_jvm_needed_to_test_security_manager \ No newline at end of file diff --git a/test/files/run/t2318.scala b/test/files/run/t2318.scala index bce56f6be33f..f00297b5c9e1 100644 --- a/test/files/run/t2318.scala +++ b/test/files/run/t2318.scala @@ -1,3 +1,4 @@ +// java: -Ddummy=fresh_jvm_needed_to_test_security_manager // filter: WARNING.* // for now, ignore warnings due to reflective invocation import java.security._ diff --git a/test/files/run/t4415.scala b/test/files/run/t4415.scala index 5892b0c16def..8a196b516dfd 100644 --- a/test/files/run/t4415.scala +++ b/test/files/run/t4415.scala @@ -3,7 +3,7 @@ * * Exception in thread "main" java.lang.VerifyError: (class: ExtractorIssue$$, method: convert signature: (LTopProperty;)LMyProp;) Accessing value from uninitialized register 5 * at ExtractorIssue.main(ExtractorIssue.scala) - * at com.intellij.rt.execution.application.AppMain.main(AppMain.java:115)] + * at com.intellij.rt.execution.application.AppMain.main(AppMain.java)] * * If lines 15/16 are present, the compiler crashes: * diff --git a/test/files/run/t6411a.javaopts b/test/files/run/t6411a.javaopts deleted file mode 100644 index 2e862e5f806e..000000000000 --- a/test/files/run/t6411a.javaopts +++ /dev/null @@ -1 +0,0 @@ --XX:CompileCommand=exclude,scala/runtime/BoxesRunTime.unboxToInt diff --git a/test/files/run/t6411a.scala b/test/files/run/t6411a.scala index f40c42d05965..bd2fdd37be52 100644 --- a/test/files/run/t6411a.scala +++ b/test/files/run/t6411a.scala @@ -1,3 +1,4 @@ +// java: -XX:CompileCommand=exclude,scala/runtime/BoxesRunTime.unboxToInt // filter: scala.runtime.BoxesRunTime.{1,2}unboxToInt // // noise from -XX:CompileCommand=exclude,scala/runtime/BoxesRunTime.unboxToInt diff --git a/test/files/run/t6488.javaopts b/test/files/run/t6488.javaopts deleted file mode 100644 index 0c252573c8f0..000000000000 --- a/test/files/run/t6488.javaopts +++ /dev/null @@ -1 +0,0 @@ --Dforked.test=yes.please diff --git a/test/files/run/t6488.scala b/test/files/run/t6488.scala index 1d99bd85d4cf..90d29b264964 100644 --- a/test/files/run/t6488.scala +++ b/test/files/run/t6488.scala @@ -1,3 +1,5 @@ +// java: -Dforked.test=yes.please + import scala.sys.process._ import scala.util.Try import scala.util.Properties.{javaHome, javaClassPath, userDir} diff --git a/test/files/run/t7634.javaopts b/test/files/run/t7634.javaopts deleted file mode 100644 index b0c90bb1f73a..000000000000 --- a/test/files/run/t7634.javaopts +++ /dev/null @@ -1 +0,0 @@ --Dneeds.forked.jvm.for.windows diff --git a/test/files/run/t7634.scala b/test/files/run/t7634.scala index 345138eb933f..5997b3d48fa5 100644 --- a/test/files/run/t7634.scala +++ b/test/files/run/t7634.scala @@ -1,3 +1,5 @@ +// java: -Dneeds.forked.jvm.for.windows + import java.io.File import scala.tools.partest.ReplTest import 
scala.util.Properties.propOrElse diff --git a/test/files/run/t7805-repl-i.javaopts b/test/files/run/t7805-repl-i.javaopts deleted file mode 100644 index 9740f07b079b..000000000000 --- a/test/files/run/t7805-repl-i.javaopts +++ /dev/null @@ -1 +0,0 @@ --Dneeds.forked.jvm \ No newline at end of file diff --git a/test/files/run/t7805-repl-i.scala b/test/files/run/t7805-repl-i.scala index 2a80ad8bda2d..816926b7c38a 100644 --- a/test/files/run/t7805-repl-i.scala +++ b/test/files/run/t7805-repl-i.scala @@ -1,3 +1,5 @@ +// java: -Dneeds.forked.jvm + import scala.tools.partest.ReplTest import scala.tools.nsc.{ GenericRunnerSettings, Settings } import scala.tools.nsc.settings.MutableSettings diff --git a/test/files/run/t8266-octal-interp.javaopts b/test/files/run/t8266-octal-interp.javaopts deleted file mode 100644 index 9740f07b079b..000000000000 --- a/test/files/run/t8266-octal-interp.javaopts +++ /dev/null @@ -1 +0,0 @@ --Dneeds.forked.jvm \ No newline at end of file diff --git a/test/files/run/t8928.javaopts b/test/files/run/t8928.javaopts deleted file mode 100644 index a8e6bbca18ae..000000000000 --- a/test/files/run/t8928.javaopts +++ /dev/null @@ -1 +0,0 @@ --Dneeds.forked.jvm diff --git a/test/files/run/t8928/Test_1.scala b/test/files/run/t8928/Test_1.scala index 1cef564ff1be..bcf94ce41e52 100644 --- a/test/files/run/t8928/Test_1.scala +++ b/test/files/run/t8928/Test_1.scala @@ -1,3 +1,4 @@ +// java: -Dneeds.forked.jvm import test._ object Test extends App { diff --git a/test/files/run/type-tag-leak.javaopts b/test/files/run/type-tag-leak.javaopts deleted file mode 100644 index 408a4e4cb595..000000000000 --- a/test/files/run/type-tag-leak.javaopts +++ /dev/null @@ -1 +0,0 @@ --Xmx192M -XX:+ExitOnOutOfMemoryError \ No newline at end of file diff --git a/test/files/run/type-tag-leak.scala b/test/files/run/type-tag-leak.scala index 245288802a84..277799f765eb 100644 --- a/test/files/run/type-tag-leak.scala +++ b/test/files/run/type-tag-leak.scala @@ -1,3 +1,5 @@ +// java: -Xmx192M -XX:+ExitOnOutOfMemoryError + import scala.reflect.runtime.universe import scala.reflect.runtime.universe._ import scala.tools.nsc.interpreter._ From a8225a093da82381cc4bd7634492c237a3defcc9 Mon Sep 17 00:00:00 2001 From: Alec Theriault Date: Tue, 23 Mar 2021 14:09:41 -0700 Subject: [PATCH 120/769] SI-12290: support JDK15 text blocks in Java parser JDK15 introduced text blocks (JEP 378) for writing multiline strings. This adds support for parsing these strings in the Java parser. The logic for interpreting the literals is a little complicated, but follows from section "3.10.6. Text Blocks" of the Java language specification. The test cases include examples from there and from the JEP.
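For illustration only (not part of this patch), here is a minimal sketch of the JEP 378 syntax the Java scanner now has to accept; the class and field names are hypothetical, chosen just for this example:

    class TextBlockSketch {
        // The opening """ must be followed by a line terminator; the closing """
        // ends the block. The common ("incidental") indentation of the content
        // lines and of the closing delimiter line is stripped, so this value is
        // "line one\nline two\n".
        static final String sketch = """
            line one
            line two
            """;
    }

The scanner cannot simply copy the characters between the delimiters: it first has to locate the closing delimiter and measure the common white-space prefix, and only then build the literal, which is why the getTextBlock method added below works in two passes.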
--- .../scala/tools/nsc/javac/JavaScanners.scala | 185 +++++++++++++++--- test/files/neg/text-blocks.check | 13 ++ test/files/neg/text-blocks/Invalid1.java | 7 + test/files/neg/text-blocks/Invalid2.java | 7 + test/files/run/t12290.check | 61 ++++++ test/files/run/t12290/Test.scala | 30 +++ test/files/run/t12290/TextBlocks.java | 78 ++++++++ 7 files changed, 357 insertions(+), 24 deletions(-) create mode 100644 test/files/neg/text-blocks.check create mode 100644 test/files/neg/text-blocks/Invalid1.java create mode 100644 test/files/neg/text-blocks/Invalid2.java create mode 100644 test/files/run/t12290.check create mode 100644 test/files/run/t12290/Test.scala create mode 100644 test/files/run/t12290/TextBlocks.java diff --git a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala index 3f8ee1166a08..770e680012c0 100644 --- a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala +++ b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala @@ -239,6 +239,9 @@ trait JavaScanners extends ast.parser.ScannersCommon { */ protected def putChar(c: Char): Unit = { cbuf.append(c) } + /** Remove the last N characters from the buffer */ + private def popNChars(n: Int): Unit = if (n > 0) cbuf.setLength(cbuf.length - n) + /** Clear buffer and set name */ private def setName(): Unit = { name = newTermName(cbuf.toString()) @@ -322,15 +325,26 @@ trait JavaScanners extends ast.parser.ScannersCommon { case '\"' => in.next() - while (in.ch != '\"' && (in.isUnicode || in.ch != CR && in.ch != LF && in.ch != SU)) { - getlitch() - } - if (in.ch == '\"') { - token = STRINGLIT - setName() - in.next() + if (in.ch != '\"') { // "..." non-empty string literal + while (in.ch != '\"' && (in.isUnicode || in.ch != CR && in.ch != LF && in.ch != SU)) { + getlitch() + } + if (in.ch == '\"') { + token = STRINGLIT + setName() + in.next() + } else { + syntaxError("unclosed string literal") + } } else { - syntaxError("unclosed string literal") + in.next() + if (in.ch != '\"') { // "" empty string literal + token = STRINGLIT + setName() + } else { + in.next() + getTextBlock() + } } return @@ -664,9 +678,12 @@ trait JavaScanners extends ast.parser.ScannersCommon { // Literals ----------------------------------------------------------------- /** read next character in character or string literal: - */ - protected def getlitch() = - if (in.ch == '\\') { + * + * @param scanOnly skip emitting errors or adding to the literal buffer + * @param inTextBlock is this for a text block? 
+ */ + protected def getlitch(scanOnly: Boolean = false, inTextBlock: Boolean = false): Unit = { + val c: Char = if (in.ch == '\\') { in.next() if ('0' <= in.ch && in.ch <= '7') { val leadch: Char = in.ch @@ -680,27 +697,147 @@ trait JavaScanners extends ast.parser.ScannersCommon { in.next() } } - putChar(oct.asInstanceOf[Char]) + oct.asInstanceOf[Char] } else { - in.ch match { - case 'b' => putChar('\b') - case 't' => putChar('\t') - case 'n' => putChar('\n') - case 'f' => putChar('\f') - case 'r' => putChar('\r') - case '\"' => putChar('\"') - case '\'' => putChar('\'') - case '\\' => putChar('\\') + val c: Char = in.ch match { + case 'b' => '\b' + case 's' => ' ' + case 't' => '\t' + case 'n' => '\n' + case 'f' => '\f' + case 'r' => '\r' + case '\"' => '\"' + case '\'' => '\'' + case '\\' => '\\' + case CR | LF if inTextBlock => + in.next() + return case _ => - syntaxError(in.cpos - 1, "invalid escape character") - putChar(in.ch) + if (!scanOnly) syntaxError(in.cpos - 1, "invalid escape character") + in.ch } in.next() + c } } else { - putChar(in.ch) + val c = in.ch in.next() + c } + if (!scanOnly) putChar(c) + } + + /** read a triple-quote delimited text block, starting after the first three + * double quotes + */ + private def getTextBlock(): Unit = { + // Open delimiter is followed by optional space, then a newline + while (in.ch == ' ' || in.ch == '\t' || in.ch == FF) { + in.next() + } + if (in.ch != LF && in.ch != CR) { // CR-LF is already normalized into LF by `JavaCharArrayReader` + syntaxError("illegal text block open delimiter sequence, missing line terminator") + return + } + in.next() + + /* Do a lookahead scan over the full text block to: + * - compute common white space prefix + * - find the offset where the text block ends + */ + var commonWhiteSpacePrefix = Int.MaxValue + var blockEndOffset = 0 + val backtrackTo = in.copy + var blockClosed = false + var lineWhiteSpacePrefix = 0 + var lineIsOnlyWhitespace = true + while (!blockClosed && (in.isUnicode || in.ch != SU)) { + if (in.ch == '\"') { // Potential end of the block + in.next() + if (in.ch == '\"') { + in.next() + if (in.ch == '\"') { + blockClosed = true + commonWhiteSpacePrefix = commonWhiteSpacePrefix min lineWhiteSpacePrefix + blockEndOffset = in.cpos - 2 + } + } + + // Not the end of the block - just a single or double " character + if (!blockClosed) { + lineIsOnlyWhitespace = false + } + } else if (in.ch == CR || in.ch == LF) { // new line in the block + in.next() + if (!lineIsOnlyWhitespace) { + commonWhiteSpacePrefix = commonWhiteSpacePrefix min lineWhiteSpacePrefix + } + lineWhiteSpacePrefix = 0 + lineIsOnlyWhitespace = true + } else if (lineIsOnlyWhitespace && Character.isWhitespace(in.ch)) { // extend white space prefix + in.next() + lineWhiteSpacePrefix += 1 + } else { + lineIsOnlyWhitespace = false + getlitch(scanOnly = true, inTextBlock = true) + } + } + + // Bail out if the block never did have an end + if (!blockClosed) { + syntaxError("unclosed text block") + return + } + + // Second pass: construct the literal string value this time + in = backtrackTo + while (in.cpos < blockEndOffset) { + // Drop the line's leading whitespace + var remainingPrefix = commonWhiteSpacePrefix + while (remainingPrefix > 0 && in.ch != CR && in.ch != LF && in.cpos < blockEndOffset) { + in.next() + remainingPrefix -= 1 + } + + var trailingWhitespaceLength = 0 + var escapedNewline = false // Does the line end with `\`? 
+ while (in.ch != CR && in.ch != LF && in.cpos < blockEndOffset && !escapedNewline) { + if (Character.isWhitespace(in.ch)) { + trailingWhitespaceLength += 1 + } else { + trailingWhitespaceLength = 0 + } + + // Detect if the line is about to end with `\` + if (in.ch == '\\' && { + val lookahead = in.copy + lookahead.next() + lookahead.ch == CR || lookahead.ch == LF + }) { + escapedNewline = true + } + + getlitch(scanOnly = false, inTextBlock = true) + } + + // Drop the line's trailing whitespace + popNChars(trailingWhitespaceLength) + + // Normalize line terminators + if ((in.ch == CR || in.ch == LF) && !escapedNewline) { + in.next() + putChar('\n') + } + } + + token = STRINGLIT + setName() + + // Trailing """ + in.next() + in.next() + in.next() + } /** read fractional part and exponent of floating point number * if one is present. diff --git a/test/files/neg/text-blocks.check b/test/files/neg/text-blocks.check new file mode 100644 index 000000000000..8a9af6292a04 --- /dev/null +++ b/test/files/neg/text-blocks.check @@ -0,0 +1,13 @@ +text-blocks/Invalid1.java:4: error: illegal text block open delimiter sequence, missing line terminator + public static final String badOpeningDelimiter = """non-whitespace + ^ +text-blocks/Invalid1.java:4: error: expected + public static final String badOpeningDelimiter = """non-whitespace + ^ +text-blocks/Invalid1.java:6: error: illegal text block open delimiter sequence, missing line terminator + """; + ^ +text-blocks/Invalid2.java:6: error: unclosed string literal + foo""""; + ^ +4 errors diff --git a/test/files/neg/text-blocks/Invalid1.java b/test/files/neg/text-blocks/Invalid1.java new file mode 100644 index 000000000000..54c7e98d9219 --- /dev/null +++ b/test/files/neg/text-blocks/Invalid1.java @@ -0,0 +1,7 @@ +// javaVersion: 15+ +class Invalid1 { + + public static final String badOpeningDelimiter = """non-whitespace + foo + """; +} diff --git a/test/files/neg/text-blocks/Invalid2.java b/test/files/neg/text-blocks/Invalid2.java new file mode 100644 index 000000000000..08b0a57548aa --- /dev/null +++ b/test/files/neg/text-blocks/Invalid2.java @@ -0,0 +1,7 @@ +// javaVersion: 15+ +class Invalid2 { + + // Closing delimiter is first three eligible `"""`, not last + public static final String closingDelimiterIsNotScalas = """ + foo""""; +} diff --git a/test/files/run/t12290.check b/test/files/run/t12290.check new file mode 100644 index 000000000000..00d93b3657dd --- /dev/null +++ b/test/files/run/t12290.check @@ -0,0 +1,61 @@ +==== +A text + +==== + + +

Hello, world

+ + + +==== +SELECT "EMP_ID", "LAST_NAME" FROM "EMPLOYEE_TB" +WHERE "CITY" = 'INDIANAPOLIS' +ORDER BY "EMP_ID", "LAST_NAME"; + +==== + + +

Hello, world

+ + + +==== + + +

Hello, world

+ + + +==== + + +

Hello, world

+ + + + +==== + + +

Hello , world

+ + + +==== + this line has 4 tabs before it + this line has 5 spaces before it and space after it + this line has 2 tabs and 3 spaces before it +  this line has 6 spaces before it + +==== +String text = """ + A text block inside a text block +"""; + +==== +foo bar +baz +==== + +==== diff --git a/test/files/run/t12290/Test.scala b/test/files/run/t12290/Test.scala new file mode 100644 index 000000000000..13b01b51478c --- /dev/null +++ b/test/files/run/t12290/Test.scala @@ -0,0 +1,30 @@ +// javaVersion: 15+ +/* Using `valueOf` is a way to check that the Java string literals were properly + * parsed, since the parsed value is what the Scala compiler will use when + * resolving the singleton types + */ +object Test extends App { + println("====") + println(valueOf[TextBlocks.aText.type]) + println("====") + println(valueOf[TextBlocks.html1.type]) + println("====") + println(valueOf[TextBlocks.query.type]) + println("====") + println(valueOf[TextBlocks.html2.type]) + println("====") + println(valueOf[TextBlocks.html3.type]) + println("====") + println(valueOf[TextBlocks.html4.type]) + println("====") + println(valueOf[TextBlocks.html5.type]) + println("====") + println(valueOf[TextBlocks.mixedIndents.type]) + println("====") + println(valueOf[TextBlocks.code.type]) + println("====") + println(valueOf[TextBlocks.simpleString.type]) + println("====") + println(valueOf[TextBlocks.emptyString.type]) + println("====") +} diff --git a/test/files/run/t12290/TextBlocks.java b/test/files/run/t12290/TextBlocks.java new file mode 100644 index 000000000000..e1928e74c971 --- /dev/null +++ b/test/files/run/t12290/TextBlocks.java @@ -0,0 +1,78 @@ +// javaVersion: 15+ +class TextBlocks { + + final static String aText = """ + A text + """; + + final static String html1 = """ + + +

Hello, world

+ + + """; + + // quote characters are unescaped + final static String query = """ + SELECT "EMP_ID", "LAST_NAME" FROM "EMPLOYEE_TB" + WHERE "CITY" = 'INDIANAPOLIS' + ORDER BY "EMP_ID", "LAST_NAME"; + """; + + // incidental trailing spaces + final static String html2 = """ + + +

Hello, world

+ + + """; + + // trailing delimiter influences + final static String html3 = """ + + +

Hello, world

+ + + """; + + // blank line does not affect + final static String html4 = """ + + +

Hello, world

+ + + + """; + + // escape sequences + final static String html5 = """ + \n + \ +

Hello\s,\tworld

+ + + """; + + // mixed indentation + final static String mixedIndents = """ + \s this line has 4 tabs before it + this line has 5 spaces before it and space after it \u0020 \u000C\u0020 \u001E + this line has 2 tabs and 3 spaces before it +\u0020 \u000C\u0020 \u001E this line has 6 spaces before it + """; + + final static String code = + """ + String text = \""" + A text block inside a text block + \"""; + """; + + final static String simpleString = "foo\tbar\nbaz"; + + final static String emptyString = ""; +} From 52745d0bd089bcd169ad5c12c1a130a317a21873 Mon Sep 17 00:00:00 2001 From: Tom Grigg Date: Wed, 10 Feb 2021 23:53:49 -0800 Subject: [PATCH 121/769] [forward port from 2.12.x] GitHub Actions: build and test on Windows and remove obsolete CI scripts forward-ports #9496 and #9585 Co-authored-by: Seth Tisue --- .gitattributes | 3 +++ .github/workflows/ci.yml | 47 ++++++++++++++++++++++++++++++++++ scripts/jobs/integrate/ide | 35 ------------------------- scripts/jobs/integrate/windows | 22 ---------------- 4 files changed, 50 insertions(+), 57 deletions(-) create mode 100644 .github/workflows/ci.yml delete mode 100755 scripts/jobs/integrate/ide delete mode 100755 scripts/jobs/integrate/windows diff --git a/.gitattributes b/.gitattributes index da4421cb78ed..99eca173f23e 100644 --- a/.gitattributes +++ b/.gitattributes @@ -21,6 +21,9 @@ text eol=lf *.txt eol=lf *.xml eol=lf +# Some sbt launcher scripts can't handle CR in .jvmopts +.jvmopts eol=lf + # Windows-specific files get windows endings *.bat eol=crlf *.cmd eol=crlf diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 000000000000..70647980f2e2 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,47 @@ +name: Scala Merge CI + +on: + push: + branches: ['2.*.x'] + +defaults: + run: + shell: bash + +jobs: + build_and_test: + name: Windows + runs-on: windows-latest + strategy: + fail-fast: false + steps: + - run: git config --global core.autocrlf false + - name: Checkout + uses: actions/checkout@v2 + + # Note that we don't use olafurpg/setup-scala; it wouldn't buy us anything + # over setup-java. (We don't want csbt or xsbt; we prefer the standard + # sbt launch script, which comes preinstalled on Windows (and Ubuntu).) + - name: Setup Java + uses: actions/setup-java@v2 + with: + distribution: adopt + java-version: 8 + + - name: Cache + uses: actions/cache@v2 + with: + path: | + ~/.sbt + ~/.ivy2/cache + ~/.cache/coursier + key: ${{ runner.os }}-sbt-cache-v2-${{ hashFiles('**/*.sbt') }}-${{ hashFiles('project/build.properties') }} + + - name: Build + run: | + sbt setupPublishCore generateBuildCharacterPropertiesFile headerCheck publishLocal + + - name: Test + run: | + STARR=`cat buildcharacter.properties | grep ^maven.version.number | cut -d= -f2` && echo $STARR + sbt -Dstarr.version=$STARR setupValidateTest test:compile info testAll diff --git a/scripts/jobs/integrate/ide b/scripts/jobs/integrate/ide deleted file mode 100755 index 1dc7b43139e8..000000000000 --- a/scripts/jobs/integrate/ide +++ /dev/null @@ -1,35 +0,0 @@ -#!/bin/bash -e -# requires checkout: root is a scala checkout with which to integrate (actually, only required file is versions.properties, as documented below) -# requires env: scalaVersion (specifies binary already built from above checkout), WORKSPACE (provided by jenkins), repo_ref (HEAD of the scala checkout), -# requires files: $WORKSPACE/versions.properties (from checkout -- defines version numbers for modules used to build scala for dbuild...) 
- -echo "IDE integration not yet available on 2.12.x. Punting." -exit 0 - -# TODO: remove when integration is up and running -if [ "woele$_scabot_last" != "woele1" ]; then echo "Scabot didn't mark this as last commit -- skipping."; exit 0; fi - -baseDir=${WORKSPACE-`pwd`} -uberBuildUrl=${uberBuildUrl-"https://github.com/scala-ide/uber-build.git"} -uberBuildConfig=${uberBuildConfig-"validator.conf"} # TODO: backport to 2.10.x: uberBuildConfig="validator-2.10.conf" - -uberBuildDir="$WORKSPACE/uber-build/" - -cd $WORKSPACE -if [[ -d $uberBuildDir ]]; then - ( cd $uberBuildDir && git fetch $uberBuildUrl HEAD && git checkout -f FETCH_HEAD && git clean -fxd ) -else - git clone $uberBuildUrl -fi - -echo "maven.version.number=$scalaVersion" >> versions.properties - -# pass prRepoUrl in, which uber-build passes along to dbuild (in sbt-builds-for-ide) -# the "-P pr-scala" maven arg accomplishes the same thing for maven (directly used in uber-build) -BASEDIR="$WORKSPACE" prRepoUrl="$prRepoUrl" IDE_M2_REPO="$prRepoUrl" MAVEN_ARGS="-P pr-scala"\ - $uberBuildDir/uber-build.sh $uberBuildDir/config/$uberBuildConfig $repo_ref $scalaVersion - -# uber-build puts its local repo under target/m2repo -# wipe the org/scala-lang part, which otherwise just keeps -# growing and growing due to the -$sha-SNAPSHOT approach -[[ -d $WORKSPACE/target/m2repo/org/scala-lang ]] && rm -rf $WORKSPACE/target/m2repo/org/scala-lang diff --git a/scripts/jobs/integrate/windows b/scripts/jobs/integrate/windows deleted file mode 100755 index 964b70383c09..000000000000 --- a/scripts/jobs/integrate/windows +++ /dev/null @@ -1,22 +0,0 @@ -#!/bin/bash - -source scripts/common - -java -version -javac -version - -generateRepositoriesConfig - -# it may not be necessary to set both COURSIER_HOME and sbt.coursier.home, -# but at least for now, doing it just in case; see discussion at -# https://github.com/scala/scala-dev/issues/666 -export COURSIER_HOME=$WORKSPACE/.coursier - -SBT="java $JAVA_OPTS -Dsbt.ivy.home=$WORKSPACE/.ivy2 -Dsbt.coursier.home=$WORKSPACE/.coursier -jar $sbtLauncher -Dsbt.override.build.repos=true -Dsbt.repository.config=$sbtRepositoryConfig" - -# Build locker with STARR -$SBT -warn "setupPublishCore" generateBuildCharacterPropertiesFile publishLocal - -# Build quick and run the tests -parseScalaProperties buildcharacter.properties -$SBT -Dstarr.version=$maven_version_number -warn "setupValidateTest" testAll From f7ae7af0b3055ea12bad1516f89e2942889c6173 Mon Sep 17 00:00:00 2001 From: Alec Theriault Date: Wed, 24 Mar 2021 08:08:30 -0700 Subject: [PATCH 122/769] SI-11908: support JDK16 records in Java parser JDK16 introduced records (JEP 395) for reducing the boilerplate associated with small immutable classes. This new construct automatically * makes fields `private`/`final` and generates accessors for them * overrides `equals`/`hashCode`/`toString` * creates a `final` class that extends `java.lang.Record` The details are in "8.10. Record Classes" of the Java language specification. 
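For illustration only (not part of this patch), a minimal sketch of the shape of record declaration the Java parser now has to understand; the names are hypothetical, not taken from the test files:

    // The header (int x, int y) implies private final fields, accessor methods
    // x() and y(), a canonical constructor, and equals/hashCode/toString; the
    // class is implicitly final and extends java.lang.Record.
    record PointSketch(int x, int y) {
        // Compact constructor: no parameter list; it can validate or normalize
        // the components before they are assigned to the fields.
        PointSketch {
            if (x < 0 || y < 0) throw new IllegalArgumentException("negative coordinate");
        }
    }

Because javac only materializes those implicit members at compile time, the parser change below synthesizes the canonical constructor and any missing accessors itself, so that Scala code compiled against such a record can see them.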
Fixes scala/bug#11908 --- .../scala/tools/nsc/javac/JavaParsers.scala | 105 ++++++++++++++++-- .../scala/tools/nsc/javac/JavaTokens.scala | 1 + .../scala/reflect/internal/StdNames.scala | 1 + test/files/pos/t11908/C.scala | 55 +++++++++ test/files/pos/t11908/IntLike.scala | 4 + test/files/pos/t11908/R1.java | 7 ++ test/files/pos/t11908/R2.java | 12 ++ test/files/pos/t11908/R3.java | 23 ++++ 8 files changed, 196 insertions(+), 12 deletions(-) create mode 100644 test/files/pos/t11908/C.scala create mode 100644 test/files/pos/t11908/IntLike.scala create mode 100644 test/files/pos/t11908/R1.java create mode 100644 test/files/pos/t11908/R2.java create mode 100644 test/files/pos/t11908/R3.java diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala index f2b820256630..c1d1b8924dbb 100644 --- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala +++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala @@ -118,6 +118,8 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { def javaLangObject(): Tree = javaLangDot(tpnme.Object) + def javaLangRecord(): Tree = javaLangDot(tpnme.Record) + def arrayOf(tpt: Tree) = AppliedTypeTree(scalaDot(tpnme.Array), List(tpt)) @@ -564,6 +566,16 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { def definesInterface(token: Int) = token == INTERFACE || token == AT + /** If the next token is the identifier "record", convert it into a proper + * token. Technically, "record" is just a restricted identifier. However, + * once we've figured out that it is in a position where it identifies a + * "record" class, it is much more convenient to promote it to a token. + */ + def adaptRecordIdentifier(): Unit = { + if (in.token == IDENTIFIER && in.name.toString == "record") + in.token = RECORD + } + def termDecl(mods: Modifiers, parentToken: Int): List[Tree] = { val inInterface = definesInterface(parentToken) val tparams = if (in.token == LT) typeParams() else List() @@ -587,6 +599,10 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { DefDef(mods, nme.CONSTRUCTOR, tparams, List(vparams), TypeTree(), methodBody()) } } + } else if (in.token == LBRACE && parentToken == RECORD) { + // compact constructor + methodBody() + List.empty } else { var mods1 = mods if (mods hasFlag Flags.ABSTRACT) mods1 = mods &~ Flags.ABSTRACT | Flags.DEFERRED @@ -721,11 +737,14 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { } } - def memberDecl(mods: Modifiers, parentToken: Int): List[Tree] = in.token match { - case CLASS | ENUM | INTERFACE | AT => - typeDecl(if (definesInterface(parentToken)) mods | Flags.STATIC else mods) - case _ => - termDecl(mods, parentToken) + def memberDecl(mods: Modifiers, parentToken: Int): List[Tree] = { + adaptRecordIdentifier() + in.token match { + case CLASS | ENUM | RECORD | INTERFACE | AT => + typeDecl(if (definesInterface(parentToken)) mods | Flags.STATIC else mods) + case _ => + termDecl(mods, parentToken) + } } def makeCompanionObject(cdef: ClassDef, statics: List[Tree]): Tree = @@ -808,6 +827,61 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { }) } + def recordDecl(mods: Modifiers): List[Tree] = { + accept(RECORD) + val pos = in.currentPos + val name = identForType() + val tparams = typeParams() + val header = formalParams() + val superclass = javaLangRecord() + val interfaces = interfacesOpt() + val (statics, body) = typeBody(RECORD, name) + + // Records generate a canonical constructor 
and accessors, unless they are manually specified + var generateCanonicalCtor = true + var generateAccessors = header + .view + .map { case ValDef(_, name, tpt, _) => name -> tpt } + .toMap + for (DefDef(_, name, List(), List(params), tpt, _) <- body) { + if (name == nme.CONSTRUCTOR && params.size == header.size) { + val ctorParamsAreCanonical = params.lazyZip(header).forall { + case (ValDef(_, _, tpt1, _), ValDef(_, _, tpt2, _)) => tpt1 equalsStructure tpt2 + case _ => false + } + if (ctorParamsAreCanonical) generateCanonicalCtor = false + } else if (generateAccessors.contains(name) && params.isEmpty) { + generateAccessors -= name + } + } + + // Generate canonical constructor and accessors, if not already manually specified + val accessors = generateAccessors + .map { case (name, tpt) => + DefDef(Modifiers(Flags.JAVA), name, List(), List(), tpt, blankExpr) + } + .toList + val canonicalCtor = Option.when(generateCanonicalCtor) { + DefDef( + Modifiers(Flags.JAVA), + nme.CONSTRUCTOR, + List(), + List(header), + TypeTree(), + blankExpr + ) + } + + addCompanionObject(statics, atPos(pos) { + ClassDef( + mods | Flags.FINAL, + name, + tparams, + makeTemplate(superclass :: interfaces, canonicalCtor.toList ++ accessors ++ body) + ) + }) + } + def interfaceDecl(mods: Modifiers): List[Tree] = { accept(INTERFACE) val pos = in.currentPos @@ -847,7 +921,10 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { } else if (in.token == SEMI) { in.nextToken() } else { - if (in.token == ENUM || definesInterface(in.token)) mods |= Flags.STATIC + + // See "14.3. Local Class and Interface Declarations" + if (in.token == ENUM || in.token == RECORD || definesInterface(in.token)) + mods |= Flags.STATIC val decls = joinComment(memberDecl(mods, parentToken)) @tailrec @@ -956,12 +1033,16 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { (res, hasClassBody) } - def typeDecl(mods: Modifiers): List[Tree] = in.token match { - case ENUM => joinComment(enumDecl(mods)) - case INTERFACE => joinComment(interfaceDecl(mods)) - case AT => annotationDecl(mods) - case CLASS => joinComment(classDecl(mods)) - case _ => in.nextToken(); syntaxError("illegal start of type declaration", skipIt = true); List(errorTypeTree) + def typeDecl(mods: Modifiers): List[Tree] = { + adaptRecordIdentifier() + in.token match { + case ENUM => joinComment(enumDecl(mods)) + case INTERFACE => joinComment(interfaceDecl(mods)) + case AT => annotationDecl(mods) + case CLASS => joinComment(classDecl(mods)) + case RECORD => joinComment(recordDecl(mods)) + case _ => in.nextToken(); syntaxError("illegal start of type declaration", skipIt = true); List(errorTypeTree) + } } def tryLiteral(negate: Boolean = false): Option[Constant] = { diff --git a/src/compiler/scala/tools/nsc/javac/JavaTokens.scala b/src/compiler/scala/tools/nsc/javac/JavaTokens.scala index 855fe19e6706..a124d1b90aaa 100644 --- a/src/compiler/scala/tools/nsc/javac/JavaTokens.scala +++ b/src/compiler/scala/tools/nsc/javac/JavaTokens.scala @@ -20,6 +20,7 @@ object JavaTokens extends ast.parser.CommonTokens { /** identifiers */ final val IDENTIFIER = 10 + final val RECORD = 12 // restricted identifier, so not lexed directly def isIdentifier(code: Int) = code == IDENTIFIER diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala index 66dee512f7bd..87eeb58b0c95 100644 --- a/src/reflect/scala/reflect/internal/StdNames.scala +++ b/src/reflect/scala/reflect/internal/StdNames.scala @@ -264,6 +264,7 @@ 
trait StdNames { final val Object: NameType = nameType("Object") final val PrefixType: NameType = nameType("PrefixType") final val Product: NameType = nameType("Product") + final val Record: NameType = nameType("Record") final val Serializable: NameType = nameType("Serializable") final val Singleton: NameType = nameType("Singleton") final val Throwable: NameType = nameType("Throwable") diff --git a/test/files/pos/t11908/C.scala b/test/files/pos/t11908/C.scala new file mode 100644 index 000000000000..e5b63c595360 --- /dev/null +++ b/test/files/pos/t11908/C.scala @@ -0,0 +1,55 @@ +// javaVersion: 16+ +object C { + + def useR1 = { + // constructor signature + val r1 = new R1(123, "hello") + + // accessors signature + val i: Int = r1.i + val s: String = r1.s + + // method + val s2: String = r1.someMethod() + + // supertype + val isRecord: java.lang.Record = r1 + + () + } + + def useR2 = { + // constructor signature + val r2 = new R2(123, "hello") + + // accessors signature + val i: Int = r2.i + val s: String = r2.s + + // method + val i2: Int = r2.getInt + + // supertype + val isIntLike: IntLike = r2 + val isRecord: java.lang.Record = r2 + + () + } + + def useR3 = { + // constructor signature + val r3 = new R3(123, 42L, "hi") + new R3("hi", 123) + + // accessors signature + val i: Int = r3.i + val l: Long = r3.l + val s: String = r3.s + + // method + val l2: Long = r3.l(43L, 44L) + + // supertype + val isRecord: java.lang.Record = r3 + } +} diff --git a/test/files/pos/t11908/IntLike.scala b/test/files/pos/t11908/IntLike.scala new file mode 100644 index 000000000000..9e45fd43bc98 --- /dev/null +++ b/test/files/pos/t11908/IntLike.scala @@ -0,0 +1,4 @@ +// javaVersion: 16+ +trait IntLike { + def getInt: Int +} diff --git a/test/files/pos/t11908/R1.java b/test/files/pos/t11908/R1.java new file mode 100644 index 000000000000..350ac64b987e --- /dev/null +++ b/test/files/pos/t11908/R1.java @@ -0,0 +1,7 @@ +// javaVersion: 16+ +record R1(int i, String s) { + + public String someMethod() { + return s + "!"; + } +} diff --git a/test/files/pos/t11908/R2.java b/test/files/pos/t11908/R2.java new file mode 100644 index 000000000000..3c4725354bce --- /dev/null +++ b/test/files/pos/t11908/R2.java @@ -0,0 +1,12 @@ +// javaVersion: 16+ +final record R2(int i, String s) implements IntLike { + public int getInt() { + return i; + } + + // Canonical constructor + public R2(int i, String s) { + this.i = i; + this.s = s.intern(); + } +} diff --git a/test/files/pos/t11908/R3.java b/test/files/pos/t11908/R3.java new file mode 100644 index 000000000000..03a06dfc6f37 --- /dev/null +++ b/test/files/pos/t11908/R3.java @@ -0,0 +1,23 @@ +// javaVersion: 16+ +public record R3(int i, long l, String s) { + + // User-specified accessor + public int i() { + return i + 1; // evil >:) + } + + // Not an accessor - too many parameters + public long l(long a1, long a2) { + return a1 + a2; + } + + // Secondary constructor + public R3(String s, int i) { + this(i, 42L, s); + } + + // Compact constructor + public R3 { + s = s.intern(); + } +} From 98da2599b48e76ed45090ee2d64a49b49b4d8c3a Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Fri, 23 Apr 2021 17:32:57 +0200 Subject: [PATCH 123/769] Support writing `&` instead of `with` in types under `-Xsource:3` Instead of: val x: A with B = new A with B {} One can now write: val x: A & B = new A with B {} However mixing `&` with other infix operators is not allowed, because unlike Scala 3, we do not take operator precedence into account, cf #6147. 
This implementation is a bit more restrictive than the Scala 3 one which allows shadowing the built-in `&` with your own `&` type operator, but this cannot be done with the simple parser-based approach of this PR. --- .../scala/tools/nsc/ast/parser/Parsers.scala | 39 +++++++++++++++++-- .../scala/reflect/internal/StdNames.scala | 3 ++ test/files/neg/and-future.check | 7 ++++ test/files/neg/and-future.scala | 14 +++++++ test/files/pos/and-future.scala | 17 ++++++++ 5 files changed, 77 insertions(+), 3 deletions(-) create mode 100644 test/files/neg/and-future.check create mode 100644 test/files/neg/and-future.scala create mode 100644 test/files/pos/and-future.scala diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 8151f958aeb6..42767df41f78 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -1056,13 +1056,14 @@ self => else { ts foreach checkNotByNameOrVarargs val tuple = atPos(start) { makeSafeTupleType(ts) } - infixTypeRest( + val tpt = infixTypeRest( compoundTypeRest( annotTypeRest( simpleTypeRest( tuple))), InfixMode.FirstOp ) + if (currentRun.isScala3) andType(tpt) else tpt } } private def makeExistentialTypeTree(t: Tree) = { @@ -1228,12 +1229,44 @@ self => else t } + def andType(tpt: Tree): Tree = { + val parents = ListBuffer.empty[Tree] + var otherInfixOp: Tree = EmptyTree + def collect(tpt: Tree): Unit = tpt match { + case AppliedTypeTree(op @ Ident(tpnme.AND), List(left, right)) => + collect(left) + collect(right) + case AppliedTypeTree(op, args) if args.exists(arg => arg.pos.start < op.pos.point) => + otherInfixOp = op + parents += treeCopy.AppliedTypeTree(tpt, op, args.map(andType)) + case _ => + parents += tpt + } + collect(tpt) + if (parents.lengthCompare(1) > 0) { + if (!otherInfixOp.isEmpty) { + // TODO: Unlike Scala 3, we do not take precedence into account when + // parsing infix types, there's an unmerged PR that attempts to + // change that (#6147), but until that's merged we cannot accurately + // parse things like `A Map B & C`, so give up and emit an error + // rather than continuing with an incorrect parse tree. 
+ syntaxError(otherInfixOp.pos.point, + s"Cannot parse infix type combining `&` and `$otherInfixOp`, please use `$otherInfixOp` as the head of a regular type application.") + } + atPos(tpt.pos.start)(CompoundTypeTree(Template(parents.toList, noSelfType, Nil))) + } + else + parents.head + } + /** {{{ * InfixType ::= CompoundType {id [nl] CompoundType} * }}} */ - def infixType(mode: InfixMode.Value): Tree = - placeholderTypeBoundary { infixTypeRest(compoundType(), mode) } + def infixType(mode: InfixMode.Value): Tree = { + val tpt = placeholderTypeBoundary { infixTypeRest(compoundType(), mode) } + if (currentRun.isScala3) andType(tpt) else tpt + } /** {{{ * Types ::= Type {`,` Type} diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala index 66dee512f7bd..3d944af6c26d 100644 --- a/src/reflect/scala/reflect/internal/StdNames.scala +++ b/src/reflect/scala/reflect/internal/StdNames.scala @@ -322,6 +322,9 @@ trait StdNames { final val scala_ : NameType = nameType("scala") + // Scala 3 special type + val AND: NameType = nme.AND.toTypeName + def dropSingletonName(name: Name): TypeName = (name dropRight SINGLETON_SUFFIX.length).toTypeName def singletonName(name: Name): TypeName = (name append SINGLETON_SUFFIX).toTypeName } diff --git a/test/files/neg/and-future.check b/test/files/neg/and-future.check new file mode 100644 index 000000000000..c7992b38964e --- /dev/null +++ b/test/files/neg/and-future.check @@ -0,0 +1,7 @@ +and-future.scala:9: error: Cannot parse infix type combining `&` and `Map`, please use `Map` as the head of a regular type application. + val b: Int Map X & Int Map Y = Map[Int, X & Y]() // error: unsupported + ^ +and-future.scala:13: error: Cannot parse infix type combining `&` and `Map`, please use `Map` as the head of a regular type application. + val c: (Int Map X) & (Int Map Y) = Map[Int, X & Y]() // error: unsupported + ^ +2 errors diff --git a/test/files/neg/and-future.scala b/test/files/neg/and-future.scala new file mode 100644 index 000000000000..1092c013b186 --- /dev/null +++ b/test/files/neg/and-future.scala @@ -0,0 +1,14 @@ +// scalac: -Xsource:3 +// + +trait X +trait Y + +class Test { + val a: Map[Int, X] & Map[Int, Y] = Map[Int, X & Y]() // ok + val b: Int Map X & Int Map Y = Map[Int, X & Y]() // error: unsupported + + // This one is unambiguous but it's hard to check whether parens were present + // from the parser output so we also emit an error there. 
+ val c: (Int Map X) & (Int Map Y) = Map[Int, X & Y]() // error: unsupported +} diff --git a/test/files/pos/and-future.scala b/test/files/pos/and-future.scala new file mode 100644 index 000000000000..f7e15e822ecc --- /dev/null +++ b/test/files/pos/and-future.scala @@ -0,0 +1,17 @@ +// scalac: -Xsource:3 +// + +trait X +trait Y + +class Test[A, B <: A & AnyRef] { + def foo[T >: A & Null <: A & AnyRef & Any](x: T & ""): "" & T = x + + val a: X & Y & AnyRef = new X with Y {} + val b: (X & Y) & AnyRef = new X with Y {} + val c: X & (Y & AnyRef) = new X with Y {} + + val d: X & Y = c match { + case xy: (X & Y) => xy + } +} From 86797543ae74ddc418e254c232be4e86710233b5 Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Fri, 23 Apr 2021 17:32:57 +0200 Subject: [PATCH 124/769] Support writing `&` instead of `with` in types under `-Xsource:3` Instead of: val x: A with B = new A with B {} One can now write: val x: A & B = new A with B {} However mixing `&` with other infix operators is not allowed, because unlike Scala 3, we do not take operator precedence into account, cf #6147. This implementation is a bit more restrictive than the Scala 3 one which allows shadowing the built-in `&` with your own `&` type operator, but this cannot be done with the simple parser-based approach of this PR. --- .../scala/tools/nsc/ast/parser/Parsers.scala | 39 +++++++++++++++++-- .../scala/reflect/internal/StdNames.scala | 3 ++ test/files/neg/and-future.check | 7 ++++ test/files/neg/and-future.scala | 14 +++++++ test/files/pos/and-future.scala | 17 ++++++++ 5 files changed, 77 insertions(+), 3 deletions(-) create mode 100644 test/files/neg/and-future.check create mode 100644 test/files/neg/and-future.scala create mode 100644 test/files/pos/and-future.scala diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 479fad69a2aa..7df4b3a5b0b0 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -1008,13 +1008,14 @@ self => else { ts foreach checkNotByNameOrVarargs val tuple = atPos(start) { makeSafeTupleType(ts, start) } - infixTypeRest( + val tpt = infixTypeRest( compoundTypeRest( annotTypeRest( simpleTypeRest( tuple))), InfixMode.FirstOp ) + if (currentRun.isScala3) andType(tpt) else tpt } } } @@ -1163,12 +1164,44 @@ self => else t } + def andType(tpt: Tree): Tree = { + val parents = ListBuffer.empty[Tree] + var otherInfixOp: Tree = EmptyTree + def collect(tpt: Tree): Unit = tpt match { + case AppliedTypeTree(op @ Ident(tpnme.AND), List(left, right)) => + collect(left) + collect(right) + case AppliedTypeTree(op, args) if args.exists(arg => arg.pos.start < op.pos.point) => + otherInfixOp = op + parents += treeCopy.AppliedTypeTree(tpt, op, args.map(andType)) + case _ => + parents += tpt + } + collect(tpt) + if (parents.lengthCompare(1) > 0) { + if (!otherInfixOp.isEmpty) { + // TODO: Unlike Scala 3, we do not take precedence into account when + // parsing infix types, there's an unmerged PR that attempts to + // change that (#6147), but until that's merged we cannot accurately + // parse things like `A Map B & C`, so give up and emit an error + // rather than continuing with an incorrect parse tree. 
+ syntaxError(otherInfixOp.pos.point, + s"Cannot parse infix type combining `&` and `$otherInfixOp`, please use `$otherInfixOp` as the head of a regular type application.") + } + atPos(tpt.pos.start)(CompoundTypeTree(Template(parents.toList, noSelfType, Nil))) + } + else + parents.head + } + /** {{{ * InfixType ::= CompoundType {id [nl] CompoundType} * }}} */ - def infixType(mode: InfixMode.Value): Tree = - placeholderTypeBoundary { infixTypeRest(compoundType(), mode) } + def infixType(mode: InfixMode.Value): Tree = { + val tpt = placeholderTypeBoundary { infixTypeRest(compoundType(), mode) } + if (currentRun.isScala3) andType(tpt) else tpt + } /** {{{ * Types ::= Type {`,' Type} diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala index b7e4e901fbd5..6d688cfa0866 100644 --- a/src/reflect/scala/reflect/internal/StdNames.scala +++ b/src/reflect/scala/reflect/internal/StdNames.scala @@ -319,6 +319,9 @@ trait StdNames { final val scala_ : NameType = "scala" + // Scala 3 special type + val AND: NameType = nme.AND.toTypeName + def dropSingletonName(name: Name): TypeName = (name dropRight SINGLETON_SUFFIX.length).toTypeName def singletonName(name: Name): TypeName = (name append SINGLETON_SUFFIX).toTypeName } diff --git a/test/files/neg/and-future.check b/test/files/neg/and-future.check new file mode 100644 index 000000000000..6e2ea02e49c5 --- /dev/null +++ b/test/files/neg/and-future.check @@ -0,0 +1,7 @@ +and-future.scala:9: error: Cannot parse infix type combining `&` and `Map`, please use `Map` as the head of a regular type application. + val b: Int Map X & Int Map Y = Map[Int, X & Y]() // error: unsupported + ^ +and-future.scala:13: error: Cannot parse infix type combining `&` and `Map`, please use `Map` as the head of a regular type application. + val c: (Int Map X) & (Int Map Y) = Map[Int, X & Y]() // error: unsupported + ^ +two errors found diff --git a/test/files/neg/and-future.scala b/test/files/neg/and-future.scala new file mode 100644 index 000000000000..1092c013b186 --- /dev/null +++ b/test/files/neg/and-future.scala @@ -0,0 +1,14 @@ +// scalac: -Xsource:3 +// + +trait X +trait Y + +class Test { + val a: Map[Int, X] & Map[Int, Y] = Map[Int, X & Y]() // ok + val b: Int Map X & Int Map Y = Map[Int, X & Y]() // error: unsupported + + // This one is unambiguous but it's hard to check whether parens were present + // from the parser output so we also emit an error there. 
+ val c: (Int Map X) & (Int Map Y) = Map[Int, X & Y]() // error: unsupported +} diff --git a/test/files/pos/and-future.scala b/test/files/pos/and-future.scala new file mode 100644 index 000000000000..b09e0e8ce411 --- /dev/null +++ b/test/files/pos/and-future.scala @@ -0,0 +1,17 @@ +// scalac: -Xsource:3 +// + +trait X +trait Y + +class Test[A, B <: A & AnyRef] { + def foo[T >: A & Null <: A & AnyRef & Any](x: T & String): String & T = x + + val a: X & Y & AnyRef = new X with Y {} + val b: (X & Y) & AnyRef = new X with Y {} + val c: X & (Y & AnyRef) = new X with Y {} + + val d: X & Y = c match { + case xy: (X & Y) => xy + } +} From 78440d7698bcbe12946ba39c6ecad4f1ddf57026 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Mon, 26 Apr 2021 07:38:35 -0700 Subject: [PATCH 125/769] sbt 1.5.1 (was 1.5.0) --- project/build.properties | 2 +- scripts/common | 2 +- src/intellij/scala.ipr.SAMPLE | 36 ++++++++++++------------ test/benchmarks/project/build.properties | 2 +- test/jcstress/project/build.properties | 2 +- 5 files changed, 22 insertions(+), 22 deletions(-) diff --git a/project/build.properties b/project/build.properties index e67343ae796c..f0be67b9f729 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.5.0 +sbt.version=1.5.1 diff --git a/scripts/common b/scripts/common index d5f3f715b496..82c41790df0e 100644 --- a/scripts/common +++ b/scripts/common @@ -11,7 +11,7 @@ else fi SBT_CMD=${SBT_CMD-sbt} -SBT_CMD="$SBT_CMD -sbt-version 1.5.0" +SBT_CMD="$SBT_CMD -sbt-version 1.5.1" # repo to publish builds integrationRepoUrl=${integrationRepoUrl-"https://scala-ci.typesafe.com/artifactory/scala-integration/"} diff --git a/src/intellij/scala.ipr.SAMPLE b/src/intellij/scala.ipr.SAMPLE index c6b626692a57..a6057f96db8b 100644 --- a/src/intellij/scala.ipr.SAMPLE +++ b/src/intellij/scala.ipr.SAMPLE @@ -325,13 +325,13 @@ - + - + - - + + @@ -355,7 +355,7 @@ - + @@ -369,8 +369,8 @@ - - + + @@ -382,13 +382,13 @@ - + - + @@ -402,16 +402,16 @@ - + - + - + - + @@ -435,18 +435,18 @@ - - + + - + - + - + diff --git a/test/benchmarks/project/build.properties b/test/benchmarks/project/build.properties index e67343ae796c..f0be67b9f729 100644 --- a/test/benchmarks/project/build.properties +++ b/test/benchmarks/project/build.properties @@ -1 +1 @@ -sbt.version=1.5.0 +sbt.version=1.5.1 diff --git a/test/jcstress/project/build.properties b/test/jcstress/project/build.properties index e67343ae796c..f0be67b9f729 100644 --- a/test/jcstress/project/build.properties +++ b/test/jcstress/project/build.properties @@ -1 +1 @@ -sbt.version=1.5.0 +sbt.version=1.5.1 From 320102e4d68c01a9f2fdda111dc1d539a2e8c379 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Mon, 26 Apr 2021 16:48:38 +0200 Subject: [PATCH 126/769] Member records are static and a few simplifications --- .../scala/tools/nsc/javac/JavaParsers.scala | 30 +++++++++---------- test/files/pos/t11908/C.scala | 2 +- test/files/pos/t11908/R2.java | 20 +++++++------ 3 files changed, 27 insertions(+), 25 deletions(-) diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala index c1d1b8924dbb..7dcfacdb3c2e 100644 --- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala +++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala @@ -599,7 +599,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { DefDef(mods, nme.CONSTRUCTOR, tparams, List(vparams), TypeTree(), methodBody()) } } - } else if (in.token == LBRACE && parentToken == RECORD) { + } else if 
(in.token == LBRACE && rtptName != nme.EMPTY && parentToken == RECORD) { // compact constructor methodBody() List.empty @@ -738,10 +738,9 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { } def memberDecl(mods: Modifiers, parentToken: Int): List[Tree] = { - adaptRecordIdentifier() in.token match { case CLASS | ENUM | RECORD | INTERFACE | AT => - typeDecl(if (definesInterface(parentToken)) mods | Flags.STATIC else mods) + typeDecl(mods) case _ => termDecl(mods, parentToken) } @@ -821,7 +820,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { javaLangObject() } val interfaces = interfacesOpt() - val (statics, body) = typeBody(CLASS, name) + val (statics, body) = typeBody(CLASS) addCompanionObject(statics, atPos(pos) { ClassDef(mods, name, tparams, makeTemplate(superclass :: interfaces, body)) }) @@ -835,7 +834,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { val header = formalParams() val superclass = javaLangRecord() val interfaces = interfacesOpt() - val (statics, body) = typeBody(RECORD, name) + val (statics, body) = typeBody(RECORD) // Records generate a canonical constructor and accessors, unless they are manually specified var generateCanonicalCtor = true @@ -843,7 +842,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { .view .map { case ValDef(_, name, tpt, _) => name -> tpt } .toMap - for (DefDef(_, name, List(), List(params), tpt, _) <- body) { + for (DefDef(_, name, List(), List(params), _, _) <- body) { if (name == nme.CONSTRUCTOR && params.size == header.size) { val ctorParamsAreCanonical = params.lazyZip(header).forall { case (ValDef(_, _, tpt1, _), ValDef(_, _, tpt2, _)) => tpt1 equalsStructure tpt2 @@ -858,15 +857,15 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { // Generate canonical constructor and accessors, if not already manually specified val accessors = generateAccessors .map { case (name, tpt) => - DefDef(Modifiers(Flags.JAVA), name, List(), List(), tpt, blankExpr) + DefDef(Modifiers(Flags.JAVA), name, List(), List(), tpt.duplicate, blankExpr) } .toList val canonicalCtor = Option.when(generateCanonicalCtor) { DefDef( - Modifiers(Flags.JAVA), + mods, nme.CONSTRUCTOR, List(), - List(header), + List(header.map(_.duplicate)), TypeTree(), blankExpr ) @@ -894,7 +893,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { } else { List(javaLangObject()) } - val (statics, body) = typeBody(INTERFACE, name) + val (statics, body) = typeBody(INTERFACE) addCompanionObject(statics, atPos(pos) { ClassDef(mods | Flags.TRAIT | Flags.INTERFACE | Flags.ABSTRACT, name, tparams, @@ -902,14 +901,14 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { }) } - def typeBody(leadingToken: Int, parentName: Name): (List[Tree], List[Tree]) = { + def typeBody(leadingToken: Int): (List[Tree], List[Tree]) = { accept(LBRACE) - val defs = typeBodyDecls(leadingToken, parentName) + val defs = typeBodyDecls(leadingToken) accept(RBRACE) defs } - def typeBodyDecls(parentToken: Int, parentName: Name): (List[Tree], List[Tree]) = { + def typeBodyDecls(parentToken: Int): (List[Tree], List[Tree]) = { val inInterface = definesInterface(parentToken) val statics = new ListBuffer[Tree] val members = new ListBuffer[Tree] @@ -923,6 +922,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { } else { // See "14.3. 
Local Class and Interface Declarations" + adaptRecordIdentifier() if (in.token == ENUM || in.token == RECORD || definesInterface(in.token)) mods |= Flags.STATIC val decls = joinComment(memberDecl(mods, parentToken)) @@ -948,7 +948,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { accept(INTERFACE) val pos = in.currentPos val name = identForType() - val (statics, body) = typeBody(AT, name) + val (statics, body) = typeBody(AT) val templ = makeTemplate(annotationParents, body) addCompanionObject(statics, atPos(pos) { import Flags._ @@ -985,7 +985,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { val (statics, body) = if (in.token == SEMI) { in.nextToken() - typeBodyDecls(ENUM, name) + typeBodyDecls(ENUM) } else { (List(), List()) } diff --git a/test/files/pos/t11908/C.scala b/test/files/pos/t11908/C.scala index e5b63c595360..615277efc50b 100644 --- a/test/files/pos/t11908/C.scala +++ b/test/files/pos/t11908/C.scala @@ -20,7 +20,7 @@ object C { def useR2 = { // constructor signature - val r2 = new R2(123, "hello") + val r2 = new R2.R(123, "hello") // accessors signature val i: Int = r2.i diff --git a/test/files/pos/t11908/R2.java b/test/files/pos/t11908/R2.java index 3c4725354bce..52fb72b26e5e 100644 --- a/test/files/pos/t11908/R2.java +++ b/test/files/pos/t11908/R2.java @@ -1,12 +1,14 @@ // javaVersion: 16+ -final record R2(int i, String s) implements IntLike { - public int getInt() { - return i; - } +public class R2 { + final record R(int i, String s) implements IntLike { + public int getInt() { + return i; + } - // Canonical constructor - public R2(int i, String s) { - this.i = i; - this.s = s.intern(); + // Canonical constructor + public R(int i, String s) { + this.i = i; + this.s = s.intern(); + } } -} +} \ No newline at end of file From 9de0851096a753ce00d1d360e7b3b7b19a944e05 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Fri, 23 Apr 2021 14:15:33 +0100 Subject: [PATCH 127/769] Cleanup benchmark files Remove the old project files, as it's no longer a standalone build. And move some of its files into the right place. 
--- test/benchmarks/project/build.properties | 1 - test/benchmarks/project/plugins.sbt | 1 - .../scala/scala/{ => collection}/BitManipulationBenchmark.scala | 0 .../main/scala/{ => scala}/reflect/internal/LubBenchmark.scala | 0 .../scala/{ => scala}/reflect/internal/SymbolBenchmark.scala | 0 5 files changed, 2 deletions(-) delete mode 100644 test/benchmarks/project/build.properties delete mode 100644 test/benchmarks/project/plugins.sbt rename test/benchmarks/src/main/scala/scala/{ => collection}/BitManipulationBenchmark.scala (100%) rename test/benchmarks/src/main/scala/{ => scala}/reflect/internal/LubBenchmark.scala (100%) rename test/benchmarks/src/main/scala/{ => scala}/reflect/internal/SymbolBenchmark.scala (100%) diff --git a/test/benchmarks/project/build.properties b/test/benchmarks/project/build.properties deleted file mode 100644 index f0be67b9f729..000000000000 --- a/test/benchmarks/project/build.properties +++ /dev/null @@ -1 +0,0 @@ -sbt.version=1.5.1 diff --git a/test/benchmarks/project/plugins.sbt b/test/benchmarks/project/plugins.sbt deleted file mode 100644 index b57429f738ec..000000000000 --- a/test/benchmarks/project/plugins.sbt +++ /dev/null @@ -1 +0,0 @@ -addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.2.27") diff --git a/test/benchmarks/src/main/scala/scala/BitManipulationBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/BitManipulationBenchmark.scala similarity index 100% rename from test/benchmarks/src/main/scala/scala/BitManipulationBenchmark.scala rename to test/benchmarks/src/main/scala/scala/collection/BitManipulationBenchmark.scala diff --git a/test/benchmarks/src/main/scala/reflect/internal/LubBenchmark.scala b/test/benchmarks/src/main/scala/scala/reflect/internal/LubBenchmark.scala similarity index 100% rename from test/benchmarks/src/main/scala/reflect/internal/LubBenchmark.scala rename to test/benchmarks/src/main/scala/scala/reflect/internal/LubBenchmark.scala diff --git a/test/benchmarks/src/main/scala/reflect/internal/SymbolBenchmark.scala b/test/benchmarks/src/main/scala/scala/reflect/internal/SymbolBenchmark.scala similarity index 100% rename from test/benchmarks/src/main/scala/reflect/internal/SymbolBenchmark.scala rename to test/benchmarks/src/main/scala/scala/reflect/internal/SymbolBenchmark.scala From 310ae6f72b4f562452bf2d0dcc8c2defd6651b90 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Mon, 26 Apr 2021 21:45:26 +0200 Subject: [PATCH 128/769] travis notifications on slack only for scala/scala --- .travis.yml | 2 +- build.sbt | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index 3cd3bd0f46a2..cfb5e32e83d9 100644 --- a/.travis.yml +++ b/.travis.yml @@ -170,7 +170,7 @@ notifications: slack: rooms: - typesafe:WoewGgHil2FkdGzJyV3phAhj - if: type = cron OR type = push + if: (type = cron OR type = push) AND repo = scala/scala on_success: never on_failure: change webhooks: https://scala-ci.typesafe.com/benchq/webhooks/travis diff --git a/build.sbt b/build.sbt index cbf59c39444b..c3a8851545b0 100644 --- a/build.sbt +++ b/build.sbt @@ -3,7 +3,7 @@ * * What you see below is very much work-in-progress. 
The following features are implemented: * - Compiling all classes for the compiler and library ("compile" in the respective subprojects) - * - Running JUnit ("junit/test"), ScalaCheck ("scalacheck/test"), and partest ("test/it:test") tests + * - Running JUnit ("junit/test"), ScalaCheck ("scalacheck/test"), and partest ("test/IntegrationTest/test") tests * - Creating build/quick with all compiled classes and launcher scripts ("dist/mkQuick") * - Creating build/pack with all JARs and launcher scripts ("dist/mkPack") * - Building all scaladoc sets ("doc") @@ -1206,7 +1206,7 @@ def generateServiceProviderResources(services: (String, String)*): Setting[_] = // Add tab completion to partest commands += Command("partest")(_ => PartestUtil.partestParser((ThisBuild / baseDirectory).value, (ThisBuild / baseDirectory).value / "test")) { (state, parsed) => - ("test/it:testOnly -- " + parsed) :: state + ("test/IntegrationTest/testOnly -- " + parsed) :: state } // Watch the test files also so ~partest triggers on test case changes From 807beb63be1260d08c28b6c520ec8d6d98f5ca99 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Fri, 23 Apr 2021 14:58:00 +0100 Subject: [PATCH 129/769] Benchmark and simplify AlmostFinalValue --- build.sbt | 3 +- .../internal/util/AlmostFinalValue.java | 104 ++++-------------- .../reflect/internal/util/Statistics.scala | 4 +- .../internal/util/StatisticsStatics.java | 48 ++------ .../AlmostFinalValueBenchmarkStatics.java | 12 ++ .../util/AlmostFinalValueBenchmark.scala | 56 ++++++++++ 6 files changed, 103 insertions(+), 124 deletions(-) create mode 100644 test/benchmarks/src/main/java/scala/reflect/internal/util/AlmostFinalValueBenchmarkStatics.java create mode 100644 test/benchmarks/src/main/scala/scala/reflect/internal/util/AlmostFinalValueBenchmark.scala diff --git a/build.sbt b/build.sbt index 4bffeca7cf91..aa59c8ec96de 100644 --- a/build.sbt +++ b/build.sbt @@ -663,12 +663,13 @@ lazy val bench = project.in(file("test") / "benchmarks") name := "test-benchmarks", autoScalaLibrary := false, crossPaths := true, // needed to enable per-scala-version source directories (https://github.com/sbt/sbt/pull/1799) + compileOrder := CompileOrder.JavaThenScala, // to allow inlining from Java ("... is defined in a Java source (mixed compilation), no bytecode is available") libraryDependencies += "org.openjdk.jol" % "jol-core" % "0.10", libraryDependencies ++= { if (benchmarkScalaVersion == "") Nil else "org.scala-lang" % "scala-compiler" % benchmarkScalaVersion :: Nil }, - scalacOptions ++= Seq("-feature", "-opt:l:inline", "-opt-inline-from:scala.**") + scalacOptions ++= Seq("-feature", "-opt:l:inline", "-opt-inline-from:scala/**", "-opt-warnings"), ).settings(inConfig(JmhPlugin.JmhKeys.Jmh)(scalabuild.JitWatchFilePlugin.jitwatchSettings)) diff --git a/src/reflect/scala/reflect/internal/util/AlmostFinalValue.java b/src/reflect/scala/reflect/internal/util/AlmostFinalValue.java index 415f91f9a8ff..f9bb24f00a85 100644 --- a/src/reflect/scala/reflect/internal/util/AlmostFinalValue.java +++ b/src/reflect/scala/reflect/internal/util/AlmostFinalValue.java @@ -14,93 +14,35 @@ import java.lang.invoke.MethodHandle; import java.lang.invoke.MethodHandles; -import java.lang.invoke.MethodType; import java.lang.invoke.MutableCallSite; -import java.lang.invoke.SwitchPoint; /** * Represents a value that is wrapped with JVM machinery to allow the JVM - * to speculate on its content and effectively optimize it as if it was final. - * - * This file has been drawn from JSR292 cookbook created by Rémi Forax. 
- * https://code.google.com/archive/p/jsr292-cookbook/. The explanation of the strategy - * can be found in https://community.oracle.com/blogs/forax/2011/12/17/jsr-292-goodness-almost-static-final-field. - * - * Before copying this file to the repository, I tried to adapt the most important - * parts of this implementation and special case it for `Statistics`, but that - * caused an important performance penalty (~10%). This performance penalty is - * due to the fact that using `static`s for the method handles and all the other + * to speculate on its content and effectively optimize it as if it was a constant. + * + * Originally from the JSR-292 cookbook created by Rémi Forax: + * https://code.google.com/archive/p/jsr292-cookbook/. + * + * Implemented in Java because using `static`s for the method handles and all the other * fields is extremely important for the JVM to correctly optimize the code, and * we cannot do that if we make `Statistics` an object extending `MutableCallSite` - * in Scala. We instead rely on the Java implementation that uses a boxed representation. + * in Scala. + * + * Subsequently specialised for booleans, to avoid needless Boolean boxing. + * + * Finally reworked to default to false and only allow for the value to be toggled on, + * using Rémi Forax's newer "MostlyConstant" as inspiration, in https://github.com/forax/exotic. */ -public class AlmostFinalValue { - private final AlmostFinalCallSite callsite = - new AlmostFinalCallSite(this); - - protected boolean initialValue() { - return false; - } - - public MethodHandle createGetter() { - return callsite.dynamicInvoker(); - } - - public void setValue(boolean value) { - callsite.setValue(value); - } - - private static class AlmostFinalCallSite extends MutableCallSite { - private Boolean value; - private SwitchPoint switchPoint; - private final AlmostFinalValue volatileFinalValue; - private final MethodHandle fallback; - private final Object lock; - - private static final Boolean NONE = null; - private static final MethodHandle FALLBACK; - static { - try { - FALLBACK = MethodHandles.lookup().findVirtual(AlmostFinalCallSite.class, "fallback", - MethodType.methodType(Boolean.TYPE)); - } catch (NoSuchMethodException|IllegalAccessException e) { - throw new AssertionError(e.getMessage(), e); - } - } - - AlmostFinalCallSite(AlmostFinalValue volatileFinalValue) { - super(MethodType.methodType(Boolean.TYPE)); - Object lock = new Object(); - MethodHandle fallback = FALLBACK.bindTo(this); - synchronized(lock) { - value = null; - switchPoint = new SwitchPoint(); - setTarget(fallback); - } - this.volatileFinalValue = volatileFinalValue; - this.lock = lock; - this.fallback = fallback; - } +final class AlmostFinalValue { + private static final MethodHandle K_FALSE = MethodHandles.constant(boolean.class, false); + private static final MethodHandle K_TRUE = MethodHandles.constant(boolean.class, true); + + private final MutableCallSite callsite = new MutableCallSite(K_FALSE); + final MethodHandle invoker = callsite.dynamicInvoker(); - boolean fallback() { - synchronized(lock) { - Boolean value = this.value; - if (value == NONE) { - value = volatileFinalValue.initialValue(); - } - MethodHandle target = switchPoint.guardWithTest(MethodHandles.constant(Boolean.TYPE, value), fallback); - setTarget(target); - return value; - } - } - - void setValue(boolean value) { - synchronized(lock) { - SwitchPoint switchPoint = this.switchPoint; - this.value = value; - this.switchPoint = new SwitchPoint(); - SwitchPoint.invalidateAll(new 
SwitchPoint[] {switchPoint}); - } - } + void toggleOnAndDeoptimize() { + if (callsite.getTarget() == K_TRUE) return; + callsite.setTarget(K_TRUE); + MutableCallSite.syncAll(new MutableCallSite[] { callsite }); } -} \ No newline at end of file +} diff --git a/src/reflect/scala/reflect/internal/util/Statistics.scala b/src/reflect/scala/reflect/internal/util/Statistics.scala index b9ef1220a003..28cb4f133446 100644 --- a/src/reflect/scala/reflect/internal/util/Statistics.scala +++ b/src/reflect/scala/reflect/internal/util/Statistics.scala @@ -301,7 +301,7 @@ quant) @inline final def enabled: Boolean = areColdStatsLocallyEnabled def enabled_=(cond: Boolean) = { if (cond && !enabled) { - StatisticsStatics.enableColdStats() + StatisticsStatics.enableColdStatsAndDeoptimize() areColdStatsLocallyEnabled = true } } @@ -310,7 +310,7 @@ quant) @inline final def hotEnabled: Boolean = enabled && areHotStatsLocallyEnabled def hotEnabled_=(cond: Boolean) = { if (cond && enabled && !areHotStatsLocallyEnabled) { - StatisticsStatics.enableHotStats() + StatisticsStatics.enableHotStatsAndDeoptimize() areHotStatsLocallyEnabled = true } } diff --git a/src/reflect/scala/reflect/internal/util/StatisticsStatics.java b/src/reflect/scala/reflect/internal/util/StatisticsStatics.java index dc9021471d87..d2d27a7af6c7 100644 --- a/src/reflect/scala/reflect/internal/util/StatisticsStatics.java +++ b/src/reflect/scala/reflect/internal/util/StatisticsStatics.java @@ -12,7 +12,6 @@ package scala.reflect.internal.util; -import scala.reflect.internal.util.AlmostFinalValue; import java.lang.invoke.MethodHandle; /** @@ -22,46 +21,15 @@ * which helps performance (see docs to find out why). */ public final class StatisticsStatics { - private static final AlmostFinalValue COLD_STATS = new AlmostFinalValue() { - @Override - protected boolean initialValue() { - return false; - } - }; + private static final AlmostFinalValue COLD_STATS = new AlmostFinalValue(); + private static final AlmostFinalValue HOT_STATS = new AlmostFinalValue(); - private static final AlmostFinalValue HOT_STATS = new AlmostFinalValue() { - @Override - protected boolean initialValue() { - return false; - } - }; + private static final MethodHandle COLD_STATS_GETTER = COLD_STATS.invoker; + private static final MethodHandle HOT_STATS_GETTER = HOT_STATS.invoker; - private static final MethodHandle COLD_STATS_GETTER = COLD_STATS.createGetter(); - private static final MethodHandle HOT_STATS_GETTER = HOT_STATS.createGetter(); - - public static boolean areSomeColdStatsEnabled() throws Throwable { - return (boolean) COLD_STATS_GETTER.invokeExact(); - } + public static boolean areSomeColdStatsEnabled() throws Throwable { return (boolean) COLD_STATS_GETTER.invokeExact(); } + public static boolean areSomeHotStatsEnabled() throws Throwable { return (boolean) HOT_STATS_GETTER.invokeExact(); } - public static boolean areSomeHotStatsEnabled() throws Throwable { - return (boolean) HOT_STATS_GETTER.invokeExact(); - } - - public static void enableColdStats() throws Throwable { - if (!areSomeColdStatsEnabled()) - COLD_STATS.setValue(true); - } - - public static void disableColdStats() { - COLD_STATS.setValue(false); - } - - public static void enableHotStats() throws Throwable { - if (!areSomeHotStatsEnabled()) - HOT_STATS.setValue(true); - } - - public static void disableHotStats() { - HOT_STATS.setValue(false); - } + public static void enableColdStatsAndDeoptimize() { COLD_STATS.toggleOnAndDeoptimize(); } + public static void enableHotStatsAndDeoptimize() { 
HOT_STATS.toggleOnAndDeoptimize(); } } diff --git a/test/benchmarks/src/main/java/scala/reflect/internal/util/AlmostFinalValueBenchmarkStatics.java b/test/benchmarks/src/main/java/scala/reflect/internal/util/AlmostFinalValueBenchmarkStatics.java new file mode 100644 index 000000000000..966adedb44e1 --- /dev/null +++ b/test/benchmarks/src/main/java/scala/reflect/internal/util/AlmostFinalValueBenchmarkStatics.java @@ -0,0 +1,12 @@ +package scala.reflect.internal.util; + +import java.lang.invoke.MethodHandle; + +final class AlmostFinalValueBenchmarkStatics { + static final boolean STATIC_FINAL_FALSE = false; + + private static final AlmostFinalValue ALMOST_FINAL_FALSE = new AlmostFinalValue(); + private static final MethodHandle ALMOST_FINAL_FALSE_GETTER = ALMOST_FINAL_FALSE.invoker; + + static boolean isTrue() throws Throwable { return (boolean) ALMOST_FINAL_FALSE_GETTER.invokeExact(); } +} diff --git a/test/benchmarks/src/main/scala/scala/reflect/internal/util/AlmostFinalValueBenchmark.scala b/test/benchmarks/src/main/scala/scala/reflect/internal/util/AlmostFinalValueBenchmark.scala new file mode 100644 index 000000000000..70d69178cb19 --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/reflect/internal/util/AlmostFinalValueBenchmark.scala @@ -0,0 +1,56 @@ +package scala.reflect.internal.util + +import java.util.concurrent.TimeUnit + +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra.Blackhole + +class AlmostFinalValueBenchSettings extends scala.reflect.runtime.Settings { + val flag = new BooleanSetting(false) + + @inline final def isTrue2: Boolean = AlmostFinalValueBenchmarkStatics.isTrue && flag +} + +object AlmostFinalValueBenchSettings { + implicit class SettingsOps(private val settings: AlmostFinalValueBenchSettings) extends AnyVal { + @inline final def isTrue3: Boolean = AlmostFinalValueBenchmarkStatics.isTrue && settings.flag + } + + @inline def isTrue4(settings: AlmostFinalValueBenchSettings): Boolean = + AlmostFinalValueBenchmarkStatics.isTrue && settings.flag +} + +@Warmup(iterations = 10, time = 1, timeUnit = TimeUnit.SECONDS) +@Measurement(iterations = 10, time = 1, timeUnit = TimeUnit.SECONDS) +@Fork(3) +@BenchmarkMode(Array(Mode.AverageTime)) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class AlmostFinalValueBenchmark { + import AlmostFinalValueBenchmarkStatics.STATIC_FINAL_FALSE + val settings = new AlmostFinalValueBenchSettings(); import settings._ + + private def pretendToWorkHard() = Blackhole.consumeCPU(3) + + @Benchmark def bench0_unit = () + @Benchmark def bench0_usingStaticFinalFalse = if (STATIC_FINAL_FALSE && flag) pretendToWorkHard() + @Benchmark def bench0_workingHard = pretendToWorkHard() + + @Benchmark def bench1_usingAlmostFinalFalse = if (AlmostFinalValueBenchmarkStatics.isTrue && flag) pretendToWorkHard() + @Benchmark def bench2_usingInlineMethod = if (settings.isTrue2) pretendToWorkHard() + @Benchmark def bench3_usingExtMethod = if (settings.isTrue3) pretendToWorkHard() + @Benchmark def bench4_usingObjectMethod = if (AlmostFinalValueBenchSettings.isTrue4(settings)) pretendToWorkHard() + +/* + This benchmark is measuring two things: + 1. verifying that using AlmostFinalValue in an if block makes the block a no-op + 2. verifying and comparing which ergonomic wrapper around AlmostFinalValue maintains that + + The first point is satisfied. + + For the second: + 1. inline instance methods add a null-check overhead, slowing it down + 2. extension methods perform as quickly, are very ergonomic and so are the best choice + 3. 
object methods also perform as quickly, but can be less ergonomic if it requires an import +*/ +} From 518e6e076b0a75c4977a876b8ff3d7869f29dcf7 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Mon, 26 Apr 2021 11:05:23 +0100 Subject: [PATCH 130/769] Rework Statistics to be faster & avoid stale state ... by reusing settings, and using their postSetHook to sync their AlmostFinalValue. And use a value class extension method as its API. --- src/compiler/scala/tools/nsc/Global.scala | 7 +-- src/compiler/scala/tools/nsc/MainBench.scala | 5 +- .../nsc/backend/jvm/ClassfileWriters.scala | 4 +- .../backend/jvm/GeneratedClassHandler.scala | 2 +- .../tools/nsc/settings/ScalaSettings.scala | 6 +-- .../scala/tools/nsc/typechecker/Typers.scala | 2 +- .../internal/settings/MutableSettings.scala | 6 +++ .../reflect/internal/util/Statistics.scala | 49 ++++--------------- .../scala/reflect/runtime/Settings.scala | 5 +- 9 files changed, 30 insertions(+), 56 deletions(-) diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index a80c5dbf4d51..ca49e51b198c 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -1283,11 +1283,8 @@ class Global(var currentSettings: Settings, reporter0: Reporter) checkPhaseSettings(including = true, inclusions.toSeq: _*) checkPhaseSettings(including = false, exclusions map (_.value): _*) - // Enable or disable depending on the current setting -- useful for interactive behaviour - statistics.initFromSettings(settings) - // Report the overhead of statistics measurements per every run - if (statistics.areStatisticsLocallyEnabled) + if (settings.areStatisticsEnabled) statistics.reportStatisticsOverhead(reporter) phase = first //parserPhase @@ -1512,7 +1509,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) warnDeprecatedAndConflictingSettings() globalPhase = fromPhase - val timePhases = statistics.areStatisticsLocallyEnabled + val timePhases = settings.areStatisticsEnabled val startTotal = if (timePhases) statistics.startTimer(totalCompileTime) else null while (globalPhase.hasNext && !reporter.hasErrors) { diff --git a/src/compiler/scala/tools/nsc/MainBench.scala b/src/compiler/scala/tools/nsc/MainBench.scala index ca78db7e2dfe..84b3b6e603e7 100644 --- a/src/compiler/scala/tools/nsc/MainBench.scala +++ b/src/compiler/scala/tools/nsc/MainBench.scala @@ -29,9 +29,8 @@ object MainBench extends Driver with EvalLoop { var start = System.nanoTime() for (i <- 0 until NIter) { if (i == NIter-1) { - theCompiler.settings.Ystatistics.value = List("all") - theCompiler.statistics.enabled = true - theCompiler.statistics.hotEnabled = true + theCompiler.settings.Ystatistics.value = List("all") + theCompiler.settings.YhotStatisticsEnabled.value = true } process(args) val end = System.nanoTime() diff --git a/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriters.scala b/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriters.scala index 716a1d6de31f..15bce5921204 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriters.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriters.scala @@ -63,7 +63,7 @@ abstract class ClassfileWriters { def apply(global: Global): ClassfileWriter = { //Note dont import global._ - its too easy to leak non threadsafe structures - import global.{cleanup, log, settings, statistics} + import global.{ cleanup, log, settings } def jarManifestMainClass: Option[String] = settings.mainClass.valueSetByUser.orElse { cleanup.getEntryPoints 
match { case List(name) => Some(name) @@ -91,7 +91,7 @@ abstract class ClassfileWriters { new DebugClassWriter(basicClassWriter, asmp, dump) } - val enableStats = statistics.enabled && settings.YaddBackendThreads.value == 1 + val enableStats = settings.areStatisticsEnabled && settings.YaddBackendThreads.value == 1 if (enableStats) new WithStatsWriter(withAdditionalFormats) else withAdditionalFormats } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala b/src/compiler/scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala index beec1ade9d06..5853b52a3142 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala @@ -59,7 +59,7 @@ private[jvm] object GeneratedClassHandler { new SyncWritingClassHandler(postProcessor) case maxThreads => - if (statistics.enabled) + if (settings.areStatisticsEnabled) runReporting.warning(NoPosition, "jvm statistics are not reliable with multi-threaded jvm class writing", WarningCategory.Other, site = "") val additionalThreads = maxThreads - 1 // The thread pool queue is limited in size. When it's full, the `CallerRunsPolicy` causes diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index d070a7870652..1ba8433e022f 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -23,7 +23,7 @@ import scala.language.existentials import scala.annotation.elidable import scala.tools.util.PathResolver.Defaults import scala.collection.mutable -import scala.reflect.internal.util.StringContextStripMarginOps +import scala.reflect.internal.util.{ StatisticsStatics, StringContextStripMarginOps } import scala.tools.nsc.util.DefaultJarFactory import scala.util.chaining._ @@ -502,9 +502,9 @@ trait ScalaSettings extends StandardScalaSettings with Warnings { _: MutableSett val Ystatistics = PhasesSetting("-Vstatistics", "Print compiler statistics for specific phases", "parser,typer,patmat,erasure,cleanup,jvm") .withPostSetHook(s => YstatisticsEnabled.value = s.value.nonEmpty) .withAbbreviation("-Ystatistics") - val YstatisticsEnabled = BooleanSetting("-Ystatistics-enabled", "Internal setting, indicating that statistics are enabled for some phase.").internalOnly() + val YstatisticsEnabled = BooleanSetting("-Ystatistics-enabled", "Internal setting, indicating that statistics are enabled for some phase.").internalOnly().withPostSetHook(s => if (s) StatisticsStatics.enableColdStatsAndDeoptimize()) val YhotStatisticsEnabled = BooleanSetting("-Vhot-statistics", s"Enable `${Ystatistics.name}` to also print hot statistics.") - .withAbbreviation("-Yhot-statistics") + .withAbbreviation("-Yhot-statistics").withPostSetHook(s => if (s && YstatisticsEnabled) StatisticsStatics.enableHotStatsAndDeoptimize()) val Yshowsyms = BooleanSetting("-Vsymbols", "Print the AST symbol hierarchy after each phase.") withAbbreviation "-Yshow-syms" val Ytyperdebug = BooleanSetting("-Vtyper", "Trace type assignments.") withAbbreviation "-Ytyper-debug" val XlogImplicits = BooleanSetting("-Vimplicits", "Show more detail on why some implicits are not applicable.") diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 54b82ebe4fdf..a3bc5d0615e2 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ 
b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -6003,7 +6003,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def typed(tree: Tree, mode: Mode, pt: Type): Tree = { lastTreeToTyper = tree - val statsEnabled = StatisticsStatics.areSomeHotStatsEnabled() && statistics.areHotStatsLocallyEnabled + val statsEnabled = StatisticsStatics.areSomeHotStatsEnabled && settings.areStatisticsEnabled && settings.YhotStatisticsEnabled val startByType = if (statsEnabled) statistics.pushTimer(byTypeStack, byTypeNanos(tree.getClass)) else null if (statsEnabled) statistics.incCounter(visitsByType, tree.getClass) val shouldPrintTyping = printTypings && !phase.erasedTypes && !noPrintTyping(tree) diff --git a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala index ca8c24d6e8d3..2dfd46dcf71a 100644 --- a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala +++ b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala @@ -16,6 +16,8 @@ package scala package reflect.internal package settings +import scala.reflect.internal.util.StatisticsStatics + /** A mutable Settings object. */ abstract class MutableSettings extends AbsSettings { @@ -72,4 +74,8 @@ object MutableSettings { import scala.language.implicitConversions /** Support the common use case, `if (settings.debug) println("Hello, martin.")` */ @inline implicit def reflectSettingToBoolean(s: MutableSettings#BooleanSetting): Boolean = s.value + + implicit class SettingsOps(private val settings: MutableSettings) extends AnyVal { + @inline final def areStatisticsEnabled = StatisticsStatics.areSomeColdStatsEnabled && settings.YstatisticsEnabled + } } diff --git a/src/reflect/scala/reflect/internal/util/Statistics.scala b/src/reflect/scala/reflect/internal/util/Statistics.scala index 28cb4f133446..ce12b1c7a159 100644 --- a/src/reflect/scala/reflect/internal/util/Statistics.scala +++ b/src/reflect/scala/reflect/internal/util/Statistics.scala @@ -22,57 +22,49 @@ import scala.annotation.nowarn import scala.runtime.LongRef abstract class Statistics(val symbolTable: SymbolTable, settings: MutableSettings) { - - initFromSettings(settings) - - def initFromSettings(currentSettings: MutableSettings): Unit = { - enabled = currentSettings.YstatisticsEnabled - hotEnabled = currentSettings.YhotStatisticsEnabled - } - type TimerSnapshot = (Long, Long) /** If enabled, increment counter by one */ @inline final def incCounter(c: Counter): Unit = { - if (areStatisticsLocallyEnabled && c != null) c.value += 1 + if (enabled && c != null) c.value += 1 } /** If enabled, increment counter by given delta */ @inline final def incCounter(c: Counter, delta: Int): Unit = { - if (areStatisticsLocallyEnabled && c != null) c.value += delta + if (enabled && c != null) c.value += delta } /** If enabled, increment counter in map `ctrs` at index `key` by one */ @inline final def incCounter[K](ctrs: QuantMap[K, Counter], key: K) = - if (areStatisticsLocallyEnabled && ctrs != null) ctrs(key).value += 1 + if (enabled && ctrs != null) ctrs(key).value += 1 /** If enabled, start subcounter. While active it will track all increments of * its base counter. */ @inline final def startCounter(sc: SubCounter): (Int, Int) = - if (areStatisticsLocallyEnabled && sc != null) sc.start() else null + if (enabled && sc != null) sc.start() else null /** If enabled, stop subcounter from tracking its base counter. 
*/ @inline final def stopCounter(sc: SubCounter, start: (Int, Int)): Unit = { - if (areStatisticsLocallyEnabled && sc != null) sc.stop(start) + if (enabled && sc != null) sc.stop(start) } /** If enabled, start timer */ @inline final def startTimer(tm: Timer): TimerSnapshot = - if (areStatisticsLocallyEnabled && tm != null) tm.start() else null + if (enabled && tm != null) tm.start() else null /** If enabled, stop timer */ @inline final def stopTimer(tm: Timer, start: TimerSnapshot): Unit = { - if (areStatisticsLocallyEnabled && tm != null) tm.stop(start) + if (enabled && tm != null) tm.stop(start) } /** If enabled, push and start a new timer in timer stack */ @inline final def pushTimer(timers: TimerStack, timer: => StackableTimer): TimerSnapshot = - if (areStatisticsLocallyEnabled && timers != null) timers.push(timer) else null + if (enabled && timers != null) timers.push(timer) else null /** If enabled, stop and pop timer from timer stack */ @inline final def popTimer(timers: TimerStack, prev: TimerSnapshot): Unit = { - if (areStatisticsLocallyEnabled && timers != null) timers.pop(prev) + if (enabled && timers != null) timers.pop(prev) } /** Create a new counter that shows as `prefix` and is active in given phases */ @@ -294,29 +286,8 @@ quant) } private[this] val qs = new mutable.HashMap[String, Quantity] - private[scala] var areColdStatsLocallyEnabled: Boolean = false - private[scala] var areHotStatsLocallyEnabled: Boolean = false - - /** Represents whether normal statistics can or cannot be enabled. */ - @inline final def enabled: Boolean = areColdStatsLocallyEnabled - def enabled_=(cond: Boolean) = { - if (cond && !enabled) { - StatisticsStatics.enableColdStatsAndDeoptimize() - areColdStatsLocallyEnabled = true - } - } - - /** Represents whether hot statistics can or cannot be enabled. */ - @inline final def hotEnabled: Boolean = enabled && areHotStatsLocallyEnabled - def hotEnabled_=(cond: Boolean) = { - if (cond && enabled && !areHotStatsLocallyEnabled) { - StatisticsStatics.enableHotStatsAndDeoptimize() - areHotStatsLocallyEnabled = true - } - } - /** Tells whether statistics should be definitely reported to the user for this `Global` instance. */ - @inline final def areStatisticsLocallyEnabled: Boolean = areColdStatsLocallyEnabled + @inline final def enabled: Boolean = settings.areStatisticsEnabled import scala.reflect.internal.Reporter /** Reports the overhead of measuring statistics via the nanoseconds variation. */ diff --git a/src/reflect/scala/reflect/runtime/Settings.scala b/src/reflect/scala/reflect/runtime/Settings.scala index 4b8b771f52c5..cfe11d25f8c3 100644 --- a/src/reflect/scala/reflect/runtime/Settings.scala +++ b/src/reflect/scala/reflect/runtime/Settings.scala @@ -15,6 +15,7 @@ package reflect package runtime import scala.reflect.internal.settings.MutableSettings +import scala.reflect.internal.util.StatisticsStatics /** The Settings class for runtime reflection. 
* This should be refined, so that settings are settable via command @@ -57,8 +58,8 @@ private[reflect] class Settings extends MutableSettings { val uniqid = new BooleanSetting(false) val verbose = new BooleanSetting(false) - val YhotStatisticsEnabled = new BooleanSetting(false) - val YstatisticsEnabled = new BooleanSetting(false) + val YhotStatisticsEnabled = new BooleanSetting(false) { override def postSetHook() = if (v && YstatisticsEnabled) StatisticsStatics.enableHotStatsAndDeoptimize() } + val YstatisticsEnabled = new BooleanSetting(false) { override def postSetHook() = if (v) StatisticsStatics.enableColdStatsAndDeoptimize() } val Yrecursion = new IntSetting(0) def isScala212 = true From 97ca3aaae3cf2f1dd1d1c0351e2a7c3d98e78f9b Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Thu, 15 Apr 2021 09:08:14 +0100 Subject: [PATCH 131/769] Put all debug/developer behind an AlmostFinalValue false --- .../scala/tools/nsc/CompilerCommand.scala | 2 +- src/compiler/scala/tools/nsc/Global.scala | 14 ++++++------- .../scala/tools/nsc/MainTokenMetric.scala | 2 +- .../scala/tools/nsc/ast/Positions.scala | 2 +- .../nsc/backend/jvm/BCodeSkelBuilder.scala | 2 +- .../nsc/backend/jvm/BTypesFromSymbols.scala | 2 +- .../scala/tools/nsc/backend/jvm/CodeGen.scala | 2 +- .../jvm/PostProcessorFrontendAccess.scala | 2 +- .../scala/tools/nsc/plugins/Plugins.scala | 2 +- .../scala/tools/nsc/reporters/Reporter.scala | 2 +- .../tools/nsc/settings/ScalaSettings.scala | 4 ++-- .../tools/nsc/symtab/SymbolLoaders.scala | 2 +- .../tools/nsc/symtab/SymbolTrackers.scala | 2 +- .../symtab/classfile/ClassfileParser.scala | 11 +++++----- .../tools/nsc/symtab/classfile/Pickler.scala | 2 +- .../tools/nsc/tasty/bridge/ContextOps.scala | 1 - .../scala/tools/nsc/transform/CleanUp.scala | 2 +- .../scala/tools/nsc/transform/Erasure.scala | 4 ++-- .../transform/TypeAdaptingTransformer.scala | 4 ++-- .../nsc/transform/async/AsyncPhase.scala | 2 +- .../tools/nsc/typechecker/Implicits.scala | 4 ++-- .../scala/tools/nsc/typechecker/Infer.scala | 2 +- .../tools/nsc/typechecker/RefChecks.scala | 6 +++--- .../tools/nsc/typechecker/TreeCheckers.scala | 4 ++-- .../nsc/typechecker/TypeDiagnostics.scala | 2 +- .../tools/nsc/typechecker/TypeStrings.scala | 2 +- .../scala/tools/nsc/typechecker/Typers.scala | 4 ++-- .../nsc/typechecker/TypersTracking.scala | 6 +++--- .../scala/tools/reflect/ToolBoxFactory.scala | 4 ++-- .../scala/tools/nsc/interactive/REPL.scala | 2 +- .../scala/reflect/internal/Kinds.scala | 4 ++-- .../scala/reflect/internal/Mirrors.scala | 2 +- .../scala/reflect/internal/Printers.scala | 6 +++--- .../scala/reflect/internal/SymbolTable.scala | 7 ++++--- .../scala/reflect/internal/Symbols.scala | 18 ++++++++--------- .../scala/reflect/internal/Trees.scala | 2 +- .../scala/reflect/internal/Types.scala | 20 +++++++++---------- .../reflect/internal/pickling/UnPickler.scala | 2 -- .../internal/settings/MutableSettings.scala | 2 ++ .../scala/reflect/internal/tpe/GlbLubs.scala | 2 +- .../reflect/internal/tpe/TypeComparers.scala | 2 +- .../internal/tpe/TypeConstraints.scala | 2 +- .../reflect/internal/tpe/TypeToStrings.scala | 2 +- .../internal/util/StatisticsStatics.java | 8 ++++++++ .../scala/reflect/runtime/JavaMirrors.scala | 2 +- .../scala/reflect/runtime/Settings.scala | 4 ++-- .../scala/reflect/runtime/SymbolTable.scala | 2 +- src/scaladoc/scala/tools/nsc/ScalaDoc.scala | 2 +- .../tools/nsc/doc/ScaladocAnalyzer.scala | 2 +- .../scala/tools/nsc/doc/Uncompilable.scala | 4 +++- 50 files changed, 104 insertions(+), 93 deletions(-) 
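The hunks below switch call sites from settings.debug / settings.developer to settings.isDebug / settings.isDeveloper, but the two-line addition to MutableSettings.scala that defines those extensions only shows up in the diffstat above. As orientation, a minimal sketch of what the added members of the existing SettingsOps plausibly look like, following the `areStatisticsEnabled` extension from the previous commit; the StatisticsStatics accessor names used here are placeholders, not taken from the patch:

  import scala.reflect.internal.util.StatisticsStatics

  // Hypothetical sketch of the SettingsOps members implied by the call sites below.
  // areSomeDebugEnabled / areSomeDeveloperEnabled are placeholder names for the
  // AlmostFinalValue-backed getters added to StatisticsStatics.java (8 lines in the diffstat).
  implicit class SettingsOps(private val settings: MutableSettings) extends AnyVal {
    // The static getter is a JVM-constant false until -Vdebug / -Xdev toggles it and
    // deoptimizes the call sites, so `if (settings.isDebug) ...` folds to a no-op on the hot path.
    @inline final def isDebug: Boolean     = StatisticsStatics.areSomeDebugEnabled && settings.debug
    @inline final def isDeveloper: Boolean = StatisticsStatics.areSomeDeveloperEnabled && settings.developer
  }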
diff --git a/src/compiler/scala/tools/nsc/CompilerCommand.scala b/src/compiler/scala/tools/nsc/CompilerCommand.scala index 5601f96459cc..44c107f55dad 100644 --- a/src/compiler/scala/tools/nsc/CompilerCommand.scala +++ b/src/compiler/scala/tools/nsc/CompilerCommand.scala @@ -113,7 +113,7 @@ class CompilerCommand(arguments: List[String], val settings: Settings) { else if (Yhelp) yusageMsg else if (showPlugins) global.pluginDescriptions else if (showPhases) global.phaseDescriptions + ( - if (debug) "\n" + global.phaseFlagDescriptions else "" + if (settings.isDebug) "\n" + global.phaseFlagDescriptions else "" ) else if (genPhaseGraph.isSetByUser) { val components = global.phaseNames // global.phaseDescriptors // one initializes diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 431bdec16563..bea3b0678099 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -282,7 +282,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) // ------------------ Debugging ------------------------------------- @inline final def ifDebug(body: => Unit): Unit = { - if (settings.debug) + if (settings.isDebug) body } @@ -313,7 +313,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) } @inline final override def debuglog(msg: => String): Unit = { - if (settings.debug) + if (settings.isDebug) log(msg) } @@ -417,7 +417,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) if ((unit ne null) && unit.exists) lastSeenSourceFile = unit.source - if (settings.debug && (settings.verbose || currentRun.size < 5)) + if (settings.isDebug && (settings.verbose || currentRun.size < 5)) inform("[running phase " + name + " on " + unit + "]") } @@ -713,7 +713,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) protected def computePhaseDescriptors: List[SubComponent] = { /* Allow phases to opt out of the phase assembly. */ def cullPhases(phases: List[SubComponent]) = { - val enabled = if (settings.debug && settings.isInfo) phases else phases filter (_.enabled) + val enabled = if (settings.isDebug && settings.isInfo) phases else phases filter (_.enabled) def isEnabled(q: String) = enabled exists (_.phaseName == q) val (satisfied, unhappy) = enabled partition (_.requires forall isEnabled) unhappy foreach (u => globalError(s"Phase '${u.phaseName}' requires: ${u.requires filterNot isEnabled}")) @@ -744,7 +744,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) } /** A description of the phases that will run in this configuration, or all if -Vdebug. */ - def phaseDescriptions: String = phaseHelp("description", elliptically = !settings.debug, phasesDescMap) + def phaseDescriptions: String = phaseHelp("description", elliptically = !settings.isDebug, phasesDescMap) /** Summary of the per-phase values of nextFlags and newFlags, shown under -Vphases -Vdebug. */ def phaseFlagDescriptions: String = { @@ -755,7 +755,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) else if (ph.phaseNewFlags != 0L && ph.phaseNextFlags != 0L) fstr1 + " " + fstr2 else fstr1 + fstr2 } - phaseHelp("new flags", elliptically = !settings.debug, fmt) + phaseHelp("new flags", elliptically = !settings.isDebug, fmt) } /** Emit a verbose phase table. @@ -1113,7 +1113,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) def echoPhaseSummary(ph: Phase) = { /* Only output a summary message under debug if we aren't echoing each file. 
*/ - if (settings.debug && !(settings.verbose || currentRun.size < 5)) + if (settings.isDebug && !(settings.verbose || currentRun.size < 5)) inform("[running phase " + ph.name + " on " + currentRun.size + " compilation units]") } diff --git a/src/compiler/scala/tools/nsc/MainTokenMetric.scala b/src/compiler/scala/tools/nsc/MainTokenMetric.scala index c43683118028..ff8fcfa5c24f 100644 --- a/src/compiler/scala/tools/nsc/MainTokenMetric.scala +++ b/src/compiler/scala/tools/nsc/MainTokenMetric.scala @@ -50,7 +50,7 @@ object MainTokenMetric { tokenMetric(compiler, command.files) } catch { case ex @ FatalError(msg) => - if (command.settings.debug) + if (command.settings.isDebug) ex.printStackTrace() reporter.error(null, "fatal error: " + msg) } diff --git a/src/compiler/scala/tools/nsc/ast/Positions.scala b/src/compiler/scala/tools/nsc/ast/Positions.scala index 8cc1858297b4..6cda189bd82c 100644 --- a/src/compiler/scala/tools/nsc/ast/Positions.scala +++ b/src/compiler/scala/tools/nsc/ast/Positions.scala @@ -39,6 +39,6 @@ trait Positions extends scala.reflect.internal.Positions { } override protected[this] lazy val posAssigner: PosAssigner = - if (settings.Yrangepos && settings.debug || settings.Yposdebug) new ValidatingPosAssigner + if (settings.Yrangepos && settings.isDebug || settings.Yposdebug) new ValidatingPosAssigner else new DefaultPosAssigner } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala index 41c34d056ad2..48d8290535d0 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala @@ -644,7 +644,7 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { case Return(_) | Block(_, Return(_)) | Throw(_) | Block(_, Throw(_)) => () case EmptyTree => globalError("Concrete method has no definition: " + dd + ( - if (settings.debug) "(found: " + methSymbol.owner.info.decls.toList.mkString(", ") + ")" + if (settings.isDebug) "(found: " + methSymbol.owner.info.decls.toList.mkString(", ") + ")" else "")) case _ => bc emitRETURN returnType diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala index ff76ec0dca3e..f6a1c2a3e092 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala @@ -92,7 +92,7 @@ abstract class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { assert(classSym != NoSymbol, "Cannot create ClassBType from NoSymbol") assert(classSym.isClass, s"Cannot create ClassBType from non-class symbol $classSym") // note: classSym can be scala.Array, see https://github.com/scala/bug/issues/12225#issuecomment-729687859 - if (global.settings.debug) { + if (global.settings.isDebug) { // OPT this assertion has too much performance overhead to run unconditionally assert(!primitiveTypeToBType.contains(classSym) || isCompilingPrimitive, s"Cannot create ClassBType for primitive class symbol $classSym") } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/CodeGen.scala b/src/compiler/scala/tools/nsc/backend/jvm/CodeGen.scala index 72cd7a0d5ca7..2765c063f17f 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/CodeGen.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/CodeGen.scala @@ -50,7 +50,7 @@ abstract class CodeGen[G <: Global](val global: G) extends PerRunInit { } catch { case ex: InterruptedException => throw ex case 
ex: Throwable => - if (settings.debug) ex.printStackTrace() + if (settings.isDebug) ex.printStackTrace() globalError(s"Error while emitting ${unit.source}\n${ex.getMessage}") } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/PostProcessorFrontendAccess.scala b/src/compiler/scala/tools/nsc/backend/jvm/PostProcessorFrontendAccess.scala index b9ec6a85f060..748a8f3cc75a 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/PostProcessorFrontendAccess.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/PostProcessorFrontendAccess.scala @@ -184,7 +184,7 @@ object PostProcessorFrontendAccess { private def buildCompilerSettings(): CompilerSettings = new CompilerSettings { import global.{settings => s} - val debug: Boolean = s.debug + @inline def debug: Boolean = s.isDebug val target: String = s.target.value diff --git a/src/compiler/scala/tools/nsc/plugins/Plugins.scala b/src/compiler/scala/tools/nsc/plugins/Plugins.scala index 89da75e9628e..c808cc59a21c 100644 --- a/src/compiler/scala/tools/nsc/plugins/Plugins.scala +++ b/src/compiler/scala/tools/nsc/plugins/Plugins.scala @@ -150,7 +150,7 @@ trait Plugins { global: Global => } globalError("bad option: -P:" + opt) // Plugins may opt out, unless we just want to show info - plugs filter (p => p.init(p.options, globalError) || (settings.debug && settings.isInfo)) + plugs filter (p => p.init(p.options, globalError) || (settings.isDebug && settings.isInfo)) } lazy val plugins: List[Plugin] = loadPlugins() diff --git a/src/compiler/scala/tools/nsc/reporters/Reporter.scala b/src/compiler/scala/tools/nsc/reporters/Reporter.scala index 4262ec054914..219906e77fd8 100644 --- a/src/compiler/scala/tools/nsc/reporters/Reporter.scala +++ b/src/compiler/scala/tools/nsc/reporters/Reporter.scala @@ -119,7 +119,7 @@ abstract class FilteringReporter extends Reporter { // Invoked when an error or warning is filtered by position. @inline def suppress = { if (settings.prompt) doReport(pos, msg, severity) - else if (settings.debug) doReport(pos, s"[ suppressed ] $msg", severity) + else if (settings.isDebug) doReport(pos, s"[ suppressed ] $msg", severity) Suppress } if (!duplicateOk(pos, severity, msg)) suppress else if (!maxOk) Count else Display diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index 7e640d05afc1..1b25f95f46c6 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -98,7 +98,7 @@ trait ScalaSettings extends StandardScalaSettings with Warnings { _: MutableSett val Xhelp = BooleanSetting ("-X", "Print a synopsis of advanced options.") val async = BooleanSetting ("-Xasync", "Enable the async phase for scala.async.Async.{async,await}.") val checkInit = BooleanSetting ("-Xcheckinit", "Wrap field accessors to throw an exception on uninitialized access.") - val developer = BooleanSetting ("-Xdev", "Issue warnings about anything which seems amiss in compiler internals. Intended for compiler developers") + val developer = BooleanSetting ("-Xdev", "Issue warnings about anything which seems amiss in compiler internals. 
Intended for compiler developers").withPostSetHook(s => if (s.value) StatisticsStatics.enableDeveloperAndDeoptimize()) val noassertions = BooleanSetting ("-Xdisable-assertions", "Generate no assertions or assumptions.") andThen (flag => if (flag) elidebelow.value = elidable.ASSERTION + 1) val elidebelow = IntSetting ("-Xelide-below", "Calls to @elidable methods are omitted if method priority is lower than argument", @@ -453,7 +453,7 @@ trait ScalaSettings extends StandardScalaSettings with Warnings { _: MutableSett */ val Vhelp = BooleanSetting("-V", "Print a synopsis of verbose options.") val browse = PhasesSetting("-Vbrowse", "Browse the abstract syntax tree after") withAbbreviation "-Ybrowse" - val debug = BooleanSetting("-Vdebug", "Increase the quantity of debugging output.") withAbbreviation "-Ydebug" + val debug = BooleanSetting("-Vdebug", "Increase the quantity of debugging output.") withAbbreviation "-Ydebug" withPostSetHook (s => if (s.value) StatisticsStatics.enableDebugAndDeoptimize()) val YdebugTasty = BooleanSetting("-Vdebug-tasty", "Increase the quantity of debugging output when unpickling tasty.") withAbbreviation "-Ydebug-tasty" val Ydocdebug = BooleanSetting("-Vdoc", "Trace scaladoc activity.") withAbbreviation "-Ydoc-debug" val Yidedebug = BooleanSetting("-Vide", "Generate, validate and output trees using the interactive compiler.") withAbbreviation "-Yide-debug" diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala index 4e8ad9ab2f59..5fe2387c2312 100644 --- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala +++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala @@ -57,7 +57,7 @@ abstract class SymbolLoaders { } protected def signalError(root: Symbol, ex: Throwable): Unit = { - if (settings.debug) ex.printStackTrace() + if (settings.isDebug) ex.printStackTrace() globalError(ex.getMessage() match { case null => "i/o error while loading " + root.name case msg => "error while loading " + root.name + ", " + msg diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala b/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala index e99ed0858a03..7a0af81ee22a 100644 --- a/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala +++ b/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala @@ -133,7 +133,7 @@ trait SymbolTrackers { else " (" + Flags.flagsToString(masked) + ")" } def symString(sym: Symbol) = ( - if (settings.debug && sym.hasCompleteInfo) { + if (settings.isDebug && sym.hasCompleteInfo) { val s = sym.defString take 240 if (s.length == 240) s + "..." 
else s } diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index e1a218f5df4b..6816c6d01940 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -117,11 +117,11 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { } private def handleMissing(e: MissingRequirementError) = { - if (settings.debug) e.printStackTrace + if (settings.isDebug) e.printStackTrace throw new IOException(s"Missing dependency '${e.req}', required by $file") } private def handleError(e: Exception) = { - if (settings.debug) e.printStackTrace() + if (settings.isDebug) e.printStackTrace() throw new IOException(s"class file '$file' is broken\n(${e.getClass}/${e.getMessage})") } private def mismatchError(c: Symbol) = { @@ -420,7 +420,8 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { // - better owner than `NoSymbol` // - remove eager warning val msg = s"Class $name not found - continuing with a stub." - if ((!settings.isScaladoc) && (settings.verbose || settings.developer)) loaders.warning(NoPosition, msg, WarningCategory.OtherDebug, clazz.fullNameString) + if ((!settings.isScaladoc) && (settings.verbose || settings.isDeveloper)) + loaders.warning(NoPosition, msg, WarningCategory.OtherDebug, clazz.fullNameString) NoSymbol.newStubSymbol(name.toTypeName, msg) } @@ -471,7 +472,7 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { case ex: FatalError => // getClassByName can throw a MissingRequirementError (which extends FatalError) // definitions.getMember can throw a FatalError, for example in pos/t5165b - if (settings.debug) + if (settings.isDebug) ex.printStackTrace() stubClassSymbol(newTypeName(name)) } @@ -1007,7 +1008,7 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { // with a `FatalError` exception, handled above. Here you'd end up after a NPE (for example), // and that should never be swallowed silently. loaders.warning(NoPosition, s"Caught: $ex while parsing annotations in ${file}", WarningCategory.Other, clazz.fullNameString) - if (settings.debug) ex.printStackTrace() + if (settings.isDebug) ex.printStackTrace() None // ignore malformed annotations } diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala index 23ef2573d91a..029be7dd30c5 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala @@ -112,7 +112,7 @@ abstract class Pickler extends SubComponent { // // OPT: do this only as a recovery after fatal error. Checking in advance was expensive. 
if (t.isErroneous) { - if (settings.debug) e.printStackTrace() + if (settings.isDebug) e.printStackTrace() reporter.error(t.pos, "erroneous or inaccessible type") return } diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala index de66f846786e..77fe08b23e7c 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala @@ -144,7 +144,6 @@ trait ContextOps { self: TastyUniverse => final def globallyVisibleOwner: Symbol = owner.logicallyEnclosingMember final def ignoreAnnotations: Boolean = u.settings.YtastyNoAnnotations - final def verboseDebug: Boolean = u.settings.debug def requiresLatentEntry(decl: Symbol): Boolean = decl.isScala3Inline def neverEntered(decl: Symbol): Boolean = decl.isPureMixinCtor diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala index b2242116a7c3..41922c945662 100644 --- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala +++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala @@ -369,7 +369,7 @@ abstract class CleanUp extends Statics with Transform with ast.TreeDSL { /* For testing purposes, the dynamic application's condition * can be printed-out in great detail. Remove? */ - if (settings.debug) { + if (settings.isDebug) { def paramsToString(xs: Any*) = xs map (_.toString) mkString ", " val mstr = ad.symbol.tpe match { case MethodType(mparams, resType) => diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index daf574fcabe8..c950d89fd258 100644 --- a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -94,7 +94,7 @@ abstract class Erasure extends InfoTransform if (! ts.isEmpty && ! 
result) { apply(ts.head) ; untilApply(ts.tail) } } - override protected def verifyJavaErasure = settings.Xverify || settings.debug + override protected def verifyJavaErasure = settings.Xverify || settings.isDebug private def needsJavaSig(sym: Symbol, tp: Type, throwsArgs: List[Type]) = !settings.Ynogenericsig && { def needs(tp: Type) = NeedsSigCollector(sym.isClassConstructor).collect(tp) needs(tp) || throwsArgs.exists(needs) @@ -518,7 +518,7 @@ abstract class Erasure extends InfoTransform clashErrors += Tuple2(pos, msg) } for (bc <- root.baseClasses) { - if (settings.debug) + if (settings.isDebug) exitingPostErasure(println( sm"""check bridge overrides in $bc |${bc.info.nonPrivateDecl(bridge.name)} diff --git a/src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala b/src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala index 0f327b540fa8..93eb50dc6939 100644 --- a/src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala +++ b/src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala @@ -97,7 +97,7 @@ trait TypeAdaptingTransformer { self: TreeDSL => case ArrayClass => assert(pt.typeSymbol != ArrayClass, "array") ; tree case _ => val unboxer = currentRun.runDefinitions.unboxMethod(pt.typeSymbol) - if (settings.developer) assert(boxedClass(pt.typeSymbol).tpe <:< tree.tpe, s"${tree.tpe} is not a boxed ${pt}") + if (settings.isDeveloper) assert(boxedClass(pt.typeSymbol).tpe <:< tree.tpe, s"${tree.tpe} is not a boxed ${pt}") Apply(unboxer, tree) // don't `setType pt` the Apply tree, as the Apply's fun won't be typechecked if the Apply tree already has a type } } @@ -116,7 +116,7 @@ trait TypeAdaptingTransformer { self: TreeDSL => * @note Pre-condition: pt eq pt.normalize */ final def cast(tree: Tree, pt: Type): Tree = { - if (settings.debug && (tree.tpe ne null) && !(tree.tpe =:= ObjectTpe)) { + if (settings.isDebug && (tree.tpe ne null) && !(tree.tpe =:= ObjectTpe)) { def word = if (tree.tpe <:< pt) "upcast" else if (pt <:< tree.tpe) "downcast" diff --git a/src/compiler/scala/tools/nsc/transform/async/AsyncPhase.scala b/src/compiler/scala/tools/nsc/transform/async/AsyncPhase.scala index 60d7c510723f..dd6f2f491640 100644 --- a/src/compiler/scala/tools/nsc/transform/async/AsyncPhase.scala +++ b/src/compiler/scala/tools/nsc/transform/async/AsyncPhase.scala @@ -178,7 +178,7 @@ abstract class AsyncPhase extends Transform with TypingTransformers with AnfTran val applyBody = atPos(asyncPos)(asyncBlock.onCompleteHandler) // Logging - if ((settings.debug.value && shouldLogAtThisPhase)) + if ((settings.isDebug && shouldLogAtThisPhase)) logDiagnostics(anfTree, asyncBlock, asyncBlock.asyncStates.map(_.toString)) // Offer async frontends a change to produce the .dot diagram transformState.dotDiagram(applySym, asyncBody).foreach(f => f(asyncBlock.toDot)) diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index bb233527d6f6..c17b49a79d9d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -1525,7 +1525,7 @@ trait Implicits extends splain.SplainData { if (args contains EmptyTree) EmptyTree else typedPos(tree.pos.focus) { val mani = gen.mkManifestFactoryCall(full, constructor, tparg, args.toList) - if (settings.debug) println("generated manifest: "+mani) // DEBUG + if (settings.isDebug) println("generated manifest: "+mani) // DEBUG mani } @@ -1762,7 +1762,7 @@ trait Implicits extends 
splain.SplainData { } } - if (result.isFailure && settings.debug) // debuglog is not inlined for some reason + if (result.isFailure && settings.isDebug) // debuglog is not inlined for some reason log(s"no implicits found for ${pt} ${pt.typeSymbol.info.baseClasses} ${implicitsOfExpectedType}") result diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index 094dc1032487..3457e2326bc5 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -239,7 +239,7 @@ trait Infer extends Checkable { // When filtering sym down to the accessible alternatives leaves us empty handed. private def checkAccessibleError(tree: Tree, sym: Symbol, pre: Type, site: Tree): Tree = { - if (settings.debug) { + if (settings.isDebug) { Console.println(context) Console.println(tree) Console.println("" + pre + " " + sym.owner + " " + context.owner + " " + context.outer.enclClass.owner + " " + sym.owner.thisType + (pre =:= sym.owner.thisType)) diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index 47d6610f6e57..05ca87bb663c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -148,7 +148,7 @@ abstract class RefChecks extends Transform { } // This has become noisy with implicit classes. - if (settings.warnPolyImplicitOverload && settings.developer) { + if (settings.isDeveloper && settings.warnPolyImplicitOverload) { clazz.info.decls.foreach(sym => if (sym.isImplicit && sym.typeParams.nonEmpty) { // implicit classes leave both a module symbol and a method symbol as residue val alts = clazz.info.decl(sym.name).alternatives filterNot (_.isModule) @@ -303,7 +303,7 @@ abstract class RefChecks extends Transform { def isNeitherInClass = memberClass != clazz && otherClass != clazz val indent = " " - def overriddenWithAddendum(msg: String, foundReq: Boolean = settings.debug.value): String = { + def overriddenWithAddendum(msg: String, foundReq: Boolean = settings.isDebug): String = { val isConcreteOverAbstract = (otherClass isSubClass memberClass) && other.isDeferred && !member.isDeferred val addendum = @@ -1868,7 +1868,7 @@ abstract class RefChecks extends Transform { result1 } catch { case ex: TypeError => - if (settings.debug) ex.printStackTrace() + if (settings.isDebug) ex.printStackTrace() reporter.error(tree.pos, ex.getMessage()) tree } finally { diff --git a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala index bda816b31af0..b4e0d5339c01 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala @@ -130,7 +130,7 @@ abstract class TreeCheckers extends Analyzer { // new symbols if (newSyms.nonEmpty) { informFn("" + newSyms.size + " new symbols.") - val toPrint = if (settings.debug) sortedNewSyms mkString " " else "" + val toPrint = if (settings.isDebug) sortedNewSyms mkString " " else "" newSyms.clear() if (toPrint != "") @@ -177,7 +177,7 @@ abstract class TreeCheckers extends Analyzer { def errorFn(msg: Any): Unit = errorFn(NoPosition, msg) def informFn(msg: Any): Unit = { - if (settings.verbose || settings.debug) + if (settings.verbose || settings.isDebug) println("[check: %s] %s".format(phase.prev, msg)) } diff --git 
a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala index 4a0f049e585b..cef28da57f62 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala @@ -820,7 +820,7 @@ trait TypeDiagnostics extends splain.SplainDiagnostics { // but it seems that throwErrors excludes some of the errors that should actually be // buffered, causing TypeErrors to fly around again. This needs some more investigation. if (!context0.reportErrors) throw ex - if (settings.debug) ex.printStackTrace() + if (settings.isDebug) ex.printStackTrace() ex match { case CyclicReference(sym, info: TypeCompleter) => diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeStrings.scala b/src/compiler/scala/tools/nsc/typechecker/TypeStrings.scala index 1290964fdffd..48b7b7c45bae 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypeStrings.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypeStrings.scala @@ -59,7 +59,7 @@ trait StructuredTypeStrings extends DestructureTypes { else block(level, grouping)(name, nodes) } private def shortClass(x: Any) = { - if (settings.debug) { + if (settings.isDebug) { val name = (x.getClass.getName split '.').last val str = if (TypeStrings.isAnonClass(x.getClass)) name else (name split '$').last diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index a3bc5d0615e2..c253fdc7e368 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -1100,7 +1100,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def adaptExprNotFunMode(): Tree = { def lastTry(err: AbsTypeError = null): Tree = { debuglog("error tree = " + tree) - if (settings.debug && settings.explaintypes) explainTypes(tree.tpe, pt) + if (settings.isDebug && settings.explaintypes) explainTypes(tree.tpe, pt) if (err ne null) context.issue(err) if (tree.tpe.isErroneous || pt.isErroneous) setError(tree) else adaptMismatchedSkolems() @@ -5613,7 +5613,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper AppliedTypeNoParametersError(tree, tpt1.tpe) } else { //Console.println("\{tpt1}:\{tpt1.symbol}:\{tpt1.symbol.info}") - if (settings.debug) Console.println(s"$tpt1:${tpt1.symbol}:${tpt1.symbol.info}")//debug + if (settings.isDebug) Console.println(s"$tpt1:${tpt1.symbol}:${tpt1.symbol.info}")//debug AppliedTypeWrongNumberOfArgsError(tree, tpt1, tparams) } } diff --git a/src/compiler/scala/tools/nsc/typechecker/TypersTracking.scala b/src/compiler/scala/tools/nsc/typechecker/TypersTracking.scala index 8ffa6cbe0b40..95512297b20d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypersTracking.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypersTracking.scala @@ -29,7 +29,7 @@ trait TypersTracking { def fullSiteString(context: Context): String = { def owner_long_s = ( - if (settings.debug.value) { + if (settings.isDebug) { def flags_s = context.owner.debugFlagString match { case "" => "" case s => " with flags " + inLightMagenta(s) @@ -70,7 +70,7 @@ trait TypersTracking { private def truncAndOneLine(s: String): String = { val s1 = s.replaceAll("\\s+", " ") - if (s1.length < 60 || settings.debug.value) s1 else s1.take(57) + "..." + if (s1.length < 60 || settings.isDebug) s1 else s1.take(57) + "..." 
} private class Frame(val tree: Tree) { } @@ -160,7 +160,7 @@ trait TypersTracking { // Some trees which are typed with mind-numbing frequency and // which add nothing by being printed. Did () type to Unit? Let's // gamble on yes. - def printingOk(t: Tree) = printTypings && (settings.debug.value || !noPrint(t)) + def printingOk(t: Tree) = printTypings && (settings.isDebug || !noPrint(t)) def noPrintTyping(t: Tree) = (t.tpe ne null) || !printingOk(t) def noPrintAdapt(tree1: Tree, tree2: Tree) = !printingOk(tree1) || ( (tree1.tpe == tree2.tpe) diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala index 912c27ee6da5..0af5efeed818 100644 --- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala +++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala @@ -53,7 +53,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf => extends ReflectGlobal(settings, reporter0, toolBoxSelf.classLoader) { import definitions._ - private val trace = scala.tools.nsc.util.trace when settings.debug.value + private val trace = scala.tools.nsc.util.trace when settings.isDebug private var wrapCount = 0 @@ -268,7 +268,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf => val msym = wrapInPackageAndCompile(mdef.name, mdef) val className = msym.fullName - if (settings.debug) println("generated: "+className) + if (settings.isDebug) println("generated: "+className) def moduleFileName(className: String) = className + "$" val jclazz = jClass.forName(moduleFileName(className), true, classLoader) val jmeth = jclazz.getDeclaredMethods.find(_.getName == wrapperMethodName).get diff --git a/src/interactive/scala/tools/nsc/interactive/REPL.scala b/src/interactive/scala/tools/nsc/interactive/REPL.scala index 8fb23516e734..300cf38b3ad0 100644 --- a/src/interactive/scala/tools/nsc/interactive/REPL.scala +++ b/src/interactive/scala/tools/nsc/interactive/REPL.scala @@ -57,7 +57,7 @@ object REPL { } } catch { case ex @ FatalError(msg) => - if (true || command.settings.debug) // !!! + if (true || command.settings.isDebug) // !!! 
ex.printStackTrace() reporter.error(null, "fatal error: " + msg) } diff --git a/src/reflect/scala/reflect/internal/Kinds.scala b/src/reflect/scala/reflect/internal/Kinds.scala index 698be6563c5c..d53da5a4ca37 100644 --- a/src/reflect/scala/reflect/internal/Kinds.scala +++ b/src/reflect/scala/reflect/internal/Kinds.scala @@ -152,7 +152,7 @@ trait Kinds { def kindCheck(cond: Boolean, f: KindErrors => KindErrors): Unit = if (!cond) kindErrors = f(kindErrors) - if (settings.debug) { + if (settings.isDebug) { log("checkKindBoundsHK expected: "+ param +" with params "+ hkparams +" by definition in "+ paramOwner) log("checkKindBoundsHK supplied: "+ arg +" with params "+ hkargs +" from "+ argOwner) log("checkKindBoundsHK under params: "+ underHKParams +" with args "+ withHKArgs) @@ -215,7 +215,7 @@ trait Kinds { else NoKindErrors } - if (settings.debug && (tparams.nonEmpty || targs.nonEmpty)) log( + if (settings.isDebug && (tparams.nonEmpty || targs.nonEmpty)) log( "checkKindBounds0(" + tparams + ", " + targs + ", " + pre + ", " + owner + ", " + explainErrors + ")" ) diff --git a/src/reflect/scala/reflect/internal/Mirrors.scala b/src/reflect/scala/reflect/internal/Mirrors.scala index e7d434ca3a95..4099423cbed8 100644 --- a/src/reflect/scala/reflect/internal/Mirrors.scala +++ b/src/reflect/scala/reflect/internal/Mirrors.scala @@ -61,7 +61,7 @@ trait Mirrors extends api.Mirrors { val result = if (name.isTermName) sym.suchThat(_ hasFlag MODULE) else sym if (result != NoSymbol) result else { - if (settings.debug) { log(sym.info); log(sym.info.members) }//debug + if (settings.isDebug) { log(sym.info); log(sym.info.members) }//debug thisMirror.missingHook(owner, name) orElse { MissingRequirementError.notFound((if (name.isTermName) "object " else "class ")+path+" in "+thisMirror) } diff --git a/src/reflect/scala/reflect/internal/Printers.scala b/src/reflect/scala/reflect/internal/Printers.scala index f869bd121981..efc2da391027 100644 --- a/src/reflect/scala/reflect/internal/Printers.scala +++ b/src/reflect/scala/reflect/internal/Printers.scala @@ -207,7 +207,7 @@ trait Printers extends api.Printers { self: SymbolTable => ) def printFlags(flags: Long, privateWithin: String) = { - val mask: Long = if (settings.debug) -1L else PrintableFlags + val mask: Long = if (settings.isDebug) -1L else PrintableFlags val s = flagsToString(flags & mask, privateWithin) if (s != "") print(s + " ") } @@ -320,7 +320,7 @@ trait Printers extends api.Printers { self: SymbolTable => if (qual.nonEmpty || (checkSymbol && tree.symbol != NoSymbol)) print(resultName + ".") print("super") if (mix.nonEmpty) print(s"[$mix]") - else if (settings.debug) tree.tpe match { + else if (settings.isDebug) tree.tpe match { case st: SuperType => print(s"[${st.supertpe}]") case tp: Type => print(s"[$tp]") case _ => @@ -479,7 +479,7 @@ trait Printers extends api.Printers { self: SymbolTable => case th @ This(qual) => printThis(th, symName(tree, qual)) - case Select(qual: New, name) if !settings.debug => + case Select(qual: New, name) if !settings.isDebug => print(qual) case Select(qualifier, name) => diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala index 9c7abb1e1524..3113062c5b51 100644 --- a/src/reflect/scala/reflect/internal/SymbolTable.scala +++ b/src/reflect/scala/reflect/internal/SymbolTable.scala @@ -87,15 +87,16 @@ abstract class SymbolTable extends macros.Universe def shouldLogAtThisPhase = false def isPastTyper = false - final def isDeveloper: Boolean = 
settings.debug.value || settings.developer.value - def picklerPhase: Phase + @inline final def isDeveloper: Boolean = settings.isDebug || settings.isDeveloper + + def picklerPhase: Phase def erasurePhase: Phase def settings: MutableSettings /** Override with final implementation for inlining. */ - def debuglog(msg: => String): Unit = if (settings.debug) log(msg) + def debuglog(msg: => String): Unit = if (settings.isDebug) log(msg) /** dev-warns if dev-warning is enabled and `cond` is true; no-op otherwise */ @inline final def devWarningIf(cond: => Boolean)(msg: => String): Unit = diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index a144fe6e8c63..be808ffdf20c 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -292,7 +292,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => def varianceString: String = variance.symbolicString override def flagMask = - if (settings.debug && !isAbstractType) AllFlags + if (settings.isDebug && !isAbstractType) AllFlags else if (owner.isRefinementClass) ExplicitFlags & ~OVERRIDE else ExplicitFlags @@ -2724,7 +2724,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => symbolKind.abbreviation final def kindString: String = - if (settings.debug.value) accurateKindString + if (settings.isDebug) accurateKindString else sanitizedKindString /** If the name of the symbol's owner should be used when you care about @@ -2748,7 +2748,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => * If settings.Yshowsymkinds, adds abbreviated symbol kind. */ def nameString: String = { - val name_s = if (settings.debug.value) "" + unexpandedName else unexpandedName.dropLocal.decode + val name_s = if (settings.isDebug) "" + unexpandedName else unexpandedName.dropLocal.decode val kind_s = if (settings.Yshowsymkinds.value) "#" + abbreviatedKindString else "" name_s + idString + kind_s @@ -2775,7 +2775,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => * If hasMeaninglessName is true, uses the owner's name to disambiguate identity. 
*/ override def toString: String = { - val simplifyNames = !settings.debug + val simplifyNames = !settings.isDebug if (isPackageObjectOrClass && simplifyNames) s"package object ${owner.decodedName}" else { val kind = kindString @@ -2811,7 +2811,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => def isStructuralThisType = owner.isInitialized && owner.isStructuralRefinement && tp == owner.tpe // scala/bug#8158 // colon+space, preceded by an extra space if needed to prevent the colon glomming onto a symbolic name def postnominalColon: String = if (!followsParens && name.isOperatorName) " : " else ": " - def parents = if (settings.debug) parentsString(tp.parents) else briefParentsString(tp.parents) + def parents = if (settings.isDebug) parentsString(tp.parents) else briefParentsString(tp.parents) def typeRest = if (isClass) " extends " + parents else if (isAliasType) " = " + tp.resultType @@ -2871,7 +2871,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => /** String representation of existentially bound variable */ def existentialToString = - if (isSingletonExistential && !settings.debug.value) + if (isSingletonExistential && !settings.isDebug) "val " + tpnme.dropSingletonName(name) + ": " + dropSingletonType(info.upperBound) else defString } @@ -3328,7 +3328,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => owner.newTypeSkolemSymbol(name, origin, pos, newFlags) override def nameString: String = - if (settings.debug.value) (super.nameString + "&" + level) + if ((settings.isDebug)) (super.nameString + "&" + level) else super.nameString } @@ -3597,7 +3597,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => // Avoid issuing lots of redundant errors if (!hasFlag(IS_ERROR)) { globalError(pos, missingMessage) - if (settings.debug.value) + if (settings.isDebug) (new Throwable).printStackTrace this setFlag IS_ERROR @@ -3814,7 +3814,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => /** An exception for cyclic references of symbol definitions */ case class CyclicReference(sym: Symbol, info: Type) extends TypeError("illegal cyclic reference involving " + sym) { - if (settings.debug) printStackTrace() + if (settings.isDebug) printStackTrace() } /** A class for type histories */ diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala index 47945ed0eed2..4c76b3471354 100644 --- a/src/reflect/scala/reflect/internal/Trees.scala +++ b/src/reflect/scala/reflect/internal/Trees.scala @@ -1467,7 +1467,7 @@ trait Trees extends api.Trees { private def requireLegal(value: Any, allowed: Any, what: String) = ( if (value != allowed) { log(s"can't set $what for $self to value other than $allowed") - if (settings.debug && settings.developer) + if (settings.isDebug && settings.isDeveloper) (new Throwable).printStackTrace } ) diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index deec5ade2c71..7dbc627fe055 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -1411,7 +1411,7 @@ trait Types override def underlying: Type = sym.typeOfThis override def isHigherKinded = sym.isRefinementClass && underlying.isHigherKinded override def prefixString = - if (settings.debug) sym.nameString + ".this." + if (settings.isDebug) sym.nameString + ".this." else if (sym.isAnonOrRefinementClass) "this." else if (sym.isOmittablePrefix) "" else if (sym.isModuleClass) sym.fullNameString + "." 
@@ -1689,7 +1689,7 @@ trait Types override def isStructuralRefinement: Boolean = typeSymbol.isAnonOrRefinementClass && (decls exists symbolIsPossibleInRefinement) - protected def shouldForceScope = settings.debug || parents.isEmpty || !decls.isEmpty + protected def shouldForceScope = settings.isDebug || parents.isEmpty || !decls.isEmpty protected def initDecls = fullyInitializeScope(decls) protected def scopeString = if (shouldForceScope) initDecls.mkString("{", "; ", "}") else "" override def safeToString = parentsString(parents) + scopeString @@ -2056,7 +2056,7 @@ trait Types /** A nicely formatted string with newlines and such. */ def formattedToString = parents.mkString("\n with ") + scopeString - override protected def shouldForceScope = settings.debug || decls.size > 1 + override protected def shouldForceScope = settings.isDebug || decls.size > 1 override protected def scopeString = initDecls.mkString(" {\n ", "\n ", "\n}") override def safeToString = if (shouldForceScope) formattedToString else super.safeToString } @@ -2642,7 +2642,7 @@ trait Types } // ensure that symbol is not a local copy with a name coincidence private def needsPreString = ( - settings.debug + settings.isDebug || !shorthands(sym.fullName) || (sym.ownersIterator exists (s => !s.isClass)) ) @@ -2713,12 +2713,12 @@ trait Types case _ => "" } override def safeToString = { - val custom = if (settings.debug) "" else customToString + val custom = if (settings.isDebug) "" else customToString if (custom != "") custom else finishPrefix(preString + sym.nameString + argsString) } override def prefixString = "" + ( - if (settings.debug) + if (settings.isDebug) super.prefixString else if (sym.isOmittablePrefix) "" @@ -3152,7 +3152,7 @@ trait Types } override def nameAndArgsString: String = underlying match { - case TypeRef(_, sym, args) if !settings.debug && isRepresentableWithWildcards => + case TypeRef(_, sym, args) if !settings.isDebug && isRepresentableWithWildcards => sym.name.toString + wildcardArgsString(quantified.toSet, args).mkString("[", ",", "]") case TypeRef(_, sym, args) => sym.name.toString + args.mkString("[", ",", "]") + existentialClauses @@ -3192,7 +3192,7 @@ trait Types } override def safeToString: String = underlying match { - case TypeRef(pre, sym, args) if !settings.debug && isRepresentableWithWildcards => + case TypeRef(pre, sym, args) if !settings.isDebug && isRepresentableWithWildcards => val ref = typeRef(pre, sym, Nil).toString val wildcards = wildcardArgsString(quantified.toSet, args) if (wildcards.isEmpty) ref else ref + wildcards.mkString("[", ", ", "]") @@ -5192,7 +5192,7 @@ trait Types def this(msg: String) = this(NoPosition, msg) final override def fillInStackTrace() = - if (settings.debug) super.fillInStackTrace() else this + if (settings.isDebug) super.fillInStackTrace() else this } // TODO: RecoverableCyclicReference should be separated from TypeError, @@ -5200,7 +5200,7 @@ trait Types /** An exception for cyclic references from which we can recover */ case class RecoverableCyclicReference(sym: Symbol) extends TypeError("illegal cyclic reference involving " + sym) { - if (settings.debug) printStackTrace() + if (settings.isDebug) printStackTrace() } class NoCommonType(tps: List[Type]) extends ControlThrowable( diff --git a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala index d8abf5b30c13..09f3e8009b98 100644 --- a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala +++ 
b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala @@ -58,8 +58,6 @@ abstract class UnPickler { class Scan(_bytes: Array[Byte], offset: Int, classRoot: ClassSymbol, moduleRoot: ModuleSymbol, filename: String) extends PickleBuffer(_bytes, offset, -1) { //println("unpickle " + classRoot + " and " + moduleRoot)//debug - protected def debug = settings.debug.value - checkVersion() private[this] val loadingMirror = mirrorThatLoaded(classRoot) diff --git a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala index 0e84fe6c90d7..c4791fcbccd2 100644 --- a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala +++ b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala @@ -74,5 +74,7 @@ object MutableSettings { implicit class SettingsOps(private val settings: MutableSettings) extends AnyVal { @inline final def areStatisticsEnabled = StatisticsStatics.areSomeColdStatsEnabled && settings.YstatisticsEnabled + @inline final def isDebug: Boolean = StatisticsStatics.isDebug && settings.debug + @inline final def isDeveloper: Boolean = StatisticsStatics.isDeveloper && settings.developer } } diff --git a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala index 2e7b7a058b3e..e5c982ce33af 100644 --- a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala +++ b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala @@ -396,7 +396,7 @@ private[internal] trait GlbLubs { // parameters are not handled correctly. val ok = ts forall { t => isSubType(t, lubRefined, depth.decr) || { - if (settings.debug || printLubs) { + if (settings.isDebug || printLubs) { Console.println( "Malformed lub: " + lubRefined + "\n" + "Argument " + t + " does not conform. 
Falling back to " + lubBase diff --git a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala index 441b25bb9d74..f919d1ea18e6 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala @@ -66,7 +66,7 @@ trait TypeComparers { private def isSubPre(pre1: Type, pre2: Type, sym: Symbol) = if ((pre1 ne pre2) && (pre1 ne NoPrefix) && (pre2 ne NoPrefix) && pre1 <:< pre2) { - if (settings.debug) println(s"new isSubPre $sym: $pre1 <:< $pre2") + if (settings.isDebug) println(s"new isSubPre $sym: $pre1 <:< $pre2") true } else false diff --git a/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala b/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala index 7af1bb9376a3..9376640a5d17 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala @@ -62,7 +62,7 @@ private[internal] trait TypeConstraints { } def clear(): Unit = { - if (settings.debug) + if (settings.isDebug) self.log("Clearing " + log.size + " entries from the undoLog.") log = Nil } diff --git a/src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala b/src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala index e9691b9b404f..8a8540df3cea 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala @@ -39,7 +39,7 @@ private[internal] trait TypeToStrings { // else if (toStringRecursions >= maxToStringRecursions) { devWarning("Exceeded recursion depth attempting to print " + util.shortClassOfInstance(tpe)) - if (settings.debug) + if (settings.isDebug) (new Throwable).printStackTrace "..." 
diff --git a/src/reflect/scala/reflect/internal/util/StatisticsStatics.java b/src/reflect/scala/reflect/internal/util/StatisticsStatics.java index d2d27a7af6c7..1143a64268a7 100644 --- a/src/reflect/scala/reflect/internal/util/StatisticsStatics.java +++ b/src/reflect/scala/reflect/internal/util/StatisticsStatics.java @@ -23,13 +23,21 @@ public final class StatisticsStatics { private static final AlmostFinalValue COLD_STATS = new AlmostFinalValue(); private static final AlmostFinalValue HOT_STATS = new AlmostFinalValue(); + private static final AlmostFinalValue DEBUG = new AlmostFinalValue(); + private static final AlmostFinalValue DEVELOPER = new AlmostFinalValue(); private static final MethodHandle COLD_STATS_GETTER = COLD_STATS.invoker; private static final MethodHandle HOT_STATS_GETTER = HOT_STATS.invoker; + private static final MethodHandle DEBUG_GETTER = DEBUG.invoker; + private static final MethodHandle DEVELOPER_GETTER = DEVELOPER.invoker; public static boolean areSomeColdStatsEnabled() throws Throwable { return (boolean) COLD_STATS_GETTER.invokeExact(); } public static boolean areSomeHotStatsEnabled() throws Throwable { return (boolean) HOT_STATS_GETTER.invokeExact(); } + public static boolean isDebug() throws Throwable { return (boolean) DEBUG_GETTER.invokeExact(); } + public static boolean isDeveloper() throws Throwable { return (boolean) DEVELOPER_GETTER.invokeExact(); } public static void enableColdStatsAndDeoptimize() { COLD_STATS.toggleOnAndDeoptimize(); } public static void enableHotStatsAndDeoptimize() { HOT_STATS.toggleOnAndDeoptimize(); } + public static void enableDebugAndDeoptimize() { DEBUG.toggleOnAndDeoptimize(); } + public static void enableDeveloperAndDeoptimize() { DEVELOPER.toggleOnAndDeoptimize(); } } diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala index 3d7b7bcd8947..4e227174901b 100644 --- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala +++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala @@ -638,7 +638,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive def markAbsent(tpe: Type) = setAllInfos(clazz, module, tpe) def handleError(ex: Exception) = { markAbsent(ErrorType) - if (settings.debug) ex.printStackTrace() + if (settings.isDebug) ex.printStackTrace() val msg = ex.getMessage() MissingRequirementError.signal( (if (msg eq null) "reflection error while loading " + clazz.name diff --git a/src/reflect/scala/reflect/runtime/Settings.scala b/src/reflect/scala/reflect/runtime/Settings.scala index 21acdff3b990..56786a5581d9 100644 --- a/src/reflect/scala/reflect/runtime/Settings.scala +++ b/src/reflect/scala/reflect/runtime/Settings.scala @@ -51,8 +51,8 @@ private[reflect] class Settings extends MutableSettings { val Yshowsymowners = new BooleanSetting(false) val Yshowsymkinds = new BooleanSetting(false) val breakCycles = new BooleanSetting(false) - val debug = new BooleanSetting(false) - val developer = new BooleanSetting(false) + val debug = new BooleanSetting(false) { override def postSetHook() = if (v) StatisticsStatics.enableDebugAndDeoptimize() } + val developer = new BooleanSetting(false) { override def postSetHook() = if (v) StatisticsStatics.enableDeveloperAndDeoptimize() } val explaintypes = new BooleanSetting(false) val printtypes = new BooleanSetting(false) val uniqid = new BooleanSetting(false) diff --git a/src/reflect/scala/reflect/runtime/SymbolTable.scala b/src/reflect/scala/reflect/runtime/SymbolTable.scala index 
3bb674953521..ccb94eb2dec0 100644 --- a/src/reflect/scala/reflect/runtime/SymbolTable.scala +++ b/src/reflect/scala/reflect/runtime/SymbolTable.scala @@ -25,7 +25,7 @@ private[scala] trait SymbolTable extends internal.SymbolTable with JavaMirrors w if (settings.verbose) println("[reflect-compiler] "+msg) def debugInfo(msg: => String) = - if (settings.debug) info(msg) + if (settings.isDebug) info(msg) /** Declares that this is a runtime reflection universe. * diff --git a/src/scaladoc/scala/tools/nsc/ScalaDoc.scala b/src/scaladoc/scala/tools/nsc/ScalaDoc.scala index 644d0b839ed2..3ddbe03c9b35 100644 --- a/src/scaladoc/scala/tools/nsc/ScalaDoc.scala +++ b/src/scaladoc/scala/tools/nsc/ScalaDoc.scala @@ -48,7 +48,7 @@ class ScalaDoc { try { new DocFactory(reporter, docSettings) document command.files } catch { case ex @ FatalError(msg) => - if (docSettings.debug.value) ex.printStackTrace() + if (docSettings.isDebug) ex.printStackTrace() reporter.error(null, "fatal error: " + msg) } finally reporter.finish() diff --git a/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala b/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala index ab5ebf0f17aa..e361e7299010 100644 --- a/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala +++ b/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala @@ -96,7 +96,7 @@ trait ScaladocAnalyzer extends Analyzer { typedStats(trees, NoSymbol) useCase.defined = context.scope.toList filterNot (useCase.aliases contains _) - if (settings.debug) + if (settings.isDebug) useCase.defined foreach (sym => println("defined use cases: %s:%s".format(sym, sym.tpe))) useCase.defined diff --git a/src/scaladoc/scala/tools/nsc/doc/Uncompilable.scala b/src/scaladoc/scala/tools/nsc/doc/Uncompilable.scala index 00a888b3f65f..bdec5a30f6b6 100644 --- a/src/scaladoc/scala/tools/nsc/doc/Uncompilable.scala +++ b/src/scaladoc/scala/tools/nsc/doc/Uncompilable.scala @@ -12,7 +12,9 @@ package scala.tools.nsc package doc + import scala.language.implicitConversions + import scala.reflect.internal.util.NoPosition import scala.tools.nsc.Reporting.WarningCategory @@ -63,7 +65,7 @@ trait Uncompilable { def symbols = pairs map (_._1) def templates = symbols.filter(x => x.isClass || x.isTrait || x == AnyRefClass/* which is now a type alias */).toSet def comments = { - if (settings.debug || settings.verbose) + if (settings.isDebug || settings.verbose) inform("Found %d uncompilable files: %s".format(files.size, files mkString ", ")) if (pairs.isEmpty) From a774c4fb282fdfbd46d8c0f54b75d6f03b77c338 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 29 Apr 2021 11:06:25 +1000 Subject: [PATCH 132/769] Fully JIT inlinable settings/statistics enabled checks Wrapping the method handle invocation in a static method relies on that method being JIT inlined. Otherwise, an unlucky caller can still incur the machine-code subroutine call overhead to a no-op method. 
Example:

```
[info] \-> TypeProfile (34723/34723 counts) = scala/tools/nsc/Global$GlobalMirror
[info] @ 1 scala.reflect.internal.Mirrors$Roots::RootClass (21 bytes) inline (hot)
[info] !m @ 12 scala.reflect.internal.Mirrors$Roots::RootClass$lzycompute (49 bytes) inline (hot)
[info] @ 19 scala.reflect.internal.Mirrors$Roots$RootClass::<init> (61 bytes) inline (hot)
[info] @ 13 scala.reflect.internal.Mirrors$Roots::scala$reflect$internal$Mirrors$Roots$$$outer (5 bytes) accessor
[info] @ 21 scala.reflect.internal.Mirrors$Roots::scala$reflect$internal$Mirrors$Roots$$$outer (5 bytes) accessor
[info] @ 24 scala.reflect.internal.SymbolTable::NoPosition (5 bytes) accessor
[info] @ 28 scala.reflect.internal.Mirrors$Roots::scala$reflect$internal$Mirrors$Roots$$$outer (5 bytes) accessor
[info] @ 31 scala.reflect.internal.SymbolTable::tpnme (16 bytes) inline (hot)
[info] !m @ 8 scala.reflect.internal.SymbolTable::tpnme$lzycompute$1 (27 bytes) inline (hot)
[info] @ 15 scala.reflect.internal.StdNames$tpnme$::<init> (6 bytes) inline (hot)
[info] @ 2 scala.reflect.internal.StdNames$TypeNames::<init> (757 bytes) hot method too big
[info] @ 34 scala.reflect.internal.StdNames$CommonNames::ROOT (5 bytes) accessor
[info] @ 40 scala.reflect.internal.Symbols$PackageClassSymbol::<init> (10 bytes) inline (hot)
[info] @ 6 scala.reflect.internal.Symbols$ModuleClassSymbol::<init> (41 bytes) inline (hot)
[info] @ 11 scala.reflect.internal.Symbols$ClassSymbol::<init> (164 bytes) inline (hot)
[info] @ 6 scala.reflect.internal.Symbols$TypeSymbol::<init> (145 bytes) inline (hot)
[info] @ 6 scala.reflect.internal.Symbols$Symbol::<init> (168 bytes) inlining too deep
[info] @ 11 scala.reflect.internal.SymbolTable::NoSymbol (22 bytes) inlining too deep
[info] @ 14 scala.reflect.internal.Symbols$Symbol::privateWithin_$eq (6 bytes) inlining too deep
[info] @ 32 scala.reflect.internal.util.StatisticsStatics::areSomeColdStatsEnabled (7 bytes) inlining too deep
```

Instead, push the `invokeExact` into the `@inline` checker methods, and use these pervasively.
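For reference, a minimal, self-contained Java sketch of the "almost final" flag pattern these checks are built on. The names `AlmostFinalFlag` and `DebugFlag` are illustrative only, not the actual compiler sources (the real code lives in `AlmostFinalValue` and `StatisticsStatics`), and the sketch deliberately shows the static-wrapper shape whose reliance on JIT inlining this commit removes.

```java
import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.lang.invoke.MutableCallSite;

// Illustrative stand-in for AlmostFinalValue: a mutable call site whose target is a
// constant-returning method handle. Until the flag is toggled, the JIT may treat a
// read through `invoker` as the constant `false` and fold away the guarded code.
final class AlmostFinalFlag {
    private final MutableCallSite callSite =
        new MutableCallSite(MethodHandles.constant(boolean.class, false));
    final MethodHandle invoker = callSite.dynamicInvoker();

    void toggleOnAndDeoptimize() {
        callSite.setTarget(MethodHandles.constant(boolean.class, true));
        // Invalidate any compiled code that folded the previous constant.
        MutableCallSite.syncAll(new MutableCallSite[] { callSite });
    }
}

// Illustrative stand-in for a StatisticsStatics-style accessor.
final class DebugFlag {
    private static final AlmostFinalFlag DEBUG = new AlmostFinalFlag();
    private static final MethodHandle DEBUG_GETTER = DEBUG.invoker;

    // A static wrapper like this is only free when the JIT inlines it into the caller;
    // that is the assumption this commit stops relying on, by doing the invokeExact
    // read from the @inline checker methods instead.
    static boolean isDebug() throws Throwable {
        return (boolean) DEBUG_GETTER.invokeExact();
    }

    static void enableDebugAndDeoptimize() {
        DEBUG.toggleOnAndDeoptimize();
    }
}
```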
--- .../tools/nsc/symtab/SymbolLoaders.scala | 6 +-- .../tools/nsc/transform/patmat/Logic.scala | 6 +-- .../nsc/transform/patmat/MatchAnalysis.scala | 9 ++-- .../transform/patmat/MatchTranslation.scala | 6 +-- .../tools/nsc/transform/patmat/Solving.scala | 5 +- .../tools/nsc/typechecker/Analyzer.scala | 6 +-- .../tools/nsc/typechecker/Implicits.scala | 52 +++++++++---------- .../scala/tools/nsc/typechecker/Macros.scala | 8 +-- .../scala/tools/nsc/typechecker/Typers.scala | 45 ++++++++-------- .../scala/reflect/internal/BaseTypeSeqs.scala | 6 +-- .../scala/reflect/internal/Scopes.scala | 10 ++-- .../scala/reflect/internal/Symbols.scala | 6 +-- .../scala/reflect/internal/Types.scala | 36 ++++++------- .../internal/settings/MutableSettings.scala | 7 +-- .../reflect/internal/tpe/FindMembers.scala | 12 ++--- .../scala/reflect/internal/tpe/GlbLubs.scala | 17 +++--- .../reflect/internal/tpe/TypeComparers.scala | 3 +- .../internal/util/StatisticsStatics.java | 13 ++--- .../scala/reflect/io/AbstractFile.scala | 2 +- src/reflect/scala/reflect/io/Path.scala | 10 ++-- 20 files changed, 126 insertions(+), 139 deletions(-) diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala index 5fe2387c2312..8836a1d80885 100644 --- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala +++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala @@ -19,7 +19,7 @@ import java.io.IOException import scala.reflect.internal.MissingRequirementError import scala.reflect.io.{AbstractFile, NoAbstractFile} import scala.tools.nsc.util.{ClassPath, ClassRepresentation} -import scala.reflect.internal.util.{ReusableInstance, StatisticsStatics} +import scala.reflect.internal.util.ReusableInstance import scala.tools.nsc.Reporting.WarningCategory /** This class ... 
@@ -337,11 +337,11 @@ abstract class SymbolLoaders { protected def description = "class file "+ classfile.toString protected def doComplete(root: Symbol): Unit = { - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(statistics.classReadNanos) else null + val start = if (settings.areStatisticsEnabled) statistics.startTimer(statistics.classReadNanos) else null classfileParser.parse(classfile, clazz, module) if (clazz.associatedFile eq NoAbstractFile) clazz.associatedFile = classfile if (module.associatedFile eq NoAbstractFile) module.associatedFile = classfile - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(statistics.classReadNanos, start) + if (settings.areStatisticsEnabled) statistics.stopTimer(statistics.classReadNanos, start) } override def sourcefile: Option[AbstractFile] = classfileParser.srcfile override def associatedFile(self: Symbol): AbstractFile = classfile diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala index a06f648680ce..d88f1505b7b6 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala @@ -16,7 +16,7 @@ package tools.nsc.transform.patmat import scala.collection.mutable import scala.collection.immutable.ArraySeq import scala.reflect.internal.util.Collections._ -import scala.reflect.internal.util.{HashSet, StatisticsStatics} +import scala.reflect.internal.util.HashSet trait Logic extends Debugging { import global._ @@ -408,7 +408,7 @@ trait Logic extends Debugging { // V1 = Nil implies -(V2 = Ci) for all Ci in V2's domain (i.e., it is unassignable) // may throw an AnalysisBudget.Exception def removeVarEq(props: List[Prop], modelNull: Boolean = false): (Prop, List[Prop]) = { - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(statistics.patmatAnaVarEq) else null + val start = if (settings.areStatisticsEnabled) statistics.startTimer(statistics.patmatAnaVarEq) else null val vars = new mutable.LinkedHashSet[Var] @@ -491,7 +491,7 @@ trait Logic extends Debugging { debug.patmat(s"eqAxioms:\n${eqAxiomsSeq.mkString("\n")}") debug.patmat(s"pure:${pure.mkString("\n")}") - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(statistics.patmatAnaVarEq, start) + if (settings.areStatisticsEnabled) statistics.stopTimer(statistics.patmatAnaVarEq, start) (And(eqAxiomsSeq: _*), pure) } diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala index 3730a5668bcb..99aafbee6a03 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala @@ -14,7 +14,6 @@ package scala.tools.nsc.transform.patmat import scala.annotation.tailrec import scala.collection.mutable -import scala.reflect.internal.util.StatisticsStatics import scala.tools.nsc.Reporting.WarningCategory trait TreeAndTypeAnalysis extends Debugging { @@ -459,7 +458,7 @@ trait MatchAnalysis extends MatchApproximation { // or, equivalently, P \/ -C, or C => P def unreachableCase(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): Option[Int] = { debug.patmat("reachability analysis") - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(statistics.patmatAnaReach) else null + val start = if (settings.areStatisticsEnabled) statistics.startTimer(statistics.patmatAnaReach) else 
null // use the same approximator so we share variables, // but need different conditions depending on whether we're conservatively looking for failure or success @@ -503,7 +502,7 @@ trait MatchAnalysis extends MatchApproximation { } } - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(statistics.patmatAnaReach, start) + if (settings.areStatisticsEnabled) statistics.stopTimer(statistics.patmatAnaReach, start) if (reachable) None else Some(caseIndex) } catch { @@ -521,7 +520,7 @@ trait MatchAnalysis extends MatchApproximation { // - approximate the pattern `List()` (unapplySeq on List with empty length) as `Nil`, // otherwise the common (xs: List[Any]) match { case List() => case x :: xs => } is deemed unexhaustive // - back off (to avoid crying exhaustive too often) in unhandled cases - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(statistics.patmatAnaExhaust) else null + val start = if (settings.areStatisticsEnabled) statistics.startTimer(statistics.patmatAnaExhaust) else null var backoff = false val strict = !settings.nonStrictPatmatAnalysis.value @@ -578,7 +577,7 @@ trait MatchAnalysis extends MatchApproximation { // and make sure the strings are distinct, see Shmeez & TestSequence06 in run/patmatnew.scala val pruned = CounterExample.prune(counterExamples.sortBy(_.toString)).map(_.toString).distinct - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(statistics.patmatAnaExhaust, start) + if (settings.areStatisticsEnabled) statistics.stopTimer(statistics.patmatAnaExhaust, start) pruned } catch { case ex: AnalysisBudget.Exception => diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala index 6d5a8eab3919..108d0e646e68 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala @@ -12,8 +12,6 @@ package scala.tools.nsc.transform.patmat -import scala.reflect.internal.util.StatisticsStatics - /** Translate typed Trees that represent pattern matches into the patternmatching IR, defined by TreeMakers. 
*/ trait MatchTranslation { @@ -209,7 +207,7 @@ trait MatchTranslation { debug.patmat("translating "+ cases.mkString("{", "\n", "}")) - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(statistics.patmatNanos) else null + val start = if (settings.areStatisticsEnabled) statistics.startTimer(statistics.patmatNanos) else null val selectorTp = repeatedToSeq(elimAnonymousClass(selector.tpe.withoutAnnotations)) @@ -225,7 +223,7 @@ trait MatchTranslation { // pt = Any* occurs when compiling test/files/pos/annotDepMethType.scala val combined = combineCases(selector, selectorSym, nonSyntheticCases map translateCase(selectorSym, pt), pt, selectorPos, matchOwner, defaultOverride, getSuppression(selector)) - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(statistics.patmatNanos, start) + if (settings.areStatisticsEnabled) statistics.stopTimer(statistics.patmatNanos, start) combined } diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala index 4146db459b4e..dd6a524549dc 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala @@ -15,7 +15,6 @@ package scala.tools.nsc.transform.patmat import scala.annotation.tailrec import scala.collection.mutable.ArrayBuffer import scala.collection.{immutable, mutable} -import scala.reflect.internal.util.StatisticsStatics /** Solve pattern matcher exhaustivity problem via DPLL. */ trait Solving extends Logic { @@ -479,12 +478,12 @@ trait Solving extends Logic { def hasModel(solvable: Solvable): Boolean = findTseitinModelFor(solvable.cnf) != NoTseitinModel def findTseitinModelFor(clauses: Array[Clause]): TseitinModel = { - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(statistics.patmatAnaDPLL) else null + val start = if (settings.areStatisticsEnabled) statistics.startTimer(statistics.patmatAnaDPLL) else null debug.patmat(s"DPLL\n${cnfString(clauses)}") val satisfiableWithModel = findTseitinModel0((java.util.Arrays.copyOf(clauses, clauses.length), Nil) :: Nil) - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(statistics.patmatAnaDPLL, start) + if (settings.areStatisticsEnabled) statistics.stopTimer(statistics.patmatAnaDPLL, start) satisfiableWithModel } diff --git a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala index cd5278776a57..a48dad7c960c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala @@ -13,8 +13,6 @@ package scala.tools.nsc package typechecker -import scala.reflect.internal.util.StatisticsStatics - /** Defines the sub-components for the namer, packageobjects, and typer phases. */ trait Analyzer extends AnyRef @@ -96,7 +94,7 @@ trait Analyzer extends AnyRef // compiler run). This is good enough for the resident compiler, which was the most affected. 
undoLog.clear() override def run(): Unit = { - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(statistics.typerNanos) else null + val start = if (settings.areStatisticsEnabled) statistics.startTimer(statistics.typerNanos) else null global.echoPhaseSummary(this) val units = currentRun.units while (units.hasNext) { @@ -106,7 +104,7 @@ trait Analyzer extends AnyRef finishComputeParamAlias() // defensive measure in case the bookkeeping in deferred macro expansion is buggy clearDelayed() - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(statistics.typerNanos, start) + if (settings.areStatisticsEnabled) statistics.stopTimer(statistics.typerNanos, start) } def apply(unit: CompilationUnit): Unit = { try { diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index c17b49a79d9d..fe3a8549c5d0 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -24,7 +24,7 @@ import scala.collection.mutable import mutable.{LinkedHashMap, ListBuffer} import scala.util.matching.Regex import symtab.Flags._ -import scala.reflect.internal.util.{ReusableInstance, Statistics, StatisticsStatics, TriState} +import scala.reflect.internal.util.{ReusableInstance, Statistics, TriState} import scala.reflect.internal.TypesStats import scala.language.implicitConversions import scala.tools.nsc.Reporting.WarningCategory @@ -99,9 +99,9 @@ trait Implicits extends splain.SplainData { // Note that the isInvalidConversionTarget seems to make a lot more sense right here, before all the // work is performed, than at the point where it presently exists. val shouldPrint = printTypings && !context.undetparams.isEmpty - val findMemberStart = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startCounter(findMemberImpl) else null - val subtypeStart = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startCounter(subtypeImpl) else null - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(implicitNanos) else null + val findMemberStart = if (settings.areStatisticsEnabled) statistics.startCounter(findMemberImpl) else null + val subtypeStart = if (settings.areStatisticsEnabled) statistics.startCounter(subtypeImpl) else null + val start = if (settings.areStatisticsEnabled) statistics.startTimer(implicitNanos) else null if (shouldPrint) typingStack.printTyping(tree, "typing implicit: %s %s".format(tree, context.undetparamsString)) val implicitSearchContext = context.makeImplicit(reportAmbiguous) @@ -123,9 +123,9 @@ trait Implicits extends splain.SplainData { // and then filter out any which *were* inferred and are part of the substitutor in the implicit search result. 
context.undetparams = ((context.undetparams ++ result.undetparams) filterNot result.subst.from.contains).distinct - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(implicitNanos, start) - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopCounter(findMemberImpl, findMemberStart) - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopCounter(subtypeImpl, subtypeStart) + if (settings.areStatisticsEnabled) statistics.stopTimer(implicitNanos, start) + if (settings.areStatisticsEnabled) statistics.stopCounter(findMemberImpl, findMemberStart) + if (settings.areStatisticsEnabled) statistics.stopCounter(subtypeImpl, subtypeStart) if (result.isSuccess && settings.lintImplicitRecursion && result.tree.symbol != null) { val s = @@ -422,7 +422,7 @@ trait Implicits extends splain.SplainData { } import infer._ - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(implicitSearchCount) + if (settings.areStatisticsEnabled) statistics.incCounter(implicitSearchCount) /** The type parameters to instantiate */ val undetParams = if (isView) Nil else context.outer.undetparams @@ -452,12 +452,12 @@ trait Implicits extends splain.SplainData { /** Is implicit info `info1` better than implicit info `info2`? */ def improves(info1: ImplicitInfo, info2: ImplicitInfo) = { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(improvesCount) + if (settings.areStatisticsEnabled) statistics.incCounter(improvesCount) (info2 == NoImplicitInfo) || (info1 != NoImplicitInfo) && { if (info1.sym.isStatic && info2.sym.isStatic) { improvesCache get ((info1, info2)) match { - case Some(b) => if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(improvesCachedCount); b + case Some(b) => if (settings.areStatisticsEnabled) statistics.incCounter(improvesCachedCount); b case None => val result = isStrictlyMoreSpecific(info1.tpe, info2.tpe, info1.sym, info2.sym) improvesCache((info1, info2)) = result @@ -650,14 +650,14 @@ trait Implicits extends splain.SplainData { * This method is performance critical: 5-8% of typechecking time. 
*/ private def matchesPt(tp: Type, pt: Type, undet: List[Symbol]): Boolean = { - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(matchesPtNanos) else null + val start = if (settings.areStatisticsEnabled) statistics.startTimer(matchesPtNanos) else null val result = normSubType(tp, pt) || isView && { pt match { case Function1(arg1, arg2) => matchesPtView(tp, arg1, arg2, undet) case _ => false } } - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(matchesPtNanos, start) + if (settings.areStatisticsEnabled) statistics.stopTimer(matchesPtNanos, start) result } private def matchesPt(info: ImplicitInfo): Boolean = ( @@ -684,7 +684,7 @@ trait Implicits extends splain.SplainData { } private def matchesPtInst(info: ImplicitInfo): Boolean = { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(matchesPtInstCalls) + if (settings.areStatisticsEnabled) statistics.incCounter(matchesPtInstCalls) info.tpe match { case PolyType(tparams, restpe) => try { @@ -693,7 +693,7 @@ trait Implicits extends splain.SplainData { val tp = ApproximateDependentMap(restpe) val tpInstantiated = tp.instantiateTypeParams(allUndetparams, tvars) if(!matchesPt(tpInstantiated, wildPt, allUndetparams)) { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(matchesPtInstMismatch1) + if (settings.areStatisticsEnabled) statistics.incCounter(matchesPtInstMismatch1) false } else { // we can't usefully prune views any further because we would need to type an application @@ -703,7 +703,7 @@ trait Implicits extends splain.SplainData { val adjusted = adjustTypeArgs(allUndetparams, tvars, targs) val tpSubst = deriveTypeWithWildcards(adjusted.undetParams)(tp.instantiateTypeParams(adjusted.okParams, adjusted.okArgs)) if(!matchesPt(tpSubst, wildPt, adjusted.undetParams)) { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(matchesPtInstMismatch2) + if (settings.areStatisticsEnabled) statistics.incCounter(matchesPtInstMismatch2) false } else true } @@ -801,7 +801,7 @@ trait Implicits extends splain.SplainData { } private def typedImplicit0(info: ImplicitInfo, ptChecked: Boolean, isLocalToCallsite: Boolean): SearchResult = { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(plausiblyCompatibleImplicits) + if (settings.areStatisticsEnabled) statistics.incCounter(plausiblyCompatibleImplicits) val ok = ptChecked || matchesPt(info) && { def word = if (isLocalToCallsite) "local " else "" typingLog("match", s"$word$info") @@ -811,7 +811,7 @@ trait Implicits extends splain.SplainData { } private def typedImplicit1(info: ImplicitInfo, isLocalToCallsite: Boolean): SearchResult = { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(matchingImplicits) + if (settings.areStatisticsEnabled) statistics.incCounter(matchingImplicits) // workaround for deficient context provided by ModelFactoryImplicitSupport#makeImplicitConstraints val isScaladoc = context.tree == EmptyTree @@ -867,7 +867,7 @@ trait Implicits extends splain.SplainData { case None => } - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(typedImplicits) + if (settings.areStatisticsEnabled) statistics.incCounter(typedImplicits) val itree3 = if (isView) treeInfo.dissectApplied(itree2).callee else adapt(itree2, EXPRmode, wildPt) @@ -961,7 +961,7 @@ trait Implicits extends splain.SplainData { fail("typing TypeApply reported errors for the implicit tree: " + err.errMsg) case None => val result = new 
SearchResult(unsuppressMacroExpansion(itree3), subst, context.undetparams) - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(foundImplicits) + if (settings.areStatisticsEnabled) statistics.incCounter(foundImplicits) typingLog("success", s"inferred value of type $ptInstantiated is $result") result } @@ -1280,11 +1280,11 @@ trait Implicits extends splain.SplainData { * @return map from infos to search results */ def applicableInfos(iss: Infoss, isLocalToCallsite: Boolean): mutable.LinkedHashMap[ImplicitInfo, SearchResult] = { - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startCounter(subtypeAppInfos) else null + val start = if (settings.areStatisticsEnabled) statistics.startCounter(subtypeAppInfos) else null val computation = new ImplicitComputation(iss, isLocalToCallsite) { } val applicable = computation.findAll() - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopCounter(subtypeAppInfos, start) + if (settings.areStatisticsEnabled) statistics.stopCounter(subtypeAppInfos, start) applicable } @@ -1428,13 +1428,13 @@ trait Implicits extends splain.SplainData { * such that some part of `tp` has C as one of its superclasses. */ private def implicitsOfExpectedType: Infoss = { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(implicitCacheAccs) + if (settings.areStatisticsEnabled) statistics.incCounter(implicitCacheAccs) implicitsCache get pt match { case Some(implicitInfoss) => - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(implicitCacheHits) + if (settings.areStatisticsEnabled) statistics.incCounter(implicitCacheHits) implicitInfoss case None => - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(subtypeETNanos) else null + val start = if (settings.areStatisticsEnabled) statistics.startTimer(subtypeETNanos) else null // val implicitInfoss = companionImplicits(pt) val implicitInfoss1 = companionImplicitMap(pt).valuesIterator.toList // val is1 = implicitInfoss.flatten.toSet @@ -1443,7 +1443,7 @@ trait Implicits extends splain.SplainData { // if (!(is2 contains i)) println("!!! implicit infos of "+pt+" differ, new does not contain "+i+",\nold: "+implicitInfoss+",\nnew: "+implicitInfoss1) // for (i <- is2) // if (!(is1 contains i)) println("!!! 
implicit infos of "+pt+" differ, old does not contain "+i+",\nold: "+implicitInfoss+",\nnew: "+implicitInfoss1) - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(subtypeETNanos, start) + if (settings.areStatisticsEnabled) statistics.stopTimer(subtypeETNanos, start) implicitsCache(pt) = implicitInfoss1 if (implicitsCache.size >= sizeLimit) implicitsCache -= implicitsCache.keysIterator.next() @@ -1689,7 +1689,7 @@ trait Implicits extends splain.SplainData { * If all fails return SearchFailure */ def bestImplicit: SearchResult = { - val stats = StatisticsStatics.areSomeColdStatsEnabled + val stats = settings.areStatisticsEnabled val failstart = if (stats) statistics.startTimer(inscopeFailNanos) else null val succstart = if (stats) statistics.startTimer(inscopeSucceedNanos) else null diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala index b7bf7a219dcb..073cf5e13968 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala @@ -18,7 +18,7 @@ import java.lang.Math.min import symtab.Flags._ import scala.reflect.internal.util.ScalaClassLoader import scala.reflect.runtime.ReflectionUtils -import scala.reflect.internal.util.{Statistics, StatisticsStatics} +import scala.reflect.internal.util.Statistics import scala.reflect.internal.TypesStats import scala.reflect.macros.util._ import scala.util.control.ControlThrowable @@ -562,8 +562,8 @@ trait Macros extends MacroRuntimes with Traces with Helpers { if (macroDebugVerbose) println(s"macroExpand: ${summary()}") linkExpandeeAndDesugared(expandee, desugared) - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(statistics.macroExpandNanos) else null - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(statistics.macroExpandCount) + val start = if (settings.areStatisticsEnabled) statistics.startTimer(statistics.macroExpandNanos) else null + if (settings.areStatisticsEnabled) statistics.incCounter(statistics.macroExpandCount) try { withInfoLevel(nodePrinters.InfoLevel.Quiet) { // verbose printing might cause recursive macro expansions if (expandee.symbol.isErroneous || (expandee exists (_.isErroneous))) { @@ -596,7 +596,7 @@ trait Macros extends MacroRuntimes with Traces with Helpers { } } } finally { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(statistics.macroExpandNanos, start) + if (settings.areStatisticsEnabled) statistics.stopTimer(statistics.macroExpandNanos, start) } } } diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index c253fdc7e368..b456ce4a9a78 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -17,7 +17,7 @@ package typechecker import scala.annotation.tailrec import scala.collection.mutable import scala.reflect.internal.{Chars, TypesStats} -import scala.reflect.internal.util.{FreshNameCreator, ListOfNil, Statistics, StatisticsStatics} +import scala.reflect.internal.util.{FreshNameCreator, ListOfNil, Statistics} import scala.tools.nsc.Reporting.{MessageFilter, Suppression, WConf, WarningCategory} import scala.util.chaining._ import mutable.ListBuffer @@ -672,13 +672,13 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def silent[T](op: Typer => T, reportAmbiguousErrors: Boolean = context.ambiguousErrors, newtree: Tree = 
context.tree): SilentResult[T] = { - val findMemberStart = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startCounter(findMemberFailed) else null - val subtypeStart = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startCounter(subtypeFailed) else null - val failedSilentStart = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(failedSilentNanos) else null + val findMemberStart = if (settings.areStatisticsEnabled) statistics.startCounter(findMemberFailed) else null + val subtypeStart = if (settings.areStatisticsEnabled) statistics.startCounter(subtypeFailed) else null + val failedSilentStart = if (settings.areStatisticsEnabled) statistics.startTimer(failedSilentNanos) else null def stopStats() = { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopCounter(findMemberFailed, findMemberStart) - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopCounter(subtypeFailed, subtypeStart) - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(failedSilentNanos, failedSilentStart) + if (settings.areStatisticsEnabled) statistics.stopCounter(findMemberFailed, findMemberStart) + if (settings.areStatisticsEnabled) statistics.stopCounter(subtypeFailed, subtypeStart) + if (settings.areStatisticsEnabled) statistics.stopTimer(failedSilentNanos, failedSilentStart) } @inline def wrapResult(reporter: ContextReporter, result: T) = if (reporter.hasErrors) { @@ -4205,9 +4205,9 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def isCapturedExistential(sym: Symbol) = (sym hasAllFlags EXISTENTIAL | CAPTURED) && { - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(isReferencedNanos) else null + val start = if (settings.areStatisticsEnabled) statistics.startTimer(isReferencedNanos) else null try !isReferencedFrom(context, sym) - finally if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(isReferencedNanos, start) + finally if (settings.areStatisticsEnabled) statistics.stopTimer(isReferencedNanos, start) } def packCaptured(tpe: Type): Type = { @@ -4930,10 +4930,10 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper * insert an implicit conversion. */ def tryTypedApply(fun: Tree, args: List[Tree]): Tree = { - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(failedApplyNanos) else null + val start = if (settings.areStatisticsEnabled) statistics.startTimer(failedApplyNanos) else null def onError(typeErrors: Seq[AbsTypeError], warnings: Seq[(Position, String, WarningCategory, Symbol)]): Tree = { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(failedApplyNanos, start) + if (settings.areStatisticsEnabled) statistics.stopTimer(failedApplyNanos, start) // If the problem is with raw types, convert to existentials and try again. 
// See #4712 for a case where this situation arises, @@ -5014,8 +5014,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // TODO: replace `fun.symbol.isStable` by `treeInfo.isStableIdentifierPattern(fun)` val stableApplication = (fun.symbol ne null) && fun.symbol.isMethod && fun.symbol.isStable val funpt = if (mode.inPatternMode) pt else WildcardType - val appStart = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(failedApplyNanos) else null - val opeqStart = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(failedOpEqNanos) else null + val appStart = if (settings.areStatisticsEnabled) statistics.startTimer(failedApplyNanos) else null + val opeqStart = if (settings.areStatisticsEnabled) statistics.startTimer(failedOpEqNanos) else null def isConversionCandidate(qual: Tree, name: Name): Boolean = !mode.inPatternMode && nme.isOpAssignmentName(TermName(name.decode)) && !qual.exists(_.isErroneous) @@ -5045,7 +5045,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper case Select(qual, name) if isConversionCandidate(qual, name) => val qual1 = typedQualifier(qual) if (treeInfo.isVariableOrGetter(qual1)) { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(failedOpEqNanos, opeqStart) + if (settings.areStatisticsEnabled) statistics.stopTimer(failedOpEqNanos, opeqStart) val erred = qual1.exists(_.isErroneous) || args.exists(_.isErroneous) if (erred) reportError(error) else { val convo = convertToAssignment(fun, qual1, name, args) @@ -5057,7 +5057,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } } } else { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(failedApplyNanos, appStart) + if (settings.areStatisticsEnabled) statistics.stopTimer(failedApplyNanos, appStart) val Apply(Select(qual2, _), args2) = tree: @unchecked val erred = qual2.exists(_.isErroneous) || args2.exists(_.isErroneous) reportError { @@ -5065,7 +5065,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } } case _ => - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(failedApplyNanos, appStart) + if (settings.areStatisticsEnabled) statistics.stopTimer(failedApplyNanos, appStart) reportError(error) } val silentResult = silent( @@ -5076,7 +5076,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper silentResult match { case SilentResultValue(fun1) => val fun2 = if (stableApplication) stabilizeFun(fun1, mode, pt) else fun1 - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(typedApplyCount) + if (settings.areStatisticsEnabled) statistics.incCounter(typedApplyCount) val noSecondTry = ( isPastTyper || context.inSecondTry @@ -5404,7 +5404,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper else UnstableTreeError(qualTyped) typedSelect(tree, qualStableOrError, name) } else { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(typedSelectCount) + if (settings.areStatisticsEnabled) statistics.incCounter(typedSelectCount) val qualTyped = checkDead(context, typedQualifier(qual, mode)) val tree1 = typedSelect(tree, qualTyped, name) @@ -5511,7 +5511,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def typedIdentOrWildcard(tree: Ident) = { val name = tree.name - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(typedIdentCount) + if 
(settings.areStatisticsEnabled) statistics.incCounter(typedIdentCount) if (!tree.isBackquoted && ((name == nme.WILDCARD && mode.typingPatternNotConstructor) || (name == tpnme.WILDCARD && mode.inTypeMode))) @@ -6003,9 +6003,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def typed(tree: Tree, mode: Mode, pt: Type): Tree = { lastTreeToTyper = tree - val statsEnabled = StatisticsStatics.areSomeHotStatsEnabled && settings.areStatisticsEnabled && settings.YhotStatisticsEnabled - val startByType = if (statsEnabled) statistics.pushTimer(byTypeStack, byTypeNanos(tree.getClass)) else null - if (statsEnabled) statistics.incCounter(visitsByType, tree.getClass) + val startByType = if (settings.areHotStatisticsEnabled) statistics.pushTimer(byTypeStack, byTypeNanos(tree.getClass)) else null + if (settings.areHotStatisticsEnabled) statistics.incCounter(visitsByType, tree.getClass) val shouldPrintTyping = printTypings && !phase.erasedTypes && !noPrintTyping(tree) val shouldPopTypingStack = shouldPrintTyping && typingStack.beforeNextTyped(tree, mode, pt, context) @@ -6091,7 +6090,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper throw ex } finally { if (shouldPopTypingStack) typingStack.pop(tree) - if (statsEnabled) statistics.popTimer(byTypeStack, startByType) + if (settings.areHotStatisticsEnabled) statistics.popTimer(byTypeStack, startByType) if (shouldInsertStabilizers) context.pendingStabilizers = savedPendingStabilizer } } diff --git a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala index b99f40770791..570a94e960ed 100644 --- a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala +++ b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala @@ -16,7 +16,7 @@ package internal // todo implement in terms of BitSet import scala.collection.mutable -import util.{Statistics, StatisticsStatics} +import util.Statistics /** A base type sequence (BaseTypeSeq) is an ordered sequence spanning all the base types * of a type. 
It characterized by the following two laws: @@ -50,8 +50,8 @@ trait BaseTypeSeqs { */ class BaseTypeSeq protected[reflect] (private[BaseTypeSeqs] val parents: List[Type], private[BaseTypeSeqs] val elems: Array[Type]) { self => - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(baseTypeSeqCount) - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(baseTypeSeqLenTotal, elems.length) + if (settings.areStatisticsEnabled) statistics.incCounter(baseTypeSeqCount) + if (settings.areStatisticsEnabled) statistics.incCounter(baseTypeSeqLenTotal, elems.length) private[this] val typeSymbols = { val tmp = new Array[Int](elems.length) var i = 0 diff --git a/src/reflect/scala/reflect/internal/Scopes.scala b/src/reflect/scala/reflect/internal/Scopes.scala index e428747db7cb..f0bdf01331a7 100644 --- a/src/reflect/scala/reflect/internal/Scopes.scala +++ b/src/reflect/scala/reflect/internal/Scopes.scala @@ -17,7 +17,7 @@ package internal import scala.annotation.tailrec import scala.collection.{AbstractIterable, AbstractIterator} import scala.collection.mutable.Clearable -import scala.reflect.internal.util.{Statistics, StatisticsStatics} +import scala.reflect.internal.util.Statistics trait Scopes extends api.Scopes { self: SymbolTable => @@ -515,22 +515,22 @@ trait Scopes extends api.Scopes { self: SymbolTable => /** Create a new scope nested in another one with which it shares its elements */ final def newNestedScope(outer: Scope): Scope = { - val startTime = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(statistics.scopePopulationTime) else null + val startTime = if (settings.areStatisticsEnabled) statistics.startTimer(statistics.scopePopulationTime) else null val nested = newScope // not `new Scope`, we must allow the runtime reflection universe to mixin SynchronizedScopes! 
nested.elems = outer.elems nested.nestinglevel = outer.nestinglevel + 1 if (outer.hashtable ne null) nested.hashtable = java.util.Arrays.copyOf(outer.hashtable, outer.hashtable.length) - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(statistics.scopePopulationTime, startTime) + if (settings.areStatisticsEnabled) statistics.stopTimer(statistics.scopePopulationTime, startTime) nested } /** Create a new scope with given initial elements */ def newScopeWith(elems: Symbol*): Scope = { - val startTime = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(statistics.scopePopulationTime) else null + val startTime = if (settings.areStatisticsEnabled) statistics.startTimer(statistics.scopePopulationTime) else null val scope = newScope elems foreach scope.enter - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(statistics.scopePopulationTime, startTime) + if (settings.areStatisticsEnabled) statistics.stopTimer(statistics.scopePopulationTime, startTime) scope } diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index be808ffdf20c..20f75fa7f14c 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -20,7 +20,7 @@ package internal import scala.collection.immutable import scala.collection.mutable.ListBuffer -import util.{ Statistics, shortClassOfInstance, StatisticsStatics } +import util.{ Statistics, shortClassOfInstance } import Flags._ import scala.annotation.tailrec import scala.reflect.io.{AbstractFile, NoAbstractFile} @@ -3278,7 +3278,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => * info for T in Test1 should be >: Nothing <: Test3[_] */ - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(typeSymbolCount) + if (settings.areStatisticsEnabled) statistics.incCounter(typeSymbolCount) } implicit val TypeSymbolTag = ClassTag[TypeSymbol](classOf[TypeSymbol]) @@ -3498,7 +3498,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => else super.toString ) - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(classSymbolCount) + if (settings.areStatisticsEnabled) statistics.incCounter(classSymbolCount) } implicit val ClassSymbolTag = ClassTag[ClassSymbol](classOf[ClassSymbol]) diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 7dbc627fe055..b96fe784a704 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -22,7 +22,7 @@ import mutable.{ListBuffer, LinkedHashSet} import Flags._ import scala.util.control.ControlThrowable import scala.annotation.{tailrec, unused} -import util.{Statistics, StatisticsStatics} +import util.Statistics import util.ThreeValues._ import Variance._ import Depth._ @@ -692,7 +692,7 @@ trait Types * = Int */ def asSeenFrom(pre: Type, clazz: Symbol): Type = { - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.pushTimer(typeOpsStack, asSeenFromNanos) else null + val start = if (settings.areStatisticsEnabled) statistics.pushTimer(typeOpsStack, asSeenFromNanos) else null try { val trivial = ( this.isTrivial @@ -708,7 +708,7 @@ trait Types if (m.capturedSkolems.isEmpty) tp1 else deriveType(m.capturedSkolems, _.cloneSymbol setFlag CAPTURED)(tp1) } - } finally if (StatisticsStatics.areSomeColdStatsEnabled) statistics.popTimer(typeOpsStack, start) + } finally if (settings.areStatisticsEnabled) 
statistics.popTimer(typeOpsStack, start) } /** The info of `sym`, seen as a member of this type. @@ -814,7 +814,7 @@ trait Types /** Is this type a subtype of that type? */ def <:<(that: Type): Boolean = { - if (StatisticsStatics.areSomeColdStatsEnabled) stat_<:<(that) + if (settings.areStatisticsEnabled) stat_<:<(that) else { (this eq that) || (if (explainSwitch) explain("<:", isSubType(_: Type, _: Type), this, that) @@ -846,26 +846,26 @@ trait Types }) def stat_<:<(that: Type): Boolean = { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(subtypeCount) - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.pushTimer(typeOpsStack, subtypeNanos) else null + if (settings.areStatisticsEnabled) statistics.incCounter(subtypeCount) + val start = if (settings.areStatisticsEnabled) statistics.pushTimer(typeOpsStack, subtypeNanos) else null val result = (this eq that) || (if (explainSwitch) explain("<:", isSubType(_: Type, _: Type), this, that) else isSubType(this, that)) - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.popTimer(typeOpsStack, start) + if (settings.areStatisticsEnabled) statistics.popTimer(typeOpsStack, start) result } /** Is this type a weak subtype of that type? True also for numeric types, i.e. Int weak_<:< Long. */ def weak_<:<(that: Type): Boolean = { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(subtypeCount) - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.pushTimer(typeOpsStack, subtypeNanos) else null + if (settings.areStatisticsEnabled) statistics.incCounter(subtypeCount) + val start = if (settings.areStatisticsEnabled) statistics.pushTimer(typeOpsStack, subtypeNanos) else null val result = ((this eq that) || (if (explainSwitch) explain("weak_<:", isWeakSubType, this, that) else isWeakSubType(this, that))) - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.popTimer(typeOpsStack, start) + if (settings.areStatisticsEnabled) statistics.popTimer(typeOpsStack, start) result } @@ -1760,8 +1760,8 @@ trait Types tpe.baseTypeSeqCache = tpWithoutTypeVars.baseTypeSeq lateMap paramToVar } else { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(compoundBaseTypeSeqCount) - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.pushTimer(typeOpsStack, baseTypeSeqNanos) else null + if (settings.areStatisticsEnabled) statistics.incCounter(compoundBaseTypeSeqCount) + val start = if (settings.areStatisticsEnabled) statistics.pushTimer(typeOpsStack, baseTypeSeqNanos) else null try { tpe.baseTypeSeqCache = undetBaseTypeSeq tpe.baseTypeSeqCache = @@ -1770,7 +1770,7 @@ trait Types else compoundBaseTypeSeq(tpe) } finally { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.popTimer(typeOpsStack, start) + if (settings.areStatisticsEnabled) statistics.popTimer(typeOpsStack, start) } // [Martin] suppressing memoization solves the problem with "same type after erasure" errors // when compiling with @@ -1793,13 +1793,13 @@ trait Types if (period != currentPeriod) { tpe.baseClassesPeriod = currentPeriod if (!isValidForBaseClasses(period)) { - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.pushTimer(typeOpsStack, baseClassesNanos) else null + val start = if (settings.areStatisticsEnabled) statistics.pushTimer(typeOpsStack, baseClassesNanos) else null try { tpe.baseClassesCache = null tpe.baseClassesCache = tpe.memo(computeBaseClasses(tpe))(tpe.typeSymbol :: _.baseClasses.tail) } finally { - if 
(StatisticsStatics.areSomeColdStatsEnabled) statistics.popTimer(typeOpsStack, start) + if (settings.areStatisticsEnabled) statistics.popTimer(typeOpsStack, start) } } } @@ -2796,13 +2796,13 @@ trait Types if (period != currentPeriod) { tpe.baseTypeSeqPeriod = currentPeriod if (!isValidForBaseClasses(period)) { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(typerefBaseTypeSeqCount) - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.pushTimer(typeOpsStack, baseTypeSeqNanos) else null + if (settings.areStatisticsEnabled) statistics.incCounter(typerefBaseTypeSeqCount) + val start = if (settings.areStatisticsEnabled) statistics.pushTimer(typeOpsStack, baseTypeSeqNanos) else null try { tpe.baseTypeSeqCache = undetBaseTypeSeq tpe.baseTypeSeqCache = tpe.baseTypeSeqImpl } finally { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.popTimer(typeOpsStack, start) + if (settings.areStatisticsEnabled) statistics.popTimer(typeOpsStack, start) } } } diff --git a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala index c4791fcbccd2..57c880f894c7 100644 --- a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala +++ b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala @@ -73,8 +73,9 @@ object MutableSettings { @inline implicit def reflectSettingToBoolean(s: MutableSettings#BooleanSetting): Boolean = s.value implicit class SettingsOps(private val settings: MutableSettings) extends AnyVal { - @inline final def areStatisticsEnabled = StatisticsStatics.areSomeColdStatsEnabled && settings.YstatisticsEnabled - @inline final def isDebug: Boolean = StatisticsStatics.isDebug && settings.debug - @inline final def isDeveloper: Boolean = StatisticsStatics.isDeveloper && settings.developer + @inline final def areStatisticsEnabled = (StatisticsStatics.COLD_STATS_GETTER.invokeExact(): Boolean) && settings.YstatisticsEnabled + @inline final def areHotStatisticsEnabled = (StatisticsStatics.HOT_STATS_GETTER.invokeExact(): Boolean) && settings.YhotStatisticsEnabled + @inline final def isDebug: Boolean = (StatisticsStatics.DEBUG_GETTER.invokeExact(): Boolean) && settings.debug + @inline final def isDeveloper: Boolean = (StatisticsStatics.DEVELOPER_GETTER.invokeExact(): Boolean) && settings.developer } } diff --git a/src/reflect/scala/reflect/internal/tpe/FindMembers.scala b/src/reflect/scala/reflect/internal/tpe/FindMembers.scala index fa2ba469c276..7cc3f799430a 100644 --- a/src/reflect/scala/reflect/internal/tpe/FindMembers.scala +++ b/src/reflect/scala/reflect/internal/tpe/FindMembers.scala @@ -13,7 +13,7 @@ package scala.reflect.internal package tpe -import util.{ReusableInstance, StatisticsStatics} +import util.ReusableInstance import Flags._ import scala.runtime.Statics.releaseFence @@ -51,10 +51,10 @@ trait FindMembers { // Main entry point def apply(): T = { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(findMemberCount) - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.pushTimer(typeOpsStack, findMemberNanos) else null + if (settings.areStatisticsEnabled) statistics.incCounter(findMemberCount) + val start = if (settings.areStatisticsEnabled) statistics.pushTimer(typeOpsStack, findMemberNanos) else null try searchConcreteThenDeferred - finally if (StatisticsStatics.areSomeColdStatsEnabled) statistics.popTimer(typeOpsStack, start) + finally if (settings.areStatisticsEnabled) statistics.popTimer(typeOpsStack, 
start) } protected def result: T @@ -316,11 +316,11 @@ trait FindMembers { // Assemble the result from the hand-rolled ListBuffer protected def result: Symbol = if (members eq null) { if (member0 == NoSymbol) { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(noMemberCount) + if (settings.areStatisticsEnabled) statistics.incCounter(noMemberCount) NoSymbol } else member0 } else { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(multMemberCount) + if (settings.areStatisticsEnabled) statistics.incCounter(multMemberCount) lastM.next = Nil releaseFence() initBaseClasses.head.newOverloaded(tpe, members) diff --git a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala index e5c982ce33af..ffb24459fce0 100644 --- a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala +++ b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala @@ -17,7 +17,6 @@ package tpe import scala.collection.mutable import scala.annotation.tailrec -import scala.reflect.internal.util.StatisticsStatics import Variance._ private[internal] trait GlbLubs { @@ -278,8 +277,8 @@ private[internal] trait GlbLubs { case Nil => NothingTpe case t :: Nil => t case _ => - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(lubCount) - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.pushTimer(typeOpsStack, lubNanos) else null + if (settings.areStatisticsEnabled) statistics.incCounter(lubCount) + val start = if (settings.areStatisticsEnabled) statistics.pushTimer(typeOpsStack, lubNanos) else null try { val res = lub(ts, lubDepth(ts)) // If the number of unapplied type parameters in all incoming @@ -297,7 +296,7 @@ private[internal] trait GlbLubs { finally { lubResults.clear() glbResults.clear() - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.popTimer(typeOpsStack, start) + if (settings.areStatisticsEnabled) statistics.popTimer(typeOpsStack, start) } } @@ -420,7 +419,7 @@ private[internal] trait GlbLubs { indent = indent + " " assert(indent.length <= 100, "LUB is highly indented") } - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(nestedLubCount) + if (settings.areStatisticsEnabled) statistics.incCounter(nestedLubCount) val res = lub0(ts) if (printLubs) { indent = indent stripSuffix " " @@ -445,14 +444,14 @@ private[internal] trait GlbLubs { case List() => AnyTpe case List(t) => t case ts0 => - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(lubCount) - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.pushTimer(typeOpsStack, lubNanos) else null + if (settings.areStatisticsEnabled) statistics.incCounter(lubCount) + val start = if (settings.areStatisticsEnabled) statistics.pushTimer(typeOpsStack, lubNanos) else null try { glbNorm(ts0, lubDepth(ts0)) } finally { lubResults.clear() glbResults.clear() - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.popTimer(typeOpsStack, start) + if (settings.areStatisticsEnabled) statistics.popTimer(typeOpsStack, start) } } @@ -575,7 +574,7 @@ private[internal] trait GlbLubs { } } // if (settings.debug.value) { println(indent + "glb of " + ts + " at depth "+depth); indent = indent + " " } //DEBUG - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(nestedLubCount) + if (settings.areStatisticsEnabled) statistics.incCounter(nestedLubCount) glb0(ts) // if (settings.debug.value) { indent = indent.substring(0, indent.length() - 2); log(indent + "glb of " + ts + " is " + res) 
}//DEBUG } diff --git a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala index f919d1ea18e6..77276fbbfa50 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala @@ -18,7 +18,6 @@ package tpe import scala.collection.mutable import util.TriState import scala.annotation.tailrec -import scala.reflect.internal.util.StatisticsStatics trait TypeComparers { self: SymbolTable => @@ -104,7 +103,7 @@ trait TypeComparers { /** Do `tp1` and `tp2` denote equivalent types? */ def isSameType(tp1: Type, tp2: Type): Boolean = try { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(sametypeCount) + if (settings.areStatisticsEnabled) statistics.incCounter(sametypeCount) subsametypeRecursions += 1 //OPT cutdown on Function0 allocation //was: diff --git a/src/reflect/scala/reflect/internal/util/StatisticsStatics.java b/src/reflect/scala/reflect/internal/util/StatisticsStatics.java index 1143a64268a7..76c1644e18bf 100644 --- a/src/reflect/scala/reflect/internal/util/StatisticsStatics.java +++ b/src/reflect/scala/reflect/internal/util/StatisticsStatics.java @@ -26,15 +26,10 @@ public final class StatisticsStatics { private static final AlmostFinalValue DEBUG = new AlmostFinalValue(); private static final AlmostFinalValue DEVELOPER = new AlmostFinalValue(); - private static final MethodHandle COLD_STATS_GETTER = COLD_STATS.invoker; - private static final MethodHandle HOT_STATS_GETTER = HOT_STATS.invoker; - private static final MethodHandle DEBUG_GETTER = DEBUG.invoker; - private static final MethodHandle DEVELOPER_GETTER = DEVELOPER.invoker; - - public static boolean areSomeColdStatsEnabled() throws Throwable { return (boolean) COLD_STATS_GETTER.invokeExact(); } - public static boolean areSomeHotStatsEnabled() throws Throwable { return (boolean) HOT_STATS_GETTER.invokeExact(); } - public static boolean isDebug() throws Throwable { return (boolean) DEBUG_GETTER.invokeExact(); } - public static boolean isDeveloper() throws Throwable { return (boolean) DEVELOPER_GETTER.invokeExact(); } + public static final MethodHandle COLD_STATS_GETTER = COLD_STATS.invoker; + public static final MethodHandle HOT_STATS_GETTER = HOT_STATS.invoker; + public static final MethodHandle DEBUG_GETTER = DEBUG.invoker; + public static final MethodHandle DEVELOPER_GETTER = DEVELOPER.invoker; public static void enableColdStatsAndDeoptimize() { COLD_STATS.toggleOnAndDeoptimize(); } public static void enableHotStatsAndDeoptimize() { HOT_STATS.toggleOnAndDeoptimize(); } diff --git a/src/reflect/scala/reflect/io/AbstractFile.scala b/src/reflect/scala/reflect/io/AbstractFile.scala index d97e6d23e5ec..72736bfb2f26 100644 --- a/src/reflect/scala/reflect/io/AbstractFile.scala +++ b/src/reflect/scala/reflect/io/AbstractFile.scala @@ -121,7 +121,7 @@ abstract class AbstractFile extends AbstractIterable[AbstractFile] { /** Does this abstract file denote an existing file? 
*/ def exists: Boolean = { - //if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(IOStats.fileExistsCount) + //if (settings.areStatisticsEnabled) statistics.incCounter(IOStats.fileExistsCount) (file eq null) || file.exists } diff --git a/src/reflect/scala/reflect/io/Path.scala b/src/reflect/scala/reflect/io/Path.scala index 26bef55f5796..361805ba8955 100644 --- a/src/reflect/scala/reflect/io/Path.scala +++ b/src/reflect/scala/reflect/io/Path.scala @@ -64,12 +64,12 @@ object Path { def apply(path: String): Path = apply(new JFile(path)) def apply(jfile: JFile): Path = try { def isFile = { - //if (StatisticsStatics.areSomeColdStatsEnabled) Statistics.incCounter(IOStats.fileIsFileCount) + //if (settings.areStatisticsEnabled) Statistics.incCounter(IOStats.fileIsFileCount) jfile.isFile } def isDirectory = { - //if (StatisticsStatics.areSomeColdStatsEnabled) Statistics.incCounter(IOStats.fileIsDirectoryCount) + //if (settings.areStatisticsEnabled) Statistics.incCounter(IOStats.fileIsDirectoryCount) jfile.isDirectory } @@ -206,16 +206,16 @@ class Path private[io] (val jfile: JFile) { def canRead = jfile.canRead() def canWrite = jfile.canWrite() def exists = { - //if (StatisticsStatics.areSomeColdStatsEnabled) Statistics.incCounter(IOStats.fileExistsCount) + //if (settings.areStatisticsEnabled) Statistics.incCounter(IOStats.fileExistsCount) try jfile.exists() catch { case ex: SecurityException => false } } def isFile = { - //if (StatisticsStatics.areSomeColdStatsEnabled) Statistics.incCounter(IOStats.fileIsFileCount) + //if (settings.areStatisticsEnabled) Statistics.incCounter(IOStats.fileIsFileCount) try jfile.isFile() catch { case ex: SecurityException => false } } def isDirectory = { - //if (StatisticsStatics.areSomeColdStatsEnabled) Statistics.incCounter(IOStats.fileIsDirectoryCount) + //if (settings.areStatisticsEnabled) Statistics.incCounter(IOStats.fileIsDirectoryCount) try jfile.isDirectory() catch { case ex: SecurityException => jfile.getPath == "." } } def isAbsolute = jfile.isAbsolute() From f5e2a98a95783b5d3088edf88520a618fc65fca0 Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Fri, 30 Apr 2021 11:42:47 +0200 Subject: [PATCH 133/769] Support symbol literals under -Xsource:3 Scala 3 still supports symbol literals even if they require a language import now (cf https://github.com/lampepfl/dotty/pull/11588), so don't emit an error if we find one under -Xsource:3 as that could unnecessarily impede cross-compilation as discovered in https://github.com/scala/scala-dev/issues/769. 
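For example, the snippet from the neg test deleted below should now compile under -Xsource:3 with only the standard 2.13 deprecation warning instead of an error (an illustrative sketch of the expected behaviour, taken from that test, not an additional change in this patch):

  // scalac: -Xsource:3
  abstract class Foo {
    val foo = 'TestSymbol  // was: error: symbol literal is unsupported; use Symbol("TestSymbol") instead
                           // now: deprecation warning: symbol literal is deprecated; use Symbol("TestSymbol") instead
  }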
--- src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 3 +-- test/files/neg/symbol-literal-removal.check | 4 ---- test/files/neg/symbol-literal-removal.scala | 5 ----- 3 files changed, 1 insertion(+), 11 deletions(-) delete mode 100644 test/files/neg/symbol-literal-removal.check delete mode 100644 test/files/neg/symbol-literal-removal.scala diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 42767df41f78..9f4c7464d2bb 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -1421,8 +1421,7 @@ self => else if (in.token == SYMBOLLIT) { def msg(what: String) = s"""symbol literal is $what; use Symbol("${in.strVal}") instead""" - if (settings.isScala3) syntaxError(in.offset, msg("unsupported")) - else deprecationWarning(in.offset, msg("deprecated"), "2.13.0") + deprecationWarning(in.offset, msg("deprecated"), "2.13.0") Apply(scalaDot(nme.Symbol), List(finish(in.strVal))) } else finish(in.token match { diff --git a/test/files/neg/symbol-literal-removal.check b/test/files/neg/symbol-literal-removal.check deleted file mode 100644 index 839b635950fd..000000000000 --- a/test/files/neg/symbol-literal-removal.check +++ /dev/null @@ -1,4 +0,0 @@ -symbol-literal-removal.scala:4: error: symbol literal is unsupported; use Symbol("TestSymbol") instead - val foo = 'TestSymbol - ^ -1 error diff --git a/test/files/neg/symbol-literal-removal.scala b/test/files/neg/symbol-literal-removal.scala deleted file mode 100644 index 0d95ded21fd1..000000000000 --- a/test/files/neg/symbol-literal-removal.scala +++ /dev/null @@ -1,5 +0,0 @@ -// scalac: -Xsource:3 -// -abstract class Foo { - val foo = 'TestSymbol -} From ea314f2895e9553d715664e87d1056549c2a3543 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Fri, 30 Apr 2021 13:44:58 -0700 Subject: [PATCH 134/769] Avoid attempt to load plugin from empty path Noticed at scala.tools.nsc.GlobalCustomizeClassloaderTest.test which would print a complaint which went unheeded. There is a code comment to ignore dirs with no plugins, but this case is where there are no dirs. 
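In other words, `targets` could contain an empty classpath, which the old code still probed for a plugin descriptor, presumably producing the complaint the test printed; the new code drops empty classpaths up front. A sketch of the resulting shape (the same code as in the diff below, only reflowed):

  def targeted(targets: List[List[Path]]) =
    targets.filter(_.nonEmpty)
           .map(classpath => pluginResource(classpath, findPluginClassloader(classpath)))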
--- .../scala/tools/nsc/plugins/Plugin.scala | 18 ++++++------------ 1 file changed, 6 insertions(+), 12 deletions(-) diff --git a/src/compiler/scala/tools/nsc/plugins/Plugin.scala b/src/compiler/scala/tools/nsc/plugins/Plugin.scala index 888c707a7c25..39edb8100815 100644 --- a/src/compiler/scala/tools/nsc/plugins/Plugin.scala +++ b/src/compiler/scala/tools/nsc/plugins/Plugin.scala @@ -142,19 +142,14 @@ object Plugin { ignoring: List[String], findPluginClassloader: (Seq[Path] => ClassLoader)): List[Try[AnyClass]] = { - def targeted(targets: List[List[Path]]) = targets.map { path => - val loader = findPluginClassloader(path) + def pluginResource(classpath: List[Path], loader: ClassLoader) = loader.getResource(PluginXML) match { - case null => Failure(new MissingPluginException(path)) + case null => Failure(new MissingPluginException(classpath)) case url => val inputStream = url.openStream - try { - Try((PluginDescription.fromXML(inputStream), loader)) - } finally { - inputStream.close() - } + try Try((PluginDescription.fromXML(inputStream), loader)) finally inputStream.close() } - } + def targeted(targets: List[List[Path]]) = targets.filter(_.nonEmpty).map(classpath => pluginResource(classpath, findPluginClassloader(classpath))) def dirList(dir: Path) = if (dir.isDirectory) dir.toDirectory.files.filter(Jar.isJarOrZip).toList.sortBy(_.name) else Nil // ask plugin loaders for plugin resources, but ignore if none in -Xpluginsdir @@ -179,9 +174,8 @@ object Plugin { /** Instantiate a plugin class, given the class and * the compiler it is to be used in. */ - def instantiate(clazz: AnyClass, global: Global): Plugin = { - (clazz getConstructor classOf[Global] newInstance global).asInstanceOf[Plugin] - } + def instantiate(clazz: AnyClass, global: Global): Plugin = + clazz.getConstructor(classOf[Global]).newInstance(global).asInstanceOf[Plugin] } class PluginLoadException(val path: String, message: String, cause: Exception) extends Exception(message, cause) { From 433084186ea625ee2192734a18728c0dbd87f279 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Fri, 30 Apr 2021 23:09:43 -0700 Subject: [PATCH 135/769] Rectify test code --- build.sbt | 5 +- test/scalacheck/CheckEither.scala | 46 +++++++++++-------- test/scalacheck/Ctrie.scala | 7 ++- test/scalacheck/concurrent-map.scala | 3 +- test/scalacheck/range.scala | 6 +-- test/scalacheck/redblacktree.scala | 2 +- test/scalacheck/scala/ArrayTest.scala | 4 +- .../scala/collection/IndexOfSliceTest.scala | 1 + .../scala/collection/IteratorProperties.scala | 12 ++--- .../scala/collection/StringOpsProps.scala | 2 +- .../ImmutableChampHashMapProperties.scala | 4 +- .../ImmutableChampHashSetProperties.scala | 10 ++-- .../collection/immutable/SeqProperties.scala | 1 + .../collection/immutable/SetProperties.scala | 5 +- .../collection/mutable/MapProperties.scala | 3 +- .../collection/mutable/RedBlackTree.scala | 2 +- .../quasiquotes/ArbitraryTreesAndNames.scala | 1 + .../DefinitionConstructionProps.scala | 2 + .../quasiquotes/DeprecationProps.scala | 1 + .../reflect/quasiquotes/UnliftableProps.scala | 6 ++- test/scalacheck/t2460.scala | 4 +- test/scalacheck/treemap.scala | 16 +++---- test/scalacheck/treeset.scala | 8 ++-- 23 files changed, 87 insertions(+), 64 deletions(-) diff --git a/build.sbt b/build.sbt index 4879f841d428..d89a4e98dd3c 100644 --- a/build.sbt +++ b/build.sbt @@ -157,6 +157,7 @@ lazy val commonSettings = instanceSettings ++ clearSourceAndResourceDirectories // we don't want optimizer warnings to interfere with `-Werror`. 
we have hundreds of such warnings // when the optimizer is enabled (as it is in CI and release builds, though not in local development) Compile / scalacOptions += "-Wconf:cat=optimizer:is", + Compile / scalacOptions ++= Seq("-deprecation", "-feature"), Compile / doc / scalacOptions ++= Seq( "-doc-footer", "epfl", "-diagrams", @@ -743,6 +744,7 @@ lazy val tasty = project.in(file("test") / "tasty") lazy val scalacheck = project.in(file("test") / "scalacheck") .dependsOn(library, reflect, compiler, scaladoc) .settings(commonSettings) + .settings(fatalWarningsSettings) .settings(disableDocs) .settings(publish / skip := true) .settings( @@ -755,7 +757,7 @@ lazy val scalacheck = project.in(file("test") / "scalacheck") // Full stack trace on failure: "-verbosity", "2" ), - libraryDependencies ++= Seq(scalacheckDep), + libraryDependencies ++= Seq(scalacheckDep, junitDep), Compile / unmanagedSourceDirectories := Nil, Test / unmanagedSourceDirectories := List(baseDirectory.value) ) @@ -813,6 +815,7 @@ def osgiTestProject(p: Project, framework: ModuleID) = p lazy val partestJavaAgent = Project("partestJavaAgent", file(".") / "src" / "partest-javaagent") .settings(commonSettings) + .settings(fatalWarningsSettings) .settings(generatePropertiesFileSettings) .settings(disableDocs) .settings( diff --git a/test/scalacheck/CheckEither.scala b/test/scalacheck/CheckEither.scala index cf6b2e2f8558..c650cee4ade3 100644 --- a/test/scalacheck/CheckEither.scala +++ b/test/scalacheck/CheckEither.scala @@ -4,8 +4,16 @@ import org.scalacheck.Gen.oneOf import org.scalacheck.Prop._ import org.scalacheck.Test.check import Function.tupled +import scala.util.Either.LeftProjection +@annotation.nowarn("cat=deprecation") object CheckEitherTest extends Properties("Either") { + implicit class Failing[A, B](val e: Either[A, B]) { + def orFail = e.getOrElse(???) + } + implicit class FailingLeft[A, B](val e: LeftProjection[A, B]) { + def orFail = e.getOrElse(???) 
+ } implicit def arbitraryEither[X, Y](implicit xa: Arbitrary[X], ya: Arbitrary[Y]): Arbitrary[Either[X, Y]] = Arbitrary[Either[X, Y]](oneOf(arbitrary[X].map(Left(_)), arbitrary[Y].map(Right(_)))) @@ -14,14 +22,14 @@ object CheckEitherTest extends Properties("Either") { val prop_either2 = forAll((n: Int) => Right(n).fold(a => sys.error("fail"), x => x) == n) val prop_swap = forAll((e: Either[Int, Int]) => e match { - case Left(a) => e.swap.right.get == a - case Right(b) => e.swap.left.get == b + case Left(a) => e.swap.orFail == a + case Right(b) => e.swap.left.orFail == b }) val prop_isLeftRight = forAll((e: Either[Int, Int]) => e.isLeft != e.isRight) object CheckLeftProjection { - val prop_value = forAll((n: Int) => Left(n).left.get == n) + val prop_value = forAll((n: Int) => Left(n).left.orFail == n) val prop_getOrElse = forAll((e: Either[Int, Int], or: Int) => e.left.getOrElse(or) == (e match { case Left(a) => a @@ -29,10 +37,10 @@ object CheckEitherTest extends Properties("Either") { })) val prop_forall = forAll((e: Either[Int, Int]) => - e.left.forall(_ % 2 == 0) == (e.isRight || e.left.get % 2 == 0)) + e.left.forall(_ % 2 == 0) == (e.isRight || e.left.orFail % 2 == 0)) val prop_exists = forAll((e: Either[Int, Int]) => - e.left.exists(_ % 2 == 0) == (e.isLeft && e.left.get % 2 == 0)) + e.left.exists(_ % 2 == 0) == (e.isLeft && e.left.orFail % 2 == 0)) val prop_flatMapLeftIdentity = forAll((e: Either[Int, Int], n: Int, s: String) => { def f(x: Int) = if(x % 2 == 0) Left(s) else Right(s) @@ -53,7 +61,7 @@ object CheckEitherTest extends Properties("Either") { e.left.map(x => f(g(x))) == e.left.map(x => g(x)).left.map(f(_))}) val prop_filterToOption = forAll((e: Either[Int, Int], x: Int) => e.left.filterToOption(_ % 2 == 0) == - (if(e.isRight || e.left.get % 2 != 0) None else Some(e))) + (if(e.isRight || e.left.orFail % 2 != 0) None else Some(e))) val prop_seq = forAll((e: Either[Int, Int]) => e.left.toSeq == (e match { case Left(a) => Seq(a) @@ -67,46 +75,46 @@ object CheckEitherTest extends Properties("Either") { } object CheckRightProjection { - val prop_value = forAll((n: Int) => Right(n).right.get == n) + val prop_value = forAll((n: Int) => Right(n).orFail == n) - val prop_getOrElse = forAll((e: Either[Int, Int], or: Int) => e.right.getOrElse(or) == (e match { + val prop_getOrElse = forAll((e: Either[Int, Int], or: Int) => e.getOrElse(or) == (e match { case Left(_) => or case Right(b) => b })) val prop_forall = forAll((e: Either[Int, Int]) => - e.right.forall(_ % 2 == 0) == (e.isLeft || e.right.get % 2 == 0)) + e.forall(_ % 2 == 0) == (e.isLeft || e.orFail % 2 == 0)) val prop_exists = forAll((e: Either[Int, Int]) => - e.right.exists(_ % 2 == 0) == (e.isRight && e.right.get % 2 == 0)) + e.exists(_ % 2 == 0) == (e.isRight && e.orFail % 2 == 0)) val prop_flatMapLeftIdentity = forAll((e: Either[Int, Int], n: Int, s: String) => { def f(x: Int) = if(x % 2 == 0) Left(s) else Right(s) - Right(n).right.flatMap(f(_)) == f(n)}) + Right(n).flatMap(f(_)) == f(n)}) - val prop_flatMapRightIdentity = forAll((e: Either[Int, Int]) => e.right.flatMap(Right(_)) == e) + val prop_flatMapRightIdentity = forAll((e: Either[Int, Int]) => e.flatMap(Right(_)) == e) val prop_flatMapComposition = forAll((e: Either[Int, Int]) => { def f(x: Int) = if(x % 2 == 0) Left(x) else Right(x) def g(x: Int) = if(x % 7 == 0) Right(x) else Left(x) - e.right.flatMap(f(_)).right.flatMap(g(_)) == e.right.flatMap(f(_).right.flatMap(g(_)))}) + e.flatMap(f(_)).flatMap(g(_)) == e.flatMap(f(_).flatMap(g(_)))}) - val prop_mapIdentity = 
forAll((e: Either[Int, Int]) => e.right.map(x => x) == e) + val prop_mapIdentity = forAll((e: Either[Int, Int]) => e.map(x => x) == e) val prop_mapComposition = forAll((e: Either[Int, String]) => { def f(s: String) = s.toLowerCase def g(s: String) = s.reverse - e.right.map(x => f(g(x))) == e.right.map(x => g(x)).right.map(f(_))}) + e.map(x => f(g(x))) == e.map(x => g(x)).map(f(_))}) val prop_filterToOption = forAll((e: Either[Int, Int], x: Int) => e.right.filterToOption(_ % 2 == 0) == - (if(e.isLeft || e.right.get % 2 != 0) None else Some(e))) + (if(e.isLeft || e.orFail % 2 != 0) None else Some(e))) - val prop_seq = forAll((e: Either[Int, Int]) => e.right.toSeq == (e match { + val prop_seq = forAll((e: Either[Int, Int]) => e.toSeq == (e match { case Left(_) => Seq.empty case Right(b) => Seq(b) })) - val prop_option = forAll((e: Either[Int, Int]) => e.right.toOption == (e match { + val prop_option = forAll((e: Either[Int, Int]) => e.toOption == (e match { case Left(_) => None case Right(b) => Some(b) })) @@ -114,7 +122,7 @@ object CheckEitherTest extends Properties("Either") { val prop_Either_left = forAll((n: Int) => Left(n).left.get == n) - val prop_Either_right = forAll((n: Int) => Right(n).right.get == n) + val prop_Either_right = forAll((n: Int) => Right(n).orFail == n) val prop_Either_joinLeft = forAll((e: Either[Either[Int, Int], Int]) => e match { case Left(ee) => e.joinLeft == ee diff --git a/test/scalacheck/Ctrie.scala b/test/scalacheck/Ctrie.scala index 6101105f06fc..9c120c552566 100644 --- a/test/scalacheck/Ctrie.scala +++ b/test/scalacheck/Ctrie.scala @@ -3,8 +3,7 @@ import Prop._ import org.scalacheck.Gen._ import collection._ import collection.concurrent.TrieMap - - +import scala.language.reflectiveCalls case class Wrap(i: Int) { override def hashCode = i // * 0x9e3775cd @@ -192,8 +191,8 @@ object CtrieTest extends Properties("concurrent.TrieMap") { idx => (0 until sz) foreach { i => - val v = ct.getOrElseUpdate(Wrap(i), idx + ":" + i) - if (v == idx + ":" + i) totalInserts.incrementAndGet() + val v = ct.getOrElseUpdate(Wrap(i), s"$idx:$i") + if (v == s"$idx:$i") totalInserts.incrementAndGet() } } diff --git a/test/scalacheck/concurrent-map.scala b/test/scalacheck/concurrent-map.scala index 75082e8bd09c..f3c529922269 100644 --- a/test/scalacheck/concurrent-map.scala +++ b/test/scalacheck/concurrent-map.scala @@ -1,6 +1,6 @@ import java.util.concurrent._ import scala.collection._ -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.scalacheck._ import org.scalacheck.Prop._ import org.scalacheck.Gen._ @@ -26,6 +26,7 @@ object ConcurrentMapTest extends Properties("concurrent.TrieMap") { /* helpers */ def inParallel[T](totalThreads: Int)(body: Int => T): Seq[T] = { + import scala.language.reflectiveCalls val threads = for (idx <- 0 until totalThreads) yield new Thread { setName("ParThread-" + idx) private var res: T = _ diff --git a/test/scalacheck/range.scala b/test/scalacheck/range.scala index 3344d3be6315..f06606b59fbc 100644 --- a/test/scalacheck/range.scala +++ b/test/scalacheck/range.scala @@ -43,9 +43,9 @@ abstract class RangeTest(kind: String) extends Properties("Range "+kind) { size <- choose(1, 100) step <- choose(1, 101) } yield { - val signum = if (boundary == 0) 1 else boundary.signum - if (isStart) Range(boundary, boundary - size * boundary.signum, - step * signum) - else Range(boundary - size * boundary.signum, boundary, step * signum) + val signum = if (boundary == 0) 1 else boundary.sign + if (isStart) Range(boundary, 
boundary - size * boundary.sign, - step * signum) + else Range(boundary - size * boundary.sign, boundary, step * signum) } diff --git a/test/scalacheck/redblacktree.scala b/test/scalacheck/redblacktree.scala index 3d4cfdd145a4..02c7597548b3 100644 --- a/test/scalacheck/redblacktree.scala +++ b/test/scalacheck/redblacktree.scala @@ -24,7 +24,7 @@ abstract class RedBlackTreeTest(tname: String) extends Properties(tname) with Re import RB._ def nodeAt[A](tree: Tree[String, A], n: Int): Option[(String, A)] = if (n < iterator(tree).size && n >= 0) - Some(iterator(tree).drop(n).next) + Some(iterator(tree).drop(n).next()) else None diff --git a/test/scalacheck/scala/ArrayTest.scala b/test/scalacheck/scala/ArrayTest.scala index e08c77e3e8af..a51562d5d88e 100644 --- a/test/scalacheck/scala/ArrayTest.scala +++ b/test/scalacheck/scala/ArrayTest.scala @@ -25,9 +25,9 @@ object ArrayTest extends Properties("Array") { property("fill") = forAll( Gen.choose(-10, 100), ) { len => - val xs = Vector.fill(len)(Random.nextInt) + val xs = Vector.fill(len)(Random.nextInt()) val i = xs.iterator - Array.fill(len)(i.next).toVector == xs + Array.fill(len)(i.next()).toVector == xs } property("tabulate") = forAll( diff --git a/test/scalacheck/scala/collection/IndexOfSliceTest.scala b/test/scalacheck/scala/collection/IndexOfSliceTest.scala index 3853139d340f..50c11d5cea25 100644 --- a/test/scalacheck/scala/collection/IndexOfSliceTest.scala +++ b/test/scalacheck/scala/collection/IndexOfSliceTest.scala @@ -8,6 +8,7 @@ object IndexOfSliceTest extends Properties("indexOfSlice") { // The default arbitrary[Seq[Int]] picks only one Seq implementation. // Here we explicitly list all the implementations we want to test + @annotation.nowarn("msg=type WrappedArray") val genDifferentSeqs = Gen.oneOf[Seq[Int]]( Arbitrary.arbitrary[collection.immutable.List[Int]], diff --git a/test/scalacheck/scala/collection/IteratorProperties.scala b/test/scalacheck/scala/collection/IteratorProperties.scala index 820cbaa11748..d20e24c33b7d 100644 --- a/test/scalacheck/scala/collection/IteratorProperties.scala +++ b/test/scalacheck/scala/collection/IteratorProperties.scala @@ -34,12 +34,12 @@ object IteratorProperties extends Properties("Iterator") { val indexed = s.toIndexedSeq // IndexedSeqs and their Iterators have a knownSize val simple = new SimpleIterable(s) // SimpleIterable and its Iterator don't val stream = LazyList.from(s) // Lazy - val indexed1 = f(indexed, n).toSeq - val indexed2 = f(indexed.iterator, n).toSeq - val simple1 = f(simple, n).toSeq - val simple2 = f(simple.iterator, n).toSeq - val stream1 = f(stream, n).toSeq - val stream2 = f(stream.iterator, n).toSeq + val indexed1 = f(indexed, n).iterator.to(Seq) + val indexed2 = f(indexed.iterator, n).iterator.to(Seq) + val simple1 = f(simple, n).iterator.to(Seq) + val simple2 = f(simple.iterator, n).iterator.to(Seq) + val stream1 = f(stream, n).iterator.to(Seq) + val stream2 = f(stream.iterator, n).iterator.to(Seq) (indexed1 == indexed2) :| s"indexed: $indexed1 != $indexed2" && (simple1 == simple2) :| s"simple: $simple1 != $simple2" && (stream1 == stream2) :| s"stream: $stream1 != $stream2" && diff --git a/test/scalacheck/scala/collection/StringOpsProps.scala b/test/scalacheck/scala/collection/StringOpsProps.scala index b902512e39e7..bdade1547a72 100644 --- a/test/scalacheck/scala/collection/StringOpsProps.scala +++ b/test/scalacheck/scala/collection/StringOpsProps.scala @@ -6,7 +6,7 @@ import java.io.{BufferedReader, StringReader} import org.scalacheck.{Gen, Properties}, Gen.{oneOf, 
listOf} import org.scalacheck.Prop._ -import JavaConverters._ +import scala.jdk.CollectionConverters._ object StringOpsTest extends Properties("StringOps") { diff --git a/test/scalacheck/scala/collection/immutable/ImmutableChampHashMapProperties.scala b/test/scalacheck/scala/collection/immutable/ImmutableChampHashMapProperties.scala index fa41faa4b724..2a61d5fe0382 100644 --- a/test/scalacheck/scala/collection/immutable/ImmutableChampHashMapProperties.scala +++ b/test/scalacheck/scala/collection/immutable/ImmutableChampHashMapProperties.scala @@ -33,7 +33,7 @@ object ImmutableChampHashMapProperties extends Properties("HashMap") { val builder = HashMap.newBuilder[K, V] inputMap.foreach(builder.addOne) - val duplicateMap = builder.result + val duplicateMap = builder.result() inputMap == duplicateMap } @@ -72,7 +72,7 @@ object ImmutableChampHashMapProperties extends Properties("HashMap") { property("adding elems twice to builder is the same as adding them once") = forAll { seq: Seq[(K, V)] => val b = HashMap.newBuilder[K, V].addAll(seq) - b.result == b.addAll(seq).result() + b.result() == b.addAll(seq).result() } property("(xs ++ ys).toMap == xs.toMap ++ ys.toMap") = forAll { (xs: Seq[(K, V)],ys: Seq[(K, V)]) => diff --git a/test/scalacheck/scala/collection/immutable/ImmutableChampHashSetProperties.scala b/test/scalacheck/scala/collection/immutable/ImmutableChampHashSetProperties.scala index 62ea4d75257b..7331f78c64b3 100644 --- a/test/scalacheck/scala/collection/immutable/ImmutableChampHashSetProperties.scala +++ b/test/scalacheck/scala/collection/immutable/ImmutableChampHashSetProperties.scala @@ -38,7 +38,7 @@ object ImmutableChampHashSetProperties extends Properties("immutable.HashSet") { val builder = HashSet.newBuilder[K] inputSet.foreach(builder.addOne) - val duplicateSet = builder.result + val duplicateSet = builder.result() inputSet == duplicateSet } @@ -64,7 +64,7 @@ object ImmutableChampHashSetProperties extends Properties("immutable.HashSet") { val builder = HashSet.newBuilder[K] inputShared.foreach(builder.addOne) - val duplicateSet = builder.result + val duplicateSet = builder.result() inputShared == inputShared.intersect(duplicateSet) } @@ -121,7 +121,7 @@ object ImmutableChampHashSetProperties extends Properties("immutable.HashSet") { val builder = HashSet.newBuilder[K] inputShared.foreach(builder.addOne) - val duplicateSet = builder.result + val duplicateSet = builder.result() inputShared == inputShared.union(duplicateSet) } @@ -166,7 +166,7 @@ object ImmutableChampHashSetProperties extends Properties("immutable.HashSet") { val builder = HashSet.newBuilder[K] inputShared.foreach(builder.addOne) - val duplicateSet = builder.result + val duplicateSet = builder.result() HashSet.empty[K] == inputShared.diff(duplicateSet) } @@ -240,7 +240,7 @@ object ImmutableChampHashSetProperties extends Properties("immutable.HashSet") { } property("adding elems twice to builder is the same as adding them once") = forAll { seq: Seq[K] => val b = HashSet.newBuilder[K].addAll(seq) - b.result == b.addAll(seq).result() + b.result() == b.addAll(seq).result() } property("(xs ++ ys).toSet == xs.toSet ++ ys.toSet") = forAll { (xs: Seq[K],ys: Seq[K]) => (xs ++ ys).toSet =? 
xs.toSet ++ ys.toSet diff --git a/test/scalacheck/scala/collection/immutable/SeqProperties.scala b/test/scalacheck/scala/collection/immutable/SeqProperties.scala index 1086506da5ee..0cd7ecbcbb4f 100644 --- a/test/scalacheck/scala/collection/immutable/SeqProperties.scala +++ b/test/scalacheck/scala/collection/immutable/SeqProperties.scala @@ -11,6 +11,7 @@ import scala.util.{Success, Try} import org.scalacheck.Properties +@annotation.nowarn("cat=deprecation&msg=Stream") object SeqProperties extends Properties("immutable.Seq builder implementations"){ type A = Int diff --git a/test/scalacheck/scala/collection/immutable/SetProperties.scala b/test/scalacheck/scala/collection/immutable/SetProperties.scala index f100b7292f48..f34a303cc164 100644 --- a/test/scalacheck/scala/collection/immutable/SetProperties.scala +++ b/test/scalacheck/scala/collection/immutable/SetProperties.scala @@ -7,8 +7,8 @@ import org.scalacheck.commands.Commands import scala.collection.mutable import scala.util.{Success, Try} - -object SetProperties extends Properties("immutable.Set builder implementations"){ +@annotation.nowarn("cat=deprecation&msg=Stream") +object SetProperties extends Properties("immutable.Set builder implementations") { type A = Int @@ -60,6 +60,7 @@ class SetBuilderStateProperties[A, To <: Set[A]](newBuilder: => mutable.Builder[ override def genCommand(state: State): Gen[Command] = _genCommand + @annotation.nowarn("cat=deprecation&msg=Stream") override def shrinkState = Shrink.apply[State]( set => set.to(Stream).map(set - _) ) case object Clear extends UnitCommand { diff --git a/test/scalacheck/scala/collection/mutable/MapProperties.scala b/test/scalacheck/scala/collection/mutable/MapProperties.scala index a77365d5a244..22394a1931c5 100644 --- a/test/scalacheck/scala/collection/mutable/MapProperties.scala +++ b/test/scalacheck/scala/collection/mutable/MapProperties.scala @@ -33,6 +33,7 @@ object MapProperties extends Properties("mutable.Map") { override def addOne(elem: (K, V)): this.type = { _elems += elem; this } } + @annotation.nowarn("cat=deprecation&msg=ListMap") implicit val arbMap: Arbitrary[Map[K, V]] = Arbitrary { for { @@ -52,4 +53,4 @@ object MapProperties extends Properties("mutable.Map") { map.filterInPlace(p) (map: collection.Map[K, V]) ?= expected } -} \ No newline at end of file +} diff --git a/test/scalacheck/scala/collection/mutable/RedBlackTree.scala b/test/scalacheck/scala/collection/mutable/RedBlackTree.scala index a6613309bc7a..c643a3d4c104 100644 --- a/test/scalacheck/scala/collection/mutable/RedBlackTree.scala +++ b/test/scalacheck/scala/collection/mutable/RedBlackTree.scala @@ -24,7 +24,7 @@ abstract class RedBlackTreeTest(tname: String) extends Properties(tname) with Re import RB._ def nodeAt[A](tree: Tree[String, A], n: Int): Option[(String, A)] = if (n < iterator(tree).size && n >= 0) - Some(iterator(tree).drop(n).next) + Some(iterator(tree).drop(n).next()) else None diff --git a/test/scalacheck/scala/reflect/quasiquotes/ArbitraryTreesAndNames.scala b/test/scalacheck/scala/reflect/quasiquotes/ArbitraryTreesAndNames.scala index 0541405f1c7d..19032a2d0fb7 100644 --- a/test/scalacheck/scala/reflect/quasiquotes/ArbitraryTreesAndNames.scala +++ b/test/scalacheck/scala/reflect/quasiquotes/ArbitraryTreesAndNames.scala @@ -1,6 +1,7 @@ package scala.reflect.quasiquotes import org.scalacheck._, Prop._, Gen._, Arbitrary._ +import scala.language.implicitConversions import scala.reflect.runtime.universe._, internal._, Flag._ trait ArbitraryTreesAndNames { diff --git 
a/test/scalacheck/scala/reflect/quasiquotes/DefinitionConstructionProps.scala b/test/scalacheck/scala/reflect/quasiquotes/DefinitionConstructionProps.scala index 01cdea398f55..2356f272038c 100644 --- a/test/scalacheck/scala/reflect/quasiquotes/DefinitionConstructionProps.scala +++ b/test/scalacheck/scala/reflect/quasiquotes/DefinitionConstructionProps.scala @@ -1,6 +1,7 @@ package scala.reflect.quasiquotes import org.scalacheck._, Prop._, Gen._, Arbitrary._ +import scala.language.reflectiveCalls import scala.reflect.runtime.universe._, Flag._, internal.reificationSupport.ScalaDot object DefinitionConstructionProps @@ -34,6 +35,7 @@ trait ClassConstruction { self: QuasiquoteProperties => val emptyConstructor = DefDef(Modifiers(), termNames.CONSTRUCTOR, List(), List(List()), TypeTree(), Block(List(pendingSuperCall), Literal(Constant(())))) + @annotation.nowarn("cat=deprecation&msg=emptyValDef") def classWith(name: TypeName, parents: List[Tree] = List(anyRef), body: List[DefDef] = Nil) = ClassDef( Modifiers(), name, List(), diff --git a/test/scalacheck/scala/reflect/quasiquotes/DeprecationProps.scala b/test/scalacheck/scala/reflect/quasiquotes/DeprecationProps.scala index 9439a5a2c69f..cccb06144ce5 100644 --- a/test/scalacheck/scala/reflect/quasiquotes/DeprecationProps.scala +++ b/test/scalacheck/scala/reflect/quasiquotes/DeprecationProps.scala @@ -3,6 +3,7 @@ package scala.reflect.quasiquotes import org.scalacheck._, Prop._, Gen._, Arbitrary._ import scala.reflect.runtime.universe._ +@annotation.nowarn("cat=deprecation") object DeprecationProps extends QuasiquoteProperties("deprecation") { val tname = TypeName("Foo") val tpt = tq"Foo" diff --git a/test/scalacheck/scala/reflect/quasiquotes/UnliftableProps.scala b/test/scalacheck/scala/reflect/quasiquotes/UnliftableProps.scala index 77e5b2de3d88..ae2d9aaf0b7f 100644 --- a/test/scalacheck/scala/reflect/quasiquotes/UnliftableProps.scala +++ b/test/scalacheck/scala/reflect/quasiquotes/UnliftableProps.scala @@ -1,8 +1,10 @@ package scala.reflect.quasiquotes +import org.junit.Assert.{assertEquals, assertTrue} import org.scalacheck._, Prop._, Gen._, Arbitrary._ import scala.reflect.runtime.universe._, Flag._ +@annotation.nowarn("msg=deprecated adaptation") object UnliftableProps extends QuasiquoteProperties("unliftable") { property("unlift name") = test { val termname0 = TermName("foo") @@ -74,7 +76,9 @@ object UnliftableProps extends QuasiquoteProperties("unliftable") { property("unlift scala.symbol") = test { val q"${s: scala.Symbol}" = q"'foo" - assert(s.isInstanceOf[scala.Symbol] && s == 'foo) + //assert(s.isInstanceOf[scala.Symbol] && s == Symbol("foo")) + assertTrue(s.isInstanceOf[scala.Symbol]) + assertEquals(Symbol("foo"), s) } implicit def unliftList[T: Unliftable]: Unliftable[List[T]] = Unliftable { diff --git a/test/scalacheck/t2460.scala b/test/scalacheck/t2460.scala index 42ff3ecfe6ab..81941a33261f 100644 --- a/test/scalacheck/t2460.scala +++ b/test/scalacheck/t2460.scala @@ -12,11 +12,11 @@ object SI2460Test extends Properties("Regex : Ticket 2460") { } val numberOfGroup = forAll(vowel) { - (s: String) => "\\s*([a-z])\\s*([a-z])\\s*".r("data").findAllMatchIn((1 to 20).map(_ => s).mkString).next.groupCount == 2 + (s: String) => "\\s*([a-z])\\s*([a-z])\\s*".r("data").findAllMatchIn((1 to 20).map(_ => s).mkString).next().groupCount == 2 } val nameOfGroup = forAll(vowel) { - (s: String) => "([a-z])".r("data").findAllMatchIn(s).next.group("data") == s + (s: String) => "([a-z])".r("data").findAllMatchIn(s).next().group("data") == s } val 
tests = List( diff --git a/test/scalacheck/treemap.scala b/test/scalacheck/treemap.scala index f21dacaef7fa..83fb586b5192 100644 --- a/test/scalacheck/treemap.scala +++ b/test/scalacheck/treemap.scala @@ -71,21 +71,21 @@ object TreeMapTest extends Properties("TreeMap") { property("minAfter") = forAll { (elements: List[Int]) => elements.nonEmpty ==> { val half = elements.take(elements.size / 2) val subject = TreeMap((half zip half): _*) - elements.forall{e => { - val temp = subject.from(e) + elements.forall { e => + val temp = subject.rangeFrom(e) if (temp.isEmpty) subject.minAfter(e).isEmpty else subject.minAfter(e).get == temp.min - }} + } }} property("maxBefore") = forAll { (elements: List[Int]) => elements.nonEmpty ==> { val half = elements.take(elements.size / 2) val subject = TreeMap((half zip half): _*) - elements.forall{e => { - val temp = subject.until(e) + elements.forall { e => + val temp = subject.rangeUntil(e) if (temp.isEmpty) subject.maxBefore(e).isEmpty else subject.maxBefore(e).get == temp.max - }} + } }} property("head/tail identity") = forAll { (subject: TreeMap[Int, String]) => subject.nonEmpty ==> { @@ -146,7 +146,7 @@ object TreeMapTest extends Properties("TreeMap") { property("from is inclusive") = forAll { (subject: TreeMap[Int, String]) => subject.nonEmpty ==> { val n = choose(0, subject.size - 1).sample.get val from = subject.drop(n).firstKey - subject.from(from).firstKey == from && subject.from(from).forall(_._1 >= from) + subject.rangeFrom(from).firstKey == from && subject.rangeFrom(from).forall(_._1 >= from) }} property("to is inclusive") = forAll { (subject: TreeMap[Int, String]) => subject.nonEmpty ==> { @@ -158,7 +158,7 @@ object TreeMapTest extends Properties("TreeMap") { property("until is exclusive") = forAll { (subject: TreeMap[Int, String]) => subject.size > 1 ==> { val n = choose(1, subject.size - 1).sample.get val until = subject.drop(n).firstKey - subject.until(until).lastKey == subject.take(n).lastKey && subject.until(until).forall(_._1 <= until) + subject.rangeUntil(until).lastKey == subject.take(n).lastKey && subject.rangeUntil(until).forall(_._1 <= until) }} property("remove single") = forAll { (subject: TreeMap[Int, String]) => subject.nonEmpty ==> { diff --git a/test/scalacheck/treeset.scala b/test/scalacheck/treeset.scala index 286fb1bc6919..e4ba91f54727 100644 --- a/test/scalacheck/treeset.scala +++ b/test/scalacheck/treeset.scala @@ -70,7 +70,7 @@ object TreeSetTest extends Properties("TreeSet") { val half = elements.take(elements.size / 2) val subject = TreeSet(half: _*) elements.forall{e => { - val temp = subject.from(e) + val temp = subject.rangeFrom(e) if (temp.isEmpty) subject.minAfter(e).isEmpty else subject.minAfter(e).get == temp.min }} @@ -80,7 +80,7 @@ object TreeSetTest extends Properties("TreeSet") { val half = elements.take(elements.size / 2) val subject = TreeSet(half: _*) elements.forall{e => { - val temp = subject.from(e) + val temp = subject.rangeFrom(e) if (temp.isEmpty) subject.minAfter(e).isEmpty else subject.minAfter(e).get == temp.min }} @@ -144,7 +144,7 @@ object TreeSetTest extends Properties("TreeSet") { property("from is inclusive") = forAll { (subject: TreeSet[Int]) => subject.nonEmpty ==> { val n = choose(0, subject.size - 1).sample.get val from = subject.drop(n).firstKey - subject.from(from).firstKey == from && subject.from(from).forall(_ >= from) + subject.rangeFrom(from).firstKey == from && subject.rangeFrom(from).forall(_ >= from) }} property("to is inclusive") = forAll { (subject: TreeSet[Int]) => 
subject.nonEmpty ==> { @@ -156,7 +156,7 @@ object TreeSetTest extends Properties("TreeSet") { property("until is exclusive") = forAll { (subject: TreeSet[Int]) => subject.size > 1 ==> { val n = choose(1, subject.size - 1).sample.get val until = subject.drop(n).firstKey - subject.until(until).lastKey == subject.take(n).lastKey && subject.until(until).forall(_ <= until) + subject.rangeUntil(until).lastKey == subject.take(n).lastKey && subject.rangeUntil(until).forall(_ <= until) }} property("remove single") = forAll { (subject: TreeSet[Int]) => subject.nonEmpty ==> { From 1592398ffe5c87e145e3b0fa1672e2d499900a34 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Mon, 3 May 2021 14:27:58 -0700 Subject: [PATCH 136/769] ScalaCheck 1.15.4 (was 1.15.3) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index d89a4e98dd3c..3a10e5a576e1 100644 --- a/build.sbt +++ b/build.sbt @@ -37,7 +37,7 @@ import scala.build._, VersionUtil._ // Non-Scala dependencies: val junitDep = "junit" % "junit" % "4.13.2" val junitInterfaceDep = "com.novocode" % "junit-interface" % "0.11" % Test -val scalacheckDep = "org.scalacheck" %% "scalacheck" % "1.15.3" % Test +val scalacheckDep = "org.scalacheck" %% "scalacheck" % "1.15.4" % Test val jolDep = "org.openjdk.jol" % "jol-core" % "0.13" val asmDep = "org.scala-lang.modules" % "scala-asm" % versionProps("scala-asm.version") val jlineDep = "org.jline" % "jline" % versionProps("jline.version") From 692431d3cb406f2272944c3fc112760b735cede4 Mon Sep 17 00:00:00 2001 From: Kai Date: Mon, 3 May 2021 23:20:30 +0100 Subject: [PATCH 137/769] Parse `+_` and `-_` in types as identifiers to support Scala 3.2 placeholder syntax This change allows the `kind-projector` plugin to rewrite `+_` and `-_` tokens to type lambdas, in line with the proposed syntax for Scala 3.2 in http://dotty.epfl.ch/docs/reference/changed-features/wildcards.html When used in conjunction with `-Xsource:3` this will let the user use `?` for wildcards and `_` for placeholders, letting the user cross-compile the same sources with Scala 3 with the `-source:3.2` flag. This change is not source-breaking, since currently `+_` and `-_` fail to parse entirely. It also does not allow the user to declare types with these names without backticks; they can only be used as part of a type tree.
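A short sketch, distilled from the pos test included in this patch, of what the new parsing accepts (illustrative only, not additional compiler behaviour): declaring one of these names still requires backticks, but use sites no longer do, which is what leaves room for a plugin such as kind-projector to rewrite the resulting identifiers into type lambdas.

object PlaceholderSketch {
  type `-_` = Int   // declaring the name still requires backticks
  type `+_` = Long

  // With this parser change, -_ and +_ are accepted as plain type identifiers
  // at use sites; a rewriting plugin may turn them into type lambdas instead.
  val toLong: -_ => +_ = (i: Int) => i.toLong
  val boxed: Option[+_] = Some(1L)
}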
--- .../scala/tools/nsc/ast/parser/Parsers.scala | 6 +++++ .../neg/variant-placeholders-future.check | 7 +++++ .../neg/variant-placeholders-future.scala | 4 +++ .../pos/variant-placeholders-future.scala | 27 +++++++++++++++++++ 4 files changed, 44 insertions(+) create mode 100644 test/files/neg/variant-placeholders-future.check create mode 100644 test/files/neg/variant-placeholders-future.scala create mode 100644 test/files/pos/variant-placeholders-future.scala diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 42767df41f78..d4f3f20c0df2 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -1127,6 +1127,12 @@ self => val start = in.offset in.nextToken() atPos(start)(SingletonTypeTree(literal(isNegated = true, start = start))) + } else if ((in.name == raw.PLUS || in.name == raw.MINUS) && lookingAhead(in.token == USCORE)) { + val start = in.offset + val identName = in.name.encode.append("_").toTypeName + in.nextToken() + in.nextToken() + atPos(start)(Ident(identName)) } else { val start = in.offset simpleTypeRest(in.token match { diff --git a/test/files/neg/variant-placeholders-future.check b/test/files/neg/variant-placeholders-future.check new file mode 100644 index 000000000000..1ae4080af390 --- /dev/null +++ b/test/files/neg/variant-placeholders-future.check @@ -0,0 +1,7 @@ +variant-placeholders-future.scala:2: error: `=`, `>:`, or `<:` expected + type -_ = Int // error -_ not allowed as a type def name without backticks + ^ +variant-placeholders-future.scala:3: error: `=`, `>:`, or `<:` expected + type +_ = Int // error +_ not allowed as a type def name without backticks + ^ +2 errors diff --git a/test/files/neg/variant-placeholders-future.scala b/test/files/neg/variant-placeholders-future.scala new file mode 100644 index 000000000000..973fb1f3b74d --- /dev/null +++ b/test/files/neg/variant-placeholders-future.scala @@ -0,0 +1,4 @@ +object Test { + type -_ = Int // error -_ not allowed as a type def name without backticks + type +_ = Int // error +_ not allowed as a type def name without backticks +} diff --git a/test/files/pos/variant-placeholders-future.scala b/test/files/pos/variant-placeholders-future.scala new file mode 100644 index 000000000000..cb2cf4c4cb20 --- /dev/null +++ b/test/files/pos/variant-placeholders-future.scala @@ -0,0 +1,27 @@ +object Test { + type `-_` = Int + type `+_` = Long + + val fnMinusPlus1: -_ => +_ = (_: Int).toLong + val fnMinusPlus2: (-_) => +_ = fnMinusPlus1 + val fnMinusPlus3: -_ => (+_) = fnMinusPlus2 + + val fnTupMinusPlus2: (=> -_, -_) => +_ = (a, b) => ((a: Int) + (b: Int)).toLong + def defMinusPlus2(byname: => -_, vararg: -_*): +_ = ((vararg.sum: Int) + (byname: -_)).toLong + val infixMinusPlus2: -_ Either +_ = Right[-_, +_](1L) + + val optPlus: Option[+_] = Some[ + _ ](1L) // spaces allowed + optPlus match { + case opt: Option[ + _ ] => + val opt1: + _ = opt.get + val opt2: Long = opt1 + } + + val optMinus: Option[-_] = Some[ - _ ](1) // spaces allowed + optMinus match { + case opt: Option[ - _ ] => + val opt1: `-_` = opt.get + val optErr: - _ = opt.get + val opt2: Int = opt1 + } +} From 493f98552047b0473b3a30dcb0dda9e4db8ba2e4 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 4 May 2021 14:13:48 +1000 Subject: [PATCH 138/769] Include --release version in cache key for classpath cache This prevents concurrent compilers with different values for this compiler option from seeing the incorrect 
API. --- .../ZipAndJarFileLookupFactory.scala | 53 ++++++++++--------- .../scala/tools/nsc/plugins/Plugin.scala | 2 +- .../scala/tools/nsc/plugins/Plugins.scala | 2 +- .../scala/tools/nsc/typechecker/Macros.scala | 4 +- 4 files changed, 33 insertions(+), 28 deletions(-) diff --git a/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala index f9c29456d456..5f374119daef 100644 --- a/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala +++ b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala @@ -14,16 +14,18 @@ package scala.tools.nsc.classpath import java.io.{Closeable, File} import java.net.URL -import java.nio.file.Files +import java.nio.file.{Files, InvalidPathException} import java.nio.file.attribute.{BasicFileAttributes, FileTime} +import java.nio.file.spi.FileSystemProvider import java.util.{Timer, TimerTask} import java.util.concurrent.atomic.AtomicInteger - +import java.util.zip.ZipError import scala.annotation.tailrec import scala.reflect.io.{AbstractFile, FileZipArchive, ManifestResources} import scala.tools.nsc.util.{ClassPath, ClassRepresentation} import scala.tools.nsc.{CloseableRegistry, Settings} import FileUtils._ +import scala.reflect.internal.FatalError import scala.tools.nsc.io.Jar /** @@ -32,21 +34,23 @@ import scala.tools.nsc.io.Jar * when there are a lot of projects having a lot of common dependencies. */ sealed trait ZipAndJarFileLookupFactory { - private val cache = new FileBasedCache[ClassPath with Closeable] + case class ZipSettings(releaseValue: Option[String]) + private val cache = new FileBasedCache[ZipSettings, ClassPath with Closeable] def create(zipFile: AbstractFile, settings: Settings, closeableRegistry: CloseableRegistry): ClassPath = { val disabled = (settings.YdisableFlatCpCaching.value && !settings.YforceFlatCpCaching.value) || zipFile.file == null + val zipSettings = ZipSettings(settings.releaseValue) cache.checkCacheability(zipFile.toURL :: Nil, checkStamps = true, disableCache = disabled) match { case Left(_) => - val result: ClassPath with Closeable = createForZipFile(zipFile, settings.releaseValue) + val result: ClassPath with Closeable = createForZipFile(zipFile, zipSettings) closeableRegistry.registerClosable(result) result case Right(Seq(path)) => - cache.getOrCreate(List(path), () => createForZipFile(zipFile, settings.releaseValue), closeableRegistry, checkStamps = true) + cache.getOrCreate(zipSettings, List(path), () => createForZipFile(zipFile, zipSettings), closeableRegistry, checkStamps = true) } } - protected def createForZipFile(zipFile: AbstractFile, release: Option[String]): ClassPath with Closeable + protected def createForZipFile(zipFile: AbstractFile, zipSettings: ZipSettings): ClassPath with Closeable } /** @@ -158,9 +162,9 @@ object ZipAndJarClassPathFactory extends ZipAndJarFileLookupFactory { case class PackageInfo(packageName: String, subpackages: List[AbstractFile]) } - override protected def createForZipFile(zipFile: AbstractFile, release: Option[String]): ClassPath with Closeable = + override protected def createForZipFile(zipFile: AbstractFile, zipSettings: ZipSettings): ClassPath with Closeable = if (zipFile.file == null) createWithoutUnderlyingFile(zipFile) - else ZipArchiveClassPath(zipFile.file, release) + else ZipArchiveClassPath(zipFile.file, zipSettings.releaseValue) private def createWithoutUnderlyingFile(zipFile: AbstractFile) = zipFile match { case manifestRes: ManifestResources => @@ 
-189,13 +193,13 @@ object ZipAndJarSourcePathFactory extends ZipAndJarFileLookupFactory { override protected def isRequiredFileType(file: AbstractFile): Boolean = file.isScalaOrJavaSource } - override protected def createForZipFile(zipFile: AbstractFile, release: Option[String]): ClassPath with Closeable = ZipArchiveSourcePath(zipFile.file) + override protected def createForZipFile(zipFile: AbstractFile, zipSettings: ZipSettings): ClassPath with Closeable = ZipArchiveSourcePath(zipFile.file) } -final class FileBasedCache[T] { +final class FileBasedCache[K, T] { import java.nio.file.Path private case class Stamp(lastModified: FileTime, size: Long, fileKey: Object) - private case class Entry(stamps: Seq[Stamp], t: T) { + private case class Entry(k: K, stamps: Seq[Stamp], t: T) { val referenceCount: AtomicInteger = new AtomicInteger(1) var timerTask: TimerTask = null def cancelTimer(): Unit = { @@ -205,9 +209,9 @@ final class FileBasedCache[T] { } } } - private val cache = collection.mutable.Map.empty[Seq[Path], Entry] + private val cache = collection.mutable.Map.empty[(K, Seq[Path]), Entry] - private def referenceCountDecrementer(e: Entry, paths: Seq[Path]): Closeable = { + private def referenceCountDecrementer(e: Entry, key: (K, Seq[Path])): Closeable = { // Cancel the deferred close timer (if any) that was started when the reference count // last dropped to zero. e.cancelTimer() @@ -227,7 +231,7 @@ final class FileBasedCache[T] { override def run(): Unit = { cache.synchronized { if (e.referenceCount.compareAndSet(0, -1)) { - cache.remove(paths) + cache.remove(key) cl.close() } } @@ -259,7 +263,7 @@ final class FileBasedCache[T] { } } - def getOrCreate(paths: Seq[Path], create: () => T, closeableRegistry: CloseableRegistry, checkStamps: Boolean): T = cache.synchronized { + def getOrCreate(k: K, paths: Seq[Path], create: () => T, closeableRegistry: CloseableRegistry, checkStamps: Boolean): T = cache.synchronized { val stamps = if (!checkStamps) Nil else paths.map { path => try { val attrs = Files.readAttributes(path, classOf[BasicFileAttributes]) @@ -273,14 +277,15 @@ final class FileBasedCache[T] { Stamp(FileTime.fromMillis(0), -1, new Object) } } + val key = (k, paths) - cache.get(paths) match { - case Some(e@Entry(cachedStamps, cached)) => + cache.get(key) match { + case Some(e@Entry(k1, cachedStamps, cached)) => if (!checkStamps || cachedStamps == stamps) { // Cache hit val count = e.referenceCount.incrementAndGet() assert(count > 0, (stamps, count)) - closeableRegistry.registerClosable(referenceCountDecrementer(e, paths)) + closeableRegistry.registerClosable(referenceCountDecrementer(e, (k1, paths))) cached } else { // Cache miss: we found an entry but the underlying files have been modified @@ -293,17 +298,17 @@ final class FileBasedCache[T] { } } val value = create() - val entry = Entry(stamps, value) - cache.put(paths, entry) - closeableRegistry.registerClosable(referenceCountDecrementer(entry, paths)) + val entry = Entry(k, stamps, value) + cache.put(key, entry) + closeableRegistry.registerClosable(referenceCountDecrementer(entry, key)) value } case _ => // Cache miss val value = create() - val entry = Entry(stamps, value) - cache.put(paths, entry) - closeableRegistry.registerClosable(referenceCountDecrementer(entry, paths)) + val entry = Entry(k, stamps, value) + cache.put(key, entry) + closeableRegistry.registerClosable(referenceCountDecrementer(entry, key)) value } } diff --git a/src/compiler/scala/tools/nsc/plugins/Plugin.scala b/src/compiler/scala/tools/nsc/plugins/Plugin.scala 
index d596fc86b8b7..2836fd4f03ea 100644 --- a/src/compiler/scala/tools/nsc/plugins/Plugin.scala +++ b/src/compiler/scala/tools/nsc/plugins/Plugin.scala @@ -112,7 +112,7 @@ object Plugin { val PluginXML = "scalac-plugin.xml" - private[nsc] val pluginClassLoadersCache = new FileBasedCache[ScalaClassLoader.URLClassLoader]() + private[nsc] val pluginClassLoadersCache = new FileBasedCache[Unit, ScalaClassLoader.URLClassLoader]() type AnyClass = Class[_] diff --git a/src/compiler/scala/tools/nsc/plugins/Plugins.scala b/src/compiler/scala/tools/nsc/plugins/Plugins.scala index 22d397a469a8..17722acad2b7 100644 --- a/src/compiler/scala/tools/nsc/plugins/Plugins.scala +++ b/src/compiler/scala/tools/nsc/plugins/Plugins.scala @@ -106,7 +106,7 @@ trait Plugins { global: Global => closeableRegistry.registerClosable(loader) loader case Right(paths) => - cache.getOrCreate(classpath.map(_.jfile.toPath()), newLoader, closeableRegistry, checkStamps) + cache.getOrCreate((), classpath.map(_.jfile.toPath()), newLoader, closeableRegistry, checkStamps) } } diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala index f9a047c3c28a..ba964b252df2 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala @@ -96,7 +96,7 @@ trait Macros extends MacroRuntimes with Traces with Helpers { closeableRegistry.registerClosable(loader) loader case Right(paths) => - cache.getOrCreate(paths, newLoader, closeableRegistry, checkStamps) + cache.getOrCreate((), paths, newLoader, closeableRegistry, checkStamps) } } @@ -973,7 +973,7 @@ trait Macros extends MacroRuntimes with Traces with Helpers { object Macros { final val macroClassLoadersCache = - new scala.tools.nsc.classpath.FileBasedCache[ScalaClassLoader.URLClassLoader]() + new scala.tools.nsc.classpath.FileBasedCache[Unit, ScalaClassLoader.URLClassLoader]() } trait MacrosStats { From 0714941e302e7cc8182f770226497af25108a3fa Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 4 May 2021 14:11:18 +1000 Subject: [PATCH 139/769] Cache JRT and --release (ct.sym) classpath elements. Classpath elements based on a) the jrt:// file system (representing platform libraries of the current Java 9+ instance) and b) ct.sym (the JEP 247 repository of previous JDK versions) are an immutable part of the JDK. The ClassPath entries we create are safe to share across concurrent or subsequent compilers in the same way we cache entries for regular JARs.
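A condensed sketch of the keyed-cache pattern the two changes above rely on (illustrative only; the real FileBasedCache additionally tracks file stamps, reference counts and deferred closing): entries are looked up by both a settings-derived key and the classpath paths, so compilers running with different --release values never share an entry.

import java.nio.file.Path
import scala.collection.mutable

final class KeyedClasspathCache[K, T] {
  private val entries = mutable.Map.empty[(K, Seq[Path]), T]
  // Same paths but a different key produce distinct cached values.
  def getOrCreate(key: K, paths: Seq[Path], create: () => T): T =
    entries.synchronized(entries.getOrElseUpdate((key, paths), create()))
}

// Hypothetical usage (jarPath and openClassPath are illustrative names, not compiler API):
//   val cache    = new KeyedClasspathCache[Option[String], AnyRef]
//   val forJava8 = cache.getOrCreate(Some("8"), List(jarPath), () => openClassPath(jarPath, Some("8")))
//   val plain    = cache.getOrCreate(None,      List(jarPath), () => openClassPath(jarPath, None))
//   // forJava8 and plain are separate entries, so neither sees the other's API view.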
--- .../scala/tools/nsc/classpath/DirectoryClassPath.scala | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala b/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala index 3d3a6b31881b..e35c3aa22353 100644 --- a/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala +++ b/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala @@ -14,7 +14,6 @@ package scala.tools.nsc.classpath import java.io.{Closeable, File} import java.net.URL -import java.nio.file.{FileSystems, Files} import java.util import scala.reflect.io.{AbstractFile, PlainFile, PlainNioFile} @@ -130,6 +129,8 @@ trait JFileDirectoryLookup[FileEntryType <: ClassRepresentation] extends Directo object JrtClassPath { import java.nio.file._, java.net.URI + private val jrtClassPathCache = new FileBasedCache[Unit, JrtClassPath]() + private val ctSymClassPathCache = new FileBasedCache[Unit, CtSymClassPath]() def apply(release: Option[String], closeableRegistry: CloseableRegistry): Option[ClassPath] = { import scala.util.Properties._ if (!isJavaAtLeast("9")) None @@ -148,8 +149,7 @@ object JrtClassPath { val ctSym = Paths.get(javaHome).resolve("lib").resolve("ct.sym") if (Files.notExists(ctSym)) None else { - val classPath = new CtSymClassPath(ctSym, v.toInt) - closeableRegistry.registerClosable(classPath) + val classPath = ctSymClassPathCache.getOrCreate((), ctSym :: Nil, () => new CtSymClassPath(ctSym, v.toInt), closeableRegistry, true) Some(classPath) } } catch { @@ -158,7 +158,8 @@ object JrtClassPath { case _ => try { val fs = FileSystems.getFileSystem(URI.create("jrt:/")) - Some(new JrtClassPath(fs)) + val classPath = jrtClassPathCache.getOrCreate((), Nil, () => new JrtClassPath(fs), closeableRegistry, false) + Some(classPath) } catch { case _: ProviderNotFoundException | _: FileSystemNotFoundException => None } From 31255c38f0bc3e3e1a46716518f5dd401773236d Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 4 May 2021 14:16:21 +1000 Subject: [PATCH 140/769] Avoid contention in classpath access Classpath caching shares a single instance of ZipArchive across multiple threads. This can cause read contention as j.u.ZipFile internally serializes reads. Instead, maintain a pool of ZipFile instances to avoid sharing them across threads. 
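The pooling idea from the diff that follows, reduced to a standalone sketch over plain java.util.zip.ZipFile (a simplified illustration, not the compiler's FileZipArchive itself): because j.u.ZipFile serializes reads internally, each concurrent reader takes its own handle from a small pool instead of sharing one instance.

import java.util.concurrent.{ArrayBlockingQueue, TimeUnit}
import java.util.zip.ZipFile

final class ZipFilePool(file: java.io.File) {
  private val pool = new ArrayBlockingQueue[ZipFile](Runtime.getRuntime.availableProcessors())

  // Take an idle handle if one is available, otherwise open a fresh one.
  def acquire(): ZipFile = {
    val zf = pool.poll(0, TimeUnit.MILLISECONDS)
    if (zf == null) new ZipFile(file) else zf
  }

  // Return the handle to the pool, or close it if the pool is already full.
  def release(zf: ZipFile): Unit =
    if (!pool.offer(zf, 0, TimeUnit.MILLISECONDS)) zf.close()

  // Drain and close everything, e.g. when the enclosing archive is closed.
  def close(): Unit = {
    val drained = new java.util.ArrayList[ZipFile]
    pool.drainTo(drained)
    drained.forEach(_.close())
  }
}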
--- project/MimaFilters.scala | 5 ++ src/reflect/scala/reflect/io/ZipArchive.scala | 73 +++++++++++++------ 2 files changed, 57 insertions(+), 21 deletions(-) diff --git a/project/MimaFilters.scala b/project/MimaFilters.scala index cb83f1da446a..41cf89665189 100644 --- a/project/MimaFilters.scala +++ b/project/MimaFilters.scala @@ -28,6 +28,11 @@ object MimaFilters extends AutoPlugin { // #9166 add missing serialVersionUID ProblemFilters.exclude[MissingFieldProblem]("*.serialVersionUID"), + + // private[scala] Internal API + ProblemFilters.exclude[IncompatibleMethTypeProblem]("scala.reflect.io.FileZipArchive#LeakyEntry.this"), + ProblemFilters.exclude[IncompatibleMethTypeProblem]("scala.reflect.io.FileZipArchive#LeakyEntry.this"), + ProblemFilters.exclude[MissingClassProblem]("scala.reflect.io.FileZipArchive$zipFilePool$"), ) override val buildSettings = Seq( diff --git a/src/reflect/scala/reflect/io/ZipArchive.scala b/src/reflect/scala/reflect/io/ZipArchive.scala index 53a85532bc67..55fa3d84a236 100644 --- a/src/reflect/scala/reflect/io/ZipArchive.scala +++ b/src/reflect/scala/reflect/io/ZipArchive.scala @@ -17,14 +17,13 @@ package io import java.net.URL import java.io.{ByteArrayInputStream, FilterInputStream, IOException, InputStream} import java.io.{File => JFile} +import java.util.concurrent.{ArrayBlockingQueue, TimeUnit} import java.util.zip.{ZipEntry, ZipFile, ZipInputStream} import java.util.jar.Manifest - import scala.collection.mutable import scala.collection.JavaConverters._ import scala.annotation.tailrec import scala.reflect.internal.JDK9Reflectors - import ZipArchive._ /** An abstraction for zip files and streams. Everything is written the way @@ -146,6 +145,31 @@ abstract class ZipArchive(override val file: JFile, release: Option[String]) ext /** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ final class FileZipArchive(file: JFile, release: Option[String]) extends ZipArchive(file, release) { def this(file: JFile) = this(file, None) + private object zipFilePool { + private[this] val zipFiles = new ArrayBlockingQueue[ZipFile](Runtime.getRuntime.availableProcessors()) + + def acquire: ZipFile = { + val zf = zipFiles.poll(0, TimeUnit.MILLISECONDS) + zf match { + case null => + openZipFile() + case _ => + zf + } + } + + def release(zf: ZipFile): Unit = { + if (!zipFiles.offer(zf, 0, TimeUnit.MILLISECONDS)) + zf.close() + } + + def close(): Unit = { + val zipFilesToClose = new java.util.ArrayList[ZipFile] + zipFiles.drainTo(zipFilesToClose) + zipFilesToClose.iterator().forEachRemaining(_.close()) + } + } + private[this] def openZipFile(): ZipFile = try { release match { case Some(r) if file.getName.endsWith(".jar") => @@ -175,18 +199,28 @@ final class FileZipArchive(file: JFile, release: Option[String]) extends ZipArch override def sizeOption: Option[Int] = Some(size) // could be stale } - // keeps a file handle open to ZipFile, which forbids file mutation - // on Windows, and leaks memory on all OS (typically by stopping - // classloaders from being garbage collected). But is slightly - // faster than LazyEntry. + // keeps file handle(s) open to ZipFile in the pool this.zipFiles, + // which forbids file mutation on Windows, and leaks memory on all OS (typically by stopping + // classloaders from being garbage collected). But is slightly faster than LazyEntry. 
+ // + // Note: scala/scala#7366 / scala/scala#7644, LeakyEntry _does_ close the file when `Global.close` is called, + // or after a short delay specified by FileBasedCache.deferCloseMs if classpath caching is enabled. + // So the file handle "leak" is far less a problem than it used to be. private[this] class LeakyEntry( - zipFile: ZipFile, - zipEntry: ZipEntry, - name: String + name: String, + time: Long, + size: Int ) extends Entry(name) { - override def lastModified: Long = zipEntry.getTime - override def input: InputStream = zipFile.getInputStream(zipEntry) - override def sizeOption: Option[Int] = Some(zipEntry.getSize.toInt) + override def lastModified: Long = time // could be stale + override def input: InputStream = { + val zipFile = zipFilePool.acquire + val entry = zipFile.getEntry(name) // with `-release`, returns the correct version under META-INF/versions + val delegate = zipFile.getInputStream(entry) + new FilterInputStream(delegate) { + override def close(): Unit = { zipFilePool.release(zipFile) } + } + } + override def sizeOption: Option[Int] = Some(size) // could be stale } private[this] val dirs = new java.util.HashMap[String, DirEntry]() @@ -200,10 +234,6 @@ final class FileZipArchive(file: JFile, release: Option[String]) extends ZipArch while (enum.hasMoreElements) { val zipEntry = enum.nextElement if (!zipEntry.getName.startsWith("META-INF/versions/")) { - val zipEntryVersioned = if (release.isDefined) { - // JARFile will return the entry for the corresponding release-dependent version here under META-INF/versions - zipFile.getEntry(zipEntry.getName) - } else zipEntry if (!zipEntry.isDirectory) { val dir = getDir(dirs, zipEntry) val f = @@ -213,15 +243,17 @@ final class FileZipArchive(file: JFile, release: Option[String]) extends ZipArch zipEntry.getTime, zipEntry.getSize.toInt) else - new LeakyEntry(zipFile, zipEntryVersioned, zipEntry.getName) + new LeakyEntry(zipEntry.getName, + zipEntry.getTime, + zipEntry.getSize.toInt) dir.entries(f.name) = f } } } } finally { - if (ZipArchive.closeZipFile) zipFile.close() - else closeables ::= zipFile + if (!ZipArchive.closeZipFile) + zipFilePool.release(zipFile) } root } @@ -242,9 +274,8 @@ final class FileZipArchive(file: JFile, release: Option[String]) extends ZipArch case x: FileZipArchive => file.getAbsoluteFile == x.file.getAbsoluteFile case _ => false } - private[this] var closeables: List[java.io.Closeable] = Nil override def close(): Unit = { - closeables.foreach(_.close) + zipFilePool.close() } } /** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ From d0396cddeab70ed7c36522df1d52be31818ac4b6 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 4 May 2021 16:06:25 +1000 Subject: [PATCH 141/769] Adjust conversion of AbstractFile's name to TypeName The current optimized version tries to avoid temporary strings. But it doesn't achieve this for classes backed by jrt:// (or any `NioPath`), as the call to `AbstractFile.fileName` internally constructs a string each time. This commit uses `.name` (which is a `lazy val`).
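A small sketch of the distinction drawn above (an illustrative class, not the real AbstractFile API): a `lazy val` caches the computed name after the first call, while a `def` rebuilds a fresh String on every call, which is what made `fileName` the more expensive choice on the symbol loader's hot path.

abstract class FileLike(path: java.nio.file.Path) {
  lazy val name: String = path.getFileName.toString // computed once, then cached
  def fileName: String  = path.getFileName.toString // allocates a new String per call
}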
--- .../scala/tools/nsc/symtab/SymbolLoaders.scala | 15 ++++++++------- src/compiler/scala/tools/nsc/util/ClassPath.scala | 11 ----------- 2 files changed, 8 insertions(+), 18 deletions(-) diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala index a96d94bb9e3b..4c399e62d1b4 100644 --- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala +++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala @@ -199,15 +199,16 @@ abstract class SymbolLoaders { } } private def nameOf(classRep: ClassRepresentation): TermName = { - while(true) { - val len = classRep.nameChars(nameCharBuffer) - if (len == -1) nameCharBuffer = new Array[Char](nameCharBuffer.length * 2) - else return newTermName(nameCharBuffer, 0, len) + val name = classRep.name + val nameLength = name.length + if (nameLength <= nameCharBuffer.length) { + name.getChars(0, nameLength, nameCharBuffer, 0) + newTermName(nameCharBuffer, 0, nameLength) + } else { + newTermName(name) } - throw new IllegalStateException() } - private var nameCharBuffer = new Array[Char](256) - + private val nameCharBuffer = new Array[Char](512) /** * A lazy type that completes itself by calling parameter doComplete. diff --git a/src/compiler/scala/tools/nsc/util/ClassPath.scala b/src/compiler/scala/tools/nsc/util/ClassPath.scala index e585e1127c53..77ad71578a93 100644 --- a/src/compiler/scala/tools/nsc/util/ClassPath.scala +++ b/src/compiler/scala/tools/nsc/util/ClassPath.scala @@ -204,17 +204,6 @@ object ClassPath { trait ClassRepresentation { def fileName: String def name: String - /** Low level way to extract the entry name without allocation. */ - final def nameChars(buffer: Array[Char]): Int = { - val ix = fileName.lastIndexOf('.') - val nameLength = if (ix < 0) fileName.length else ix - if (nameLength > buffer.length) - -1 - else { - fileName.getChars(0, fileName.lastIndexOf('.'), buffer, 0) - nameLength - } - } def binary: Option[AbstractFile] def source: Option[AbstractFile] } From d5bb4858a0d373557fbd400e719b8b4f2eee5d47 Mon Sep 17 00:00:00 2001 From: Martijn Hoekstra Date: Sat, 23 May 2020 16:07:16 +0200 Subject: [PATCH 142/769] [backport] fix back-quoted constructor params with identical prefixes --- .../tools/nsc/transform/Constructors.scala | 25 +- .../scala/reflect/internal/StdNames.scala | 3 + .../scala/reflect/internal/Symbols.scala | 16 +- test/files/run/t10625.check | 3 + test/files/run/t10625.scala | 8 + test/files/run/t8831.check | 1 + test/files/run/t8831.scala | 47 +++ test/files/run/t8831_many/Classes_1.scala | 319 ++++++++++++++++++ test/files/run/t8831_many/Tests_2.scala | 246 ++++++++++++++ test/files/run/t8831_many/generator.scala | 87 +++++ 10 files changed, 740 insertions(+), 15 deletions(-) create mode 100644 test/files/run/t10625.check create mode 100644 test/files/run/t10625.scala create mode 100644 test/files/run/t8831.check create mode 100644 test/files/run/t8831.scala create mode 100644 test/files/run/t8831_many/Classes_1.scala create mode 100644 test/files/run/t8831_many/Tests_2.scala create mode 100644 test/files/run/t8831_many/generator.scala diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala index b5db9c56a009..f13dc73c19e3 100644 --- a/src/compiler/scala/tools/nsc/transform/Constructors.scala +++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala @@ -480,18 +480,21 @@ abstract class Constructors extends Statics with Transform with TypingTransforme def 
usesSpecializedField = intoConstructor.usesSpecializedField // The constructor parameter corresponding to an accessor - def parameter(acc: Symbol): Symbol = parameterNamed(acc.unexpandedName.getterName) - - // The constructor parameter with given name. This means the parameter - // has given name, or starts with given name, and continues with a `$` afterwards. - def parameterNamed(name: Name): Symbol = { - def matchesName(param: Symbol) = param.name == name || param.name.startsWith(name + nme.NAME_JOIN_STRING) + def parameter(acc: Symbol): Symbol = { + //works around the edge case where unexpandedName over-unexpands shenanigans like literal $$ or `$#` + def unexpanded = parameterNamed(acc.unexpandedName.getterName) + def expanded = parameterNamed(acc.getterName) + (if (unexpanded.isRight) unexpanded else expanded).swap.map(abort).merge + } - primaryConstrParams filter matchesName match { - case Nil => abort(name + " not in " + primaryConstrParams) - case p :: _ => p + // The constructor parameter with given getter name. This means the parameter name + // decodes to the same name that the getter decodes to + def parameterNamed(name: Name): Either[String, Symbol] = + primaryConstrParams.filter(_.name.decodedName == name.decodedName) match { + case List(p) => Right(p) + case Nil => Left(s"No constructor parameter named $name (decoded to ${name.decodedName}) found in list of constructor parameters $primaryConstrParams (decoded to ${primaryConstrParams.map(_.decodedName)})") + case ps => Left(s"$name matches multiple constructor parameters $ps") } - } // A transformer for expressions that go into the constructor object intoConstructor extends Transformer { @@ -537,7 +540,7 @@ abstract class Constructors extends Statics with Transform with TypingTransforme else if (canBeSupplanted(tree.symbol)) gen.mkAttributedIdent(parameter(tree.symbol)) setPos tree.pos else if (tree.symbol.outerSource == clazz && !isDelayedInitSubclass) - gen.mkAttributedIdent(parameterNamed(nme.OUTER)) setPos tree.pos + gen.mkAttributedIdent(parameterNamed(nme.OUTER).fold(abort, identity)).setPos(tree.pos) else super.transform(tree) diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala index 6d688cfa0866..84d42b562f5e 100644 --- a/src/reflect/scala/reflect/internal/StdNames.scala +++ b/src/reflect/scala/reflect/internal/StdNames.scala @@ -441,6 +441,9 @@ trait StdNames { * Look backward from the end of the string for "$$", and take the * part of the string after that; but if the string is "$$$" or longer, * be sure to retain the extra dollars. + * If the name happens to be a back quoted name containing literal $$ + * or $ followed by an operator that gets encoded, go directly to compiler + * crash. Do not pass go and don't even think about collecting any $$ */ def unexpandedName(name: Name): Name = { if (!name.containsChar('$')) name // lastIndexOf calls Name.toString, add a fast path to avoid that. diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 4c2376351f9d..616e56bab198 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -2098,18 +2098,26 @@ trait Symbols extends api.Symbols { self: SymbolTable => // handling of non-public parameters seems to change the order (see scala/bug#7035.) 
// // Luckily, the constrParamAccessors are still sorted properly, so sort the field-accessors using them - // (need to undo name-mangling, including the sneaky trailing whitespace) + // (need to undo name-mangling, including the sneaky trailing whitespace, and match longest first) // // The slightly more principled approach of using the paramss of the // primary constructor leads to cycles in, for example, pos/t5084.scala. val primaryNames = constrParamAccessors map (_.name.dropLocal) def nameStartsWithOrigDollar(name: Name, prefix: Name) = name.startsWith(prefix) && name.length > prefix.length + 1 && name.charAt(prefix.length) == '$' - caseFieldAccessorsUnsorted.sortBy { acc => - primaryNames indexWhere { orig => - (acc.name == orig) || nameStartsWithOrigDollar(acc.name, orig) + + def rec(remaningAccessors: List[Symbol], foundAccessors: List[(Symbol, Int)], remainingNames: List[(Name, Int)]): List[Symbol] = { + remaningAccessors match { + case Nil => foundAccessors.sortBy(_._2).map(_._1) + case acc :: tail => { + val i = remainingNames.collectFirst { case (name, i) if acc.name == name || nameStartsWithOrigDollar(acc.name, name) => i} + rec(tail, (acc, i.get) :: foundAccessors, remainingNames.filterNot { case (_, ii) => Some(ii) == i} ) + } } } + + rec(caseFieldAccessorsUnsorted.sortBy(s => -s.name.length), Nil, primaryNames.zipWithIndex.sortBy{ case (n, _) => -n.length}) + } private final def caseFieldAccessorsUnsorted: List[Symbol] = info.decls.toList.filter(_.isCaseAccessorMethod) diff --git a/test/files/run/t10625.check b/test/files/run/t10625.check new file mode 100644 index 000000000000..a65cf05470e7 --- /dev/null +++ b/test/files/run/t10625.check @@ -0,0 +1,3 @@ +1 +1 +Some(1) diff --git a/test/files/run/t10625.scala b/test/files/run/t10625.scala new file mode 100644 index 000000000000..9f7cf82ab178 --- /dev/null +++ b/test/files/run/t10625.scala @@ -0,0 +1,8 @@ +case class WhyNot(`^$#`: Int) +object Test extends App { + val wn = WhyNot(1) + println(wn.`^$#`) + val WhyNot(i) = wn + println(i) + println(WhyNot.unapply(wn)) +} diff --git a/test/files/run/t8831.check b/test/files/run/t8831.check new file mode 100644 index 000000000000..5680b2c3e502 --- /dev/null +++ b/test/files/run/t8831.check @@ -0,0 +1 @@ +Right5(1,2,3,4,5) \ No newline at end of file diff --git a/test/files/run/t8831.scala b/test/files/run/t8831.scala new file mode 100644 index 000000000000..65ab7cd3ec35 --- /dev/null +++ b/test/files/run/t8831.scala @@ -0,0 +1,47 @@ +case class Right(a: Int, `a b`: Int) +case class VeryRight(a: Int, `a b`: String) + +case class Wrong(`a b`: Int, a: Int) +case class VeryWrong(`a b`: Int, a: String) +case class WrongDollar(a$: Int, a: Int) +case class VeryWrongDollar(a$: Int, a: String) +case class WrongQuotedDollar(`a$`: Int, a: Int) +case class WrongHyphenated(val `foo-bar`: Int, `foo`: Int) +case class VeryWrongHyphenated(val `foo-bar`: Int, `foo`: String) +case class WrongPlus(a_+ : Int, a_ : Int) +case class VeryWrongPlus(a_+ : Int, a_ : String) + +case class Right5(b: Int, `a b`: Int, a: Int, `a `: Int, `a b c`: Int) + +object Test { + def main(args: Array[String]): Unit = { + val r = Right(1, 2) + val w = Wrong(1, 2) + val wd = WrongDollar(1, 2) + val wh = WrongHyphenated(1, 2) + val wp = WrongPlus(1, 2) + assert(r.a == w.`a b`) + assert(r.a == wd.a$) + assert(r.a == wh.`foo-bar`) + assert(r.a == wp.a_+) + assert(r.`a b` == w.a) + assert(r.`a b` == wd.a) + assert(r.`a b` == wh.foo) + assert(r.`a b` == wp.a_) + + val vr = VeryRight(1, "one") + val vw = VeryWrong(1, "one") + 
val vwd = VeryWrongDollar(1, "one") + val vwh = VeryWrongHyphenated(1, "one") + val vwp = VeryWrongPlus(1, "one") + assert(vr.a == vw.`a b`) + assert(vr.a == vwd.a$) + assert(vr.a == vwh.`foo-bar`) + assert(vr.a == vwp.a_+) + assert(vr.`a b` == vw.a) + assert(vr.`a b` == vwd.a) + assert(vr.`a b` == vwh.foo) + assert(vr.`a b` == vwp.a_) + println(Right5(1, 2, 3, 4, 5).toString()) + } +} \ No newline at end of file diff --git a/test/files/run/t8831_many/Classes_1.scala b/test/files/run/t8831_many/Classes_1.scala new file mode 100644 index 000000000000..102caf4739cd --- /dev/null +++ b/test/files/run/t8831_many/Classes_1.scala @@ -0,0 +1,319 @@ +case class Cpv_pv[@specialized(Int) A, @specialized(Int) B](private val `a b`: A, private val a: B){ + val p1 = `a b` + val p2 = a +} +case class Cpv_v[@specialized(Int) A, @specialized(Int) B](private val `a b`: A, val a: B){ + val p1 = `a b` + val p2 = a +} +case class Cpv_n[@specialized(Int) A, @specialized(Int) B](private val `a b`: A, a: B){ + val p1 = `a b` + val p2 = a +} +case class Cv_pv[@specialized(Int) A, @specialized(Int) B](val `a b`: A, private val a: B){ + val p1 = `a b` + val p2 = a +} +case class Cv_v[@specialized(Int) A, @specialized(Int) B](val `a b`: A, val a: B){ + val p1 = `a b` + val p2 = a +} +case class Cv_n[@specialized(Int) A, @specialized(Int) B](val `a b`: A, a: B){ + val p1 = `a b` + val p2 = a +} +case class Cn_pv[@specialized(Int) A, @specialized(Int) B]( `a b`: A, private val a: B){ + val p1 = `a b` + val p2 = a +} +case class Cn_v[@specialized(Int) A, @specialized(Int) B]( `a b`: A, val a: B){ + val p1 = `a b` + val p2 = a +} +case class Cn_n[@specialized(Int) A, @specialized(Int) B]( `a b`: A, a: B){ + val p1 = `a b` + val p2 = a +} + class Rpv_pv[@specialized(Int) A, @specialized(Int) B](private val `a b`: A, private val a: B){ + val p1 = `a b` + val p2 = a +} + class Rpv_v[@specialized(Int) A, @specialized(Int) B](private val `a b`: A, val a: B){ + val p1 = `a b` + val p2 = a +} + class Rpv_n[@specialized(Int) A, @specialized(Int) B](private val `a b`: A, a: B){ + val p1 = `a b` + val p2 = a +} + class Rv_pv[@specialized(Int) A, @specialized(Int) B](val `a b`: A, private val a: B){ + val p1 = `a b` + val p2 = a +} + class Rv_v[@specialized(Int) A, @specialized(Int) B](val `a b`: A, val a: B){ + val p1 = `a b` + val p2 = a +} + class Rv_n[@specialized(Int) A, @specialized(Int) B](val `a b`: A, a: B){ + val p1 = `a b` + val p2 = a +} + class Rn_pv[@specialized(Int) A, @specialized(Int) B]( `a b`: A, private val a: B){ + val p1 = `a b` + val p2 = a +} + class Rn_v[@specialized(Int) A, @specialized(Int) B]( `a b`: A, val a: B){ + val p1 = `a b` + val p2 = a +} + class Rn_n[@specialized(Int) A, @specialized(Int) B]( `a b`: A, a: B){ + val p1 = `a b` + val p2 = a +} +object TestJoint { + def joint(): Unit = { + +val ch_Cpv_pv = new Cpv_pv('a', 'b') +val i_Cpv_pv = new Cpv_pv(1, 2) +val Cpv_pv(extracted1i_Cpv_pv, extracted2i_Cpv_pv) = i_Cpv_pv +val Cpv_pv(extracted1ch_Cpv_pv, extracted2ch_Cpv_pv) = ch_Cpv_pv +assert(1 == extracted1i_Cpv_pv) +assert(2 == extracted2i_Cpv_pv) +assert('a' == extracted1ch_Cpv_pv) +assert('b' == extracted2ch_Cpv_pv) + +assert(1 == i_Cpv_pv.p1) +assert(2 == i_Cpv_pv.p2) +assert('a' == ch_Cpv_pv.p1) +assert('b' == ch_Cpv_pv.p2) + + +val ch_Cpv_v = new Cpv_v('a', 'b') +val i_Cpv_v = new Cpv_v(1, 2) +val Cpv_v(extracted1i_Cpv_v, extracted2i_Cpv_v) = i_Cpv_v +val Cpv_v(extracted1ch_Cpv_v, extracted2ch_Cpv_v) = ch_Cpv_v +assert(1 == extracted1i_Cpv_v) +assert(2 == extracted2i_Cpv_v) +assert('a' == 
extracted1ch_Cpv_v) +assert('b' == extracted2ch_Cpv_v) + +assert(2 == i_Cpv_v.a) +assert('b' == ch_Cpv_v.a) +assert(1 == i_Cpv_v.p1) +assert(2 == i_Cpv_v.p2) +assert('a' == ch_Cpv_v.p1) +assert('b' == ch_Cpv_v.p2) + + +val ch_Cpv_n = new Cpv_n('a', 'b') +val i_Cpv_n = new Cpv_n(1, 2) +val Cpv_n(extracted1i_Cpv_n, extracted2i_Cpv_n) = i_Cpv_n +val Cpv_n(extracted1ch_Cpv_n, extracted2ch_Cpv_n) = ch_Cpv_n +assert(1 == extracted1i_Cpv_n) +assert(2 == extracted2i_Cpv_n) +assert('a' == extracted1ch_Cpv_n) +assert('b' == extracted2ch_Cpv_n) + +assert(2 == i_Cpv_n.a) +assert('b' == ch_Cpv_n.a) +assert(1 == i_Cpv_n.p1) +assert(2 == i_Cpv_n.p2) +assert('a' == ch_Cpv_n.p1) +assert('b' == ch_Cpv_n.p2) + + +val ch_Cv_pv = new Cv_pv('a', 'b') +val i_Cv_pv = new Cv_pv(1, 2) +val Cv_pv(extracted1i_Cv_pv, extracted2i_Cv_pv) = i_Cv_pv +val Cv_pv(extracted1ch_Cv_pv, extracted2ch_Cv_pv) = ch_Cv_pv +assert(1 == extracted1i_Cv_pv) +assert(2 == extracted2i_Cv_pv) +assert('a' == extracted1ch_Cv_pv) +assert('b' == extracted2ch_Cv_pv) + +assert(1 == i_Cv_pv.`a b`) +assert('a' == ch_Cv_pv.`a b`) +assert(1 == i_Cv_pv.p1) +assert(2 == i_Cv_pv.p2) +assert('a' == ch_Cv_pv.p1) +assert('b' == ch_Cv_pv.p2) + + +val ch_Cv_v = new Cv_v('a', 'b') +val i_Cv_v = new Cv_v(1, 2) +val Cv_v(extracted1i_Cv_v, extracted2i_Cv_v) = i_Cv_v +val Cv_v(extracted1ch_Cv_v, extracted2ch_Cv_v) = ch_Cv_v +assert(1 == extracted1i_Cv_v) +assert(2 == extracted2i_Cv_v) +assert('a' == extracted1ch_Cv_v) +assert('b' == extracted2ch_Cv_v) + +assert(1 == i_Cv_v.`a b`) +assert(2 == i_Cv_v.a) +assert('a' == ch_Cv_v.`a b`) +assert('b' == ch_Cv_v.a) +assert(1 == i_Cv_v.p1) +assert(2 == i_Cv_v.p2) +assert('a' == ch_Cv_v.p1) +assert('b' == ch_Cv_v.p2) + + +val ch_Cv_n = new Cv_n('a', 'b') +val i_Cv_n = new Cv_n(1, 2) +val Cv_n(extracted1i_Cv_n, extracted2i_Cv_n) = i_Cv_n +val Cv_n(extracted1ch_Cv_n, extracted2ch_Cv_n) = ch_Cv_n +assert(1 == extracted1i_Cv_n) +assert(2 == extracted2i_Cv_n) +assert('a' == extracted1ch_Cv_n) +assert('b' == extracted2ch_Cv_n) + +assert(1 == i_Cv_n.`a b`) +assert(2 == i_Cv_n.a) +assert('a' == ch_Cv_n.`a b`) +assert('b' == ch_Cv_n.a) +assert(1 == i_Cv_n.p1) +assert(2 == i_Cv_n.p2) +assert('a' == ch_Cv_n.p1) +assert('b' == ch_Cv_n.p2) + + +val ch_Cn_pv = new Cn_pv('a', 'b') +val i_Cn_pv = new Cn_pv(1, 2) +val Cn_pv(extracted1i_Cn_pv, extracted2i_Cn_pv) = i_Cn_pv +val Cn_pv(extracted1ch_Cn_pv, extracted2ch_Cn_pv) = ch_Cn_pv +assert(1 == extracted1i_Cn_pv) +assert(2 == extracted2i_Cn_pv) +assert('a' == extracted1ch_Cn_pv) +assert('b' == extracted2ch_Cn_pv) + +assert(1 == i_Cn_pv.`a b`) +assert('a' == ch_Cn_pv.`a b`) +assert(1 == i_Cn_pv.p1) +assert(2 == i_Cn_pv.p2) +assert('a' == ch_Cn_pv.p1) +assert('b' == ch_Cn_pv.p2) + + +val ch_Cn_v = new Cn_v('a', 'b') +val i_Cn_v = new Cn_v(1, 2) +val Cn_v(extracted1i_Cn_v, extracted2i_Cn_v) = i_Cn_v +val Cn_v(extracted1ch_Cn_v, extracted2ch_Cn_v) = ch_Cn_v +assert(1 == extracted1i_Cn_v) +assert(2 == extracted2i_Cn_v) +assert('a' == extracted1ch_Cn_v) +assert('b' == extracted2ch_Cn_v) + +assert(1 == i_Cn_v.`a b`) +assert(2 == i_Cn_v.a) +assert('a' == ch_Cn_v.`a b`) +assert('b' == ch_Cn_v.a) +assert(1 == i_Cn_v.p1) +assert(2 == i_Cn_v.p2) +assert('a' == ch_Cn_v.p1) +assert('b' == ch_Cn_v.p2) + + +val ch_Cn_n = new Cn_n('a', 'b') +val i_Cn_n = new Cn_n(1, 2) +val Cn_n(extracted1i_Cn_n, extracted2i_Cn_n) = i_Cn_n +val Cn_n(extracted1ch_Cn_n, extracted2ch_Cn_n) = ch_Cn_n +assert(1 == extracted1i_Cn_n) +assert(2 == extracted2i_Cn_n) +assert('a' == extracted1ch_Cn_n) +assert('b' == extracted2ch_Cn_n) 
+ +assert(1 == i_Cn_n.`a b`) +assert(2 == i_Cn_n.a) +assert('a' == ch_Cn_n.`a b`) +assert('b' == ch_Cn_n.a) +assert(1 == i_Cn_n.p1) +assert(2 == i_Cn_n.p2) +assert('a' == ch_Cn_n.p1) +assert('b' == ch_Cn_n.p2) + + +val ch_Rpv_pv = new Rpv_pv('a', 'b') +val i_Rpv_pv = new Rpv_pv(1, 2) +assert(1 == i_Rpv_pv.p1) +assert(2 == i_Rpv_pv.p2) +assert('a' == ch_Rpv_pv.p1) +assert('b' == ch_Rpv_pv.p2) + + +val ch_Rpv_v = new Rpv_v('a', 'b') +val i_Rpv_v = new Rpv_v(1, 2) +assert(2 == i_Rpv_v.a) +assert('b' == ch_Rpv_v.a) +assert(1 == i_Rpv_v.p1) +assert(2 == i_Rpv_v.p2) +assert('a' == ch_Rpv_v.p1) +assert('b' == ch_Rpv_v.p2) + + +val ch_Rpv_n = new Rpv_n('a', 'b') +val i_Rpv_n = new Rpv_n(1, 2) +assert(1 == i_Rpv_n.p1) +assert(2 == i_Rpv_n.p2) +assert('a' == ch_Rpv_n.p1) +assert('b' == ch_Rpv_n.p2) + + +val ch_Rv_pv = new Rv_pv('a', 'b') +val i_Rv_pv = new Rv_pv(1, 2) +assert(1 == i_Rv_pv.`a b`) +assert('a' == ch_Rv_pv.`a b`) +assert(1 == i_Rv_pv.p1) +assert(2 == i_Rv_pv.p2) +assert('a' == ch_Rv_pv.p1) +assert('b' == ch_Rv_pv.p2) + + +val ch_Rv_v = new Rv_v('a', 'b') +val i_Rv_v = new Rv_v(1, 2) +assert(1 == i_Rv_v.`a b`) +assert(2 == i_Rv_v.a) +assert('a' == ch_Rv_v.`a b`) +assert('b' == ch_Rv_v.a) +assert(1 == i_Rv_v.p1) +assert(2 == i_Rv_v.p2) +assert('a' == ch_Rv_v.p1) +assert('b' == ch_Rv_v.p2) + + +val ch_Rv_n = new Rv_n('a', 'b') +val i_Rv_n = new Rv_n(1, 2) +assert(1 == i_Rv_n.`a b`) +assert('a' == ch_Rv_n.`a b`) +assert(1 == i_Rv_n.p1) +assert(2 == i_Rv_n.p2) +assert('a' == ch_Rv_n.p1) +assert('b' == ch_Rv_n.p2) + + +val ch_Rn_pv = new Rn_pv('a', 'b') +val i_Rn_pv = new Rn_pv(1, 2) +assert(1 == i_Rn_pv.p1) +assert(2 == i_Rn_pv.p2) +assert('a' == ch_Rn_pv.p1) +assert('b' == ch_Rn_pv.p2) + + +val ch_Rn_v = new Rn_v('a', 'b') +val i_Rn_v = new Rn_v(1, 2) +assert(2 == i_Rn_v.a) +assert('b' == ch_Rn_v.a) +assert(1 == i_Rn_v.p1) +assert(2 == i_Rn_v.p2) +assert('a' == ch_Rn_v.p1) +assert('b' == ch_Rn_v.p2) + + +val ch_Rn_n = new Rn_n('a', 'b') +val i_Rn_n = new Rn_n(1, 2) +assert(1 == i_Rn_n.p1) +assert(2 == i_Rn_n.p2) +assert('a' == ch_Rn_n.p1) +assert('b' == ch_Rn_n.p2) + + +}} diff --git a/test/files/run/t8831_many/Tests_2.scala b/test/files/run/t8831_many/Tests_2.scala new file mode 100644 index 000000000000..d496b4f5ebe7 --- /dev/null +++ b/test/files/run/t8831_many/Tests_2.scala @@ -0,0 +1,246 @@ +object Test extends App { + +val ch_Cpv_pv = new Cpv_pv('a', 'b') +val i_Cpv_pv = new Cpv_pv(1, 2) +val Cpv_pv(extracted1i_Cpv_pv, extracted2i_Cpv_pv) = i_Cpv_pv +val Cpv_pv(extracted1ch_Cpv_pv, extracted2ch_Cpv_pv) = ch_Cpv_pv +assert(1 == extracted1i_Cpv_pv) +assert(2 == extracted2i_Cpv_pv) +assert('a' == extracted1ch_Cpv_pv) +assert('b' == extracted2ch_Cpv_pv) + +assert(1 == i_Cpv_pv.p1) +assert(2 == i_Cpv_pv.p2) +assert('a' == ch_Cpv_pv.p1) +assert('b' == ch_Cpv_pv.p2) + + +val ch_Cpv_v = new Cpv_v('a', 'b') +val i_Cpv_v = new Cpv_v(1, 2) +val Cpv_v(extracted1i_Cpv_v, extracted2i_Cpv_v) = i_Cpv_v +val Cpv_v(extracted1ch_Cpv_v, extracted2ch_Cpv_v) = ch_Cpv_v +assert(1 == extracted1i_Cpv_v) +assert(2 == extracted2i_Cpv_v) +assert('a' == extracted1ch_Cpv_v) +assert('b' == extracted2ch_Cpv_v) + +assert(2 == i_Cpv_v.a) +assert('b' == ch_Cpv_v.a) +assert(1 == i_Cpv_v.p1) +assert(2 == i_Cpv_v.p2) +assert('a' == ch_Cpv_v.p1) +assert('b' == ch_Cpv_v.p2) + + +val ch_Cpv_n = new Cpv_n('a', 'b') +val i_Cpv_n = new Cpv_n(1, 2) +val Cpv_n(extracted1i_Cpv_n, extracted2i_Cpv_n) = i_Cpv_n +val Cpv_n(extracted1ch_Cpv_n, extracted2ch_Cpv_n) = ch_Cpv_n +assert(1 == extracted1i_Cpv_n) +assert(2 == 
extracted2i_Cpv_n) +assert('a' == extracted1ch_Cpv_n) +assert('b' == extracted2ch_Cpv_n) + +assert(2 == i_Cpv_n.a) +assert('b' == ch_Cpv_n.a) +assert(1 == i_Cpv_n.p1) +assert(2 == i_Cpv_n.p2) +assert('a' == ch_Cpv_n.p1) +assert('b' == ch_Cpv_n.p2) + + +val ch_Cv_pv = new Cv_pv('a', 'b') +val i_Cv_pv = new Cv_pv(1, 2) +val Cv_pv(extracted1i_Cv_pv, extracted2i_Cv_pv) = i_Cv_pv +val Cv_pv(extracted1ch_Cv_pv, extracted2ch_Cv_pv) = ch_Cv_pv +assert(1 == extracted1i_Cv_pv) +assert(2 == extracted2i_Cv_pv) +assert('a' == extracted1ch_Cv_pv) +assert('b' == extracted2ch_Cv_pv) + +assert(1 == i_Cv_pv.`a b`) +assert('a' == ch_Cv_pv.`a b`) +assert(1 == i_Cv_pv.p1) +assert(2 == i_Cv_pv.p2) +assert('a' == ch_Cv_pv.p1) +assert('b' == ch_Cv_pv.p2) + + +val ch_Cv_v = new Cv_v('a', 'b') +val i_Cv_v = new Cv_v(1, 2) +val Cv_v(extracted1i_Cv_v, extracted2i_Cv_v) = i_Cv_v +val Cv_v(extracted1ch_Cv_v, extracted2ch_Cv_v) = ch_Cv_v +assert(1 == extracted1i_Cv_v) +assert(2 == extracted2i_Cv_v) +assert('a' == extracted1ch_Cv_v) +assert('b' == extracted2ch_Cv_v) + +assert(1 == i_Cv_v.`a b`) +assert(2 == i_Cv_v.a) +assert('a' == ch_Cv_v.`a b`) +assert('b' == ch_Cv_v.a) +assert(1 == i_Cv_v.p1) +assert(2 == i_Cv_v.p2) +assert('a' == ch_Cv_v.p1) +assert('b' == ch_Cv_v.p2) + + +val ch_Cv_n = new Cv_n('a', 'b') +val i_Cv_n = new Cv_n(1, 2) +val Cv_n(extracted1i_Cv_n, extracted2i_Cv_n) = i_Cv_n +val Cv_n(extracted1ch_Cv_n, extracted2ch_Cv_n) = ch_Cv_n +assert(1 == extracted1i_Cv_n) +assert(2 == extracted2i_Cv_n) +assert('a' == extracted1ch_Cv_n) +assert('b' == extracted2ch_Cv_n) + +assert(1 == i_Cv_n.`a b`) +assert(2 == i_Cv_n.a) +assert('a' == ch_Cv_n.`a b`) +assert('b' == ch_Cv_n.a) +assert(1 == i_Cv_n.p1) +assert(2 == i_Cv_n.p2) +assert('a' == ch_Cv_n.p1) +assert('b' == ch_Cv_n.p2) + + +val ch_Cn_pv = new Cn_pv('a', 'b') +val i_Cn_pv = new Cn_pv(1, 2) +val Cn_pv(extracted1i_Cn_pv, extracted2i_Cn_pv) = i_Cn_pv +val Cn_pv(extracted1ch_Cn_pv, extracted2ch_Cn_pv) = ch_Cn_pv +assert(1 == extracted1i_Cn_pv) +assert(2 == extracted2i_Cn_pv) +assert('a' == extracted1ch_Cn_pv) +assert('b' == extracted2ch_Cn_pv) + +assert(1 == i_Cn_pv.`a b`) +assert('a' == ch_Cn_pv.`a b`) +assert(1 == i_Cn_pv.p1) +assert(2 == i_Cn_pv.p2) +assert('a' == ch_Cn_pv.p1) +assert('b' == ch_Cn_pv.p2) + + +val ch_Cn_v = new Cn_v('a', 'b') +val i_Cn_v = new Cn_v(1, 2) +val Cn_v(extracted1i_Cn_v, extracted2i_Cn_v) = i_Cn_v +val Cn_v(extracted1ch_Cn_v, extracted2ch_Cn_v) = ch_Cn_v +assert(1 == extracted1i_Cn_v) +assert(2 == extracted2i_Cn_v) +assert('a' == extracted1ch_Cn_v) +assert('b' == extracted2ch_Cn_v) + +assert(1 == i_Cn_v.`a b`) +assert(2 == i_Cn_v.a) +assert('a' == ch_Cn_v.`a b`) +assert('b' == ch_Cn_v.a) +assert(1 == i_Cn_v.p1) +assert(2 == i_Cn_v.p2) +assert('a' == ch_Cn_v.p1) +assert('b' == ch_Cn_v.p2) + + +val ch_Cn_n = new Cn_n('a', 'b') +val i_Cn_n = new Cn_n(1, 2) +val Cn_n(extracted1i_Cn_n, extracted2i_Cn_n) = i_Cn_n +val Cn_n(extracted1ch_Cn_n, extracted2ch_Cn_n) = ch_Cn_n +assert(1 == extracted1i_Cn_n) +assert(2 == extracted2i_Cn_n) +assert('a' == extracted1ch_Cn_n) +assert('b' == extracted2ch_Cn_n) + +assert(1 == i_Cn_n.`a b`) +assert(2 == i_Cn_n.a) +assert('a' == ch_Cn_n.`a b`) +assert('b' == ch_Cn_n.a) +assert(1 == i_Cn_n.p1) +assert(2 == i_Cn_n.p2) +assert('a' == ch_Cn_n.p1) +assert('b' == ch_Cn_n.p2) + + +val ch_Rpv_pv = new Rpv_pv('a', 'b') +val i_Rpv_pv = new Rpv_pv(1, 2) +assert(1 == i_Rpv_pv.p1) +assert(2 == i_Rpv_pv.p2) +assert('a' == ch_Rpv_pv.p1) +assert('b' == ch_Rpv_pv.p2) + + +val ch_Rpv_v = new Rpv_v('a', 'b') +val i_Rpv_v = 
new Rpv_v(1, 2) +assert(2 == i_Rpv_v.a) +assert('b' == ch_Rpv_v.a) +assert(1 == i_Rpv_v.p1) +assert(2 == i_Rpv_v.p2) +assert('a' == ch_Rpv_v.p1) +assert('b' == ch_Rpv_v.p2) + + +val ch_Rpv_n = new Rpv_n('a', 'b') +val i_Rpv_n = new Rpv_n(1, 2) +assert(1 == i_Rpv_n.p1) +assert(2 == i_Rpv_n.p2) +assert('a' == ch_Rpv_n.p1) +assert('b' == ch_Rpv_n.p2) + + +val ch_Rv_pv = new Rv_pv('a', 'b') +val i_Rv_pv = new Rv_pv(1, 2) +assert(1 == i_Rv_pv.`a b`) +assert('a' == ch_Rv_pv.`a b`) +assert(1 == i_Rv_pv.p1) +assert(2 == i_Rv_pv.p2) +assert('a' == ch_Rv_pv.p1) +assert('b' == ch_Rv_pv.p2) + + +val ch_Rv_v = new Rv_v('a', 'b') +val i_Rv_v = new Rv_v(1, 2) +assert(1 == i_Rv_v.`a b`) +assert(2 == i_Rv_v.a) +assert('a' == ch_Rv_v.`a b`) +assert('b' == ch_Rv_v.a) +assert(1 == i_Rv_v.p1) +assert(2 == i_Rv_v.p2) +assert('a' == ch_Rv_v.p1) +assert('b' == ch_Rv_v.p2) + + +val ch_Rv_n = new Rv_n('a', 'b') +val i_Rv_n = new Rv_n(1, 2) +assert(1 == i_Rv_n.`a b`) +assert('a' == ch_Rv_n.`a b`) +assert(1 == i_Rv_n.p1) +assert(2 == i_Rv_n.p2) +assert('a' == ch_Rv_n.p1) +assert('b' == ch_Rv_n.p2) + + +val ch_Rn_pv = new Rn_pv('a', 'b') +val i_Rn_pv = new Rn_pv(1, 2) +assert(1 == i_Rn_pv.p1) +assert(2 == i_Rn_pv.p2) +assert('a' == ch_Rn_pv.p1) +assert('b' == ch_Rn_pv.p2) + + +val ch_Rn_v = new Rn_v('a', 'b') +val i_Rn_v = new Rn_v(1, 2) +assert(2 == i_Rn_v.a) +assert('b' == ch_Rn_v.a) +assert(1 == i_Rn_v.p1) +assert(2 == i_Rn_v.p2) +assert('a' == ch_Rn_v.p1) +assert('b' == ch_Rn_v.p2) + + +val ch_Rn_n = new Rn_n('a', 'b') +val i_Rn_n = new Rn_n(1, 2) +assert(1 == i_Rn_n.p1) +assert(2 == i_Rn_n.p2) +assert('a' == ch_Rn_n.p1) +assert('b' == ch_Rn_n.p2) + +TestJoint.joint() +} diff --git a/test/files/run/t8831_many/generator.scala b/test/files/run/t8831_many/generator.scala new file mode 100644 index 000000000000..658647d702d5 --- /dev/null +++ b/test/files/run/t8831_many/generator.scala @@ -0,0 +1,87 @@ +//Generate the classes and assertions under test. 
+case class TestCase(classType: String, p1: ParamConfig, p2: ParamConfig) { + val className = s"${classType.headOption.getOrElse('r')}${abbr(p1)}_${abbr(p2)}".capitalize + val tParams = "[@specialized(Int) A, @specialized(Int) B]" + def abbr(p: ParamConfig): String = p.modifier.split(' ').toSeq.map(_.headOption.getOrElse('n')).mkString + def decl(param: ParamConfig): String = s"val ${param.aliasName} = ${param.constructorName}" + def renderClass: String = s"""$classType class $className$tParams(${p1.modifier} ${p1.constructorName}: A, ${p2.modifier} a: B){ + | ${decl(p1)} + | ${decl(p2)} + |}""".stripMargin + + def accessConstr(p: ParamConfig) = Option(p).filterNot(p => p.modifier == "private val").filterNot(p => p.modifier == "" && classType == "").map(_.constructorName) + def testConstrCh(p: ParamConfig, expected: String) = accessConstr(p).map(name => s"assert($expected == ch_$className.$name)") + def testConstrI(p: ParamConfig, expected: String) = accessConstr(p).map(name => s"assert($expected == i_$className.$name)") + def testAliasCh(p: ParamConfig, expected: String) = Some(p.aliasName).map(name => s"assert($expected == ch_$className.$name)") + def testAliasI(p: ParamConfig, expected: String) = Some(p.aliasName).map(name => s"assert($expected == i_$className.$name)") + def testExtractors = Some(s"""val $className(extracted1i_$className, extracted2i_$className) = i_$className + |val $className(extracted1ch_$className, extracted2ch_$className) = ch_$className + |assert(1 == extracted1i_$className) + |assert(2 == extracted2i_$className) + |assert('a' == extracted1ch_$className) + |assert('b' == extracted2ch_$className) + |""".stripMargin).filter(_ => classType == "case") + val assertions = List( + testExtractors, + testConstrI(p1, "1"), + testConstrI(p2, "2"), + testConstrCh(p1, "'a'"), + testConstrCh(p2, "'b'"), + testAliasI(p1, "1"), + testAliasI(p2, "2"), + testAliasCh(p1, "'a'"), + testAliasCh(p2, "'b'"), + ).collect{ case Some(t) => t } + def renderTests: String = (instantiateChar :: instantiateInt :: assertions).mkString("\n", "\n", "\n") + def instantiateChar = s"val ch_$className = new $className('a', 'b')" //non-specialized variety + def instantiateInt = s"val i_$className = new $className(1, 2)" //specialized variety +} + +case class ParamConfig(modifier: String, constructorName: String, aliasName: String) + +object Generator { + def paramConfigurations(constructorName: String, aliasName: String) = for { + modifier <- List("private val", "val", "") + } yield ParamConfig(modifier, constructorName, aliasName) + + def hasVal(p1: ParamConfig, p2: ParamConfig) = p1.modifier.contains("val") || p2.modifier.contains("val") + + val configurations = for { + classConfig <- List("case", "") + p1config <- paramConfigurations("`a b`", "p1") + p2config <- paramConfigurations("a", "p2") + } yield TestCase(classConfig, p1config, p2config) + + def main(args: Array[String]): Unit = { + import java.io.File + import java.io.PrintWriter + + val classes = new File("Classes_1.scala") + val tests = new File("Tests_2.scala") + val classWriter = new PrintWriter(classes) + val testWriter = new PrintWriter(tests) + + for(testClass <- configurations) { + classWriter.write(testClass.renderClass) + classWriter.write("\n") + } + + //test both separate and joint compilation. 
+ + testWriter.write("object Test extends App {\n") + classWriter.write("object TestJoint {\n def joint(): Unit = {\n") + for(testClass <- configurations){ + classWriter.write(testClass.renderTests) + classWriter.write("\n") + testWriter.write(testClass.renderTests) + testWriter.write("\n") + } + classWriter.write("\n}}\n") + testWriter.write("TestJoint.joint()") + + testWriter.write("\n}\n") + classWriter.close() + testWriter.close() + + } +} \ No newline at end of file From 4e2eb74ab10ed1e4d56247b0629cdb3a88a52724 Mon Sep 17 00:00:00 2001 From: "Aaron S. Hawley" Date: Tue, 4 May 2021 12:00:01 -0400 Subject: [PATCH 143/769] Revise workaround from ScalaCheck 1.15.1 update --- test/scalacheck/scala/collection/FloatFormatTest.scala | 4 ++-- test/scalacheck/scala/collection/IntegralParseTest.scala | 7 +++++-- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/test/scalacheck/scala/collection/FloatFormatTest.scala b/test/scalacheck/scala/collection/FloatFormatTest.scala index 7dd3989fab9f..6a70352fde82 100644 --- a/test/scalacheck/scala/collection/FloatFormatTest.scala +++ b/test/scalacheck/scala/collection/FloatFormatTest.scala @@ -81,8 +81,8 @@ object FloatFormatTest extends Properties("FloatFormat") { 10 -> right )) - // type annotation shouldn't be necessary? see typelevel/scalacheck#721 - Gen.sequence[List[String], String](bogoparts).map(_.mkString) + import scala.jdk.CollectionConverters._ + Gen.sequence(bogoparts).map(_.asScala.mkString) } //compare NaN equal diff --git a/test/scalacheck/scala/collection/IntegralParseTest.scala b/test/scalacheck/scala/collection/IntegralParseTest.scala index 6fd4e229551c..b49466e9bb15 100644 --- a/test/scalacheck/scala/collection/IntegralParseTest.scala +++ b/test/scalacheck/scala/collection/IntegralParseTest.scala @@ -120,8 +120,11 @@ object NumericStringGenerators { if (n >= 0) Gen.oneOf(digitsByValue(n)) else Gen.const(ch) }) - // type annotation shouldn't be necessary? see typelevel/scalacheck#721 - Gen.sequence[List[Char], Char](listOfGens).map(_.mkString) + + import scala.jdk.CollectionConverters._ + + val sequenced = Gen.sequence(listOfGens) + sequenced.map(_.asScala.mkString) } } From 12093820c6da3909f5545f481d76602e69f3b151 Mon Sep 17 00:00:00 2001 From: "Aaron S. 
Hawley" Date: Tue, 4 May 2021 13:39:29 -0400 Subject: [PATCH 144/769] Shorten links in apidocs for Java --- src/compiler/scala/tools/reflect/FormatInterpolator.scala | 2 +- src/library/scala/Predef.scala | 5 ++--- src/library/scala/SerialVersionUID.scala | 2 +- src/library/scala/concurrent/ExecutionContext.scala | 6 +++--- src/library/scala/util/matching/Regex.scala | 3 ++- 5 files changed, 9 insertions(+), 9 deletions(-) diff --git a/src/compiler/scala/tools/reflect/FormatInterpolator.scala b/src/compiler/scala/tools/reflect/FormatInterpolator.scala index 1630e44d250d..158ba29c88ad 100644 --- a/src/compiler/scala/tools/reflect/FormatInterpolator.scala +++ b/src/compiler/scala/tools/reflect/FormatInterpolator.scala @@ -69,7 +69,7 @@ abstract class FormatInterpolator { * 5) "...\${smth}%%" => okay, equivalent to "...\${smth}%s%%" * 6) "...\${smth}[%legalJavaConversion]" => okay* * 7) "...\${smth}[%illegalJavaConversion]" => error - * *Legal according to [[https://docs.oracle.com/javase/8/docs/api/java/util/Formatter.html]] + * *Legal according to [[java.util.Formatter]] */ def interpolated(parts: List[Tree], args: List[Tree]) = { val fstring = new StringBuilder diff --git a/src/library/scala/Predef.scala b/src/library/scala/Predef.scala index 6577d5d8e403..fa46286d494a 100644 --- a/src/library/scala/Predef.scala +++ b/src/library/scala/Predef.scala @@ -137,9 +137,8 @@ object Predef extends LowPriorityImplicits { @inline def valueOf[T](implicit vt: ValueOf[T]): T = vt.value /** The `String` type in Scala has all the methods of the underlying - * `java.lang.String`, of which it is just an alias. - * (See the documentation corresponding to your Java version, - * for example [[https://docs.oracle.com/javase/8/docs/api/java/lang/String.html]].) + * [[java.lang.String]], of which it is just an alias. + * * In addition, extension methods in [[scala.collection.StringOps]] * are added implicitly through the conversion [[augmentString]]. * @group aliases diff --git a/src/library/scala/SerialVersionUID.scala b/src/library/scala/SerialVersionUID.scala index e92e0d9fbd78..7a0b08f6fa23 100644 --- a/src/library/scala/SerialVersionUID.scala +++ b/src/library/scala/SerialVersionUID.scala @@ -20,7 +20,7 @@ package scala * which the JVM's serialization mechanism uses to determine serialization * compatibility between different versions of a class. * - * @see [[https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html `java.io.Serializable`]] + * @see [[java.io.Serializable]] * @see [[Serializable]] */ @deprecatedInheritance("Scheduled for being final in the future", "2.13.0") diff --git a/src/library/scala/concurrent/ExecutionContext.scala b/src/library/scala/concurrent/ExecutionContext.scala index 7f811c97834a..41dfbb609816 100644 --- a/src/library/scala/concurrent/ExecutionContext.scala +++ b/src/library/scala/concurrent/ExecutionContext.scala @@ -106,13 +106,13 @@ trait ExecutionContext { /** * An [[ExecutionContext]] that is also a - * Java [[https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Executor.html Executor]]. + * Java [[java.util.concurrent.Executor Executor]]. */ trait ExecutionContextExecutor extends ExecutionContext with Executor /** * An [[ExecutionContext]] that is also a - * Java [[https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html ExecutorService]]. + * Java [[java.util.concurrent.ExecutorService ExecutorService]]. 
*/ trait ExecutionContextExecutorService extends ExecutionContextExecutor with ExecutorService @@ -287,7 +287,7 @@ object ExecutionContext { */ def fromExecutor(e: Executor): ExecutionContextExecutor = fromExecutor(e, defaultReporter) - /** The default reporter simply prints the stack trace of the `Throwable` to [[https://docs.oracle.com/javase/8/docs/api/java/lang/System.html#err System.err]]. + /** The default reporter simply prints the stack trace of the `Throwable` to [[java.lang.System#err System.err]]. * * @return the function for error reporting */ diff --git a/src/library/scala/util/matching/Regex.scala b/src/library/scala/util/matching/Regex.scala index 66ead3f03107..2b8bc69c07c6 100644 --- a/src/library/scala/util/matching/Regex.scala +++ b/src/library/scala/util/matching/Regex.scala @@ -33,7 +33,8 @@ import java.util.regex.{ Pattern, Matcher } * and, if it does, to extract or transform the parts that match. * * === Usage === - * This class delegates to the [[java.util.regex]] package of the Java Platform. + + * This class delegates to the [[https://docs.oracle.com/javase/8/docs/api/java/util/regex/package-summary.html java.util.regex]] package of the Java Platform. * See the documentation for [[java.util.regex.Pattern]] for details about * the regular expression syntax for pattern strings. * From 0b0ecbadf015d2e42f9db8d24e386c40788ab0e5 Mon Sep 17 00:00:00 2001 From: "Aaron S. Hawley" Date: Tue, 4 May 2021 13:47:31 -0400 Subject: [PATCH 145/769] Fix broken doc link in jdk.Accumulator --- src/library/scala/jdk/Accumulator.scala | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/library/scala/jdk/Accumulator.scala b/src/library/scala/jdk/Accumulator.scala index da5f722df4d2..ca1b0215bcd8 100644 --- a/src/library/scala/jdk/Accumulator.scala +++ b/src/library/scala/jdk/Accumulator.scala @@ -54,7 +54,8 @@ import scala.language.implicitConversions * There are two possibilities to process elements of a primitive Accumulator without boxing: * specialized operations of the Accumulator, or the Stepper interface. The most common collection * operations are overloaded or overridden in the primitive Accumulator classes, for example - * [[IntAccumulator.map(f: Int => Int)* IntAccumulator.map]] or [[IntAccumulator.exists]]. Thanks to Scala's function specialization, + * [[IntAccumulator.map(f:Int=>Int)* IntAccumulator.map]] or [[IntAccumulator.exists]]. + * Thanks to Scala's function specialization, * `intAcc.exists(x => testOn(x))` does not incur boxing. * * The [[scala.collection.Stepper]] interface provides iterator-like `hasStep` and `nextStep` methods, and is From 07a5f4de9ca92d49129a6bb838bac25460ad58c3 Mon Sep 17 00:00:00 2001 From: "Aaron S. 
Hawley" Date: Tue, 4 May 2021 13:51:33 -0400 Subject: [PATCH 146/769] Fix warning about doc variable in LazyList --- src/library/scala/collection/immutable/LazyList.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/library/scala/collection/immutable/LazyList.scala b/src/library/scala/collection/immutable/LazyList.scala index 58ff4a8970a2..db0e9d180b22 100644 --- a/src/library/scala/collection/immutable/LazyList.scala +++ b/src/library/scala/collection/immutable/LazyList.scala @@ -68,7 +68,7 @@ import scala.runtime.Statics * val fibs: LazyList[BigInt] = * BigInt(0) #:: BigInt(1) #:: * fibs.zip(fibs.tail).map{ n => - * println(s"Adding ${n._1} and ${n._2}") + * println(s"Adding \${n._1} and \${n._2}") * n._1 + n._2 * } * fibs.take(5).foreach(println) From e8194e70c35d6cc9f41ae2a6115ec6b553d6ebad Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Tue, 5 Jan 2021 13:33:23 -0800 Subject: [PATCH 147/769] bump copyright year to 2021 forward-ports 906f4cd178f to 2.13.x -- this somehow got missed in some 2.12.x->2.13.x merge --- NOTICE | 4 ++-- doc/LICENSE.md | 4 ++-- doc/License.rtf | 4 ++-- project/VersionUtil.scala | 2 +- src/library/scala/util/Properties.scala | 2 +- src/scaladoc/scala/tools/nsc/doc/html/page/Entity.scala | 2 +- src/scalap/decoder.properties | 2 +- 7 files changed, 10 insertions(+), 10 deletions(-) diff --git a/NOTICE b/NOTICE index ac3a26b40f48..ba6f890b920f 100644 --- a/NOTICE +++ b/NOTICE @@ -1,6 +1,6 @@ Scala -Copyright (c) 2002-2020 EPFL -Copyright (c) 2011-2020 Lightbend, Inc. +Copyright (c) 2002-2021 EPFL +Copyright (c) 2011-2021 Lightbend, Inc. Scala includes software developed at LAMP/EPFL (https://lamp.epfl.ch/) and diff --git a/doc/LICENSE.md b/doc/LICENSE.md index 83ef781d15f1..8a8e78738ff2 100644 --- a/doc/LICENSE.md +++ b/doc/LICENSE.md @@ -2,9 +2,9 @@ Scala is licensed under the [Apache License Version 2.0](https://www.apache.org/ ## Scala License -Copyright (c) 2002-2020 EPFL +Copyright (c) 2002-2021 EPFL -Copyright (c) 2011-2020 Lightbend, Inc. +Copyright (c) 2011-2021 Lightbend, Inc. All rights reserved. diff --git a/doc/License.rtf b/doc/License.rtf index 376ec02cb530..8f266ee71b4b 100644 --- a/doc/License.rtf +++ b/doc/License.rtf @@ -23,8 +23,8 @@ Scala is licensed under the\'a0{\field{\*\fldinst{HYPERLINK "https://www.apache. \fs48 \cf2 Scala License\ \pard\pardeftab720\sl360\sa320\partightenfactor0 -\f0\b0\fs28 \cf2 Copyright (c) 2002-2020 EPFL\ -Copyright (c) 2011-2020 Lightbend, Inc.\ +\f0\b0\fs28 \cf2 Copyright (c) 2002-2021 EPFL\ +Copyright (c) 2011-2021 Lightbend, Inc.\ All rights reserved.\ \pard\pardeftab720\sl360\sa320\partightenfactor0 \cf2 \cb4 Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at {\field{\*\fldinst{HYPERLINK "http://www.apache.org/licenses/LICENSE-2.0"}}{\fldrslt http://www.apache.org/licenses/LICENSE-2.0}}.\ diff --git a/project/VersionUtil.scala b/project/VersionUtil.scala index 6b4e659cc7a2..ac5ea30b20cc 100644 --- a/project/VersionUtil.scala +++ b/project/VersionUtil.scala @@ -30,7 +30,7 @@ object VersionUtil { ) lazy val generatePropertiesFileSettings = Seq[Setting[_]]( - copyrightString := "Copyright 2002-2020, LAMP/EPFL and Lightbend, Inc.", + copyrightString := "Copyright 2002-2021, LAMP/EPFL and Lightbend, Inc.", shellBannerString := """ | ________ ___ / / ___ | / __/ __// _ | / / / _ | diff --git a/src/library/scala/util/Properties.scala b/src/library/scala/util/Properties.scala index ff9634e2cc7e..f10723cb4b1d 100644 --- a/src/library/scala/util/Properties.scala +++ b/src/library/scala/util/Properties.scala @@ -109,7 +109,7 @@ private[scala] trait PropertiesTrait { * or "version (unknown)" if it cannot be determined. */ val versionString = "version " + scalaPropOrElse("version.number", "(unknown)") - val copyrightString = scalaPropOrElse("copyright.string", "Copyright 2002-2020, LAMP/EPFL and Lightbend, Inc.") + val copyrightString = scalaPropOrElse("copyright.string", "Copyright 2002-2021, LAMP/EPFL and Lightbend, Inc.") /** This is the encoding to use reading in source files, overridden with -encoding. * Note that it uses "prop" i.e. looks in the scala jar, not the system properties. diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/Entity.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/Entity.scala index 3c82654fb515..32a0cbca5840 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/page/Entity.scala +++ b/src/scaladoc/scala/tools/nsc/doc/html/page/Entity.scala @@ -341,7 +341,7 @@ trait EntityPage extends HtmlPage { val postamble = List(Div(id = "tooltip"), if (Set("epfl", "EPFL").contains(tpl.universe.settings.docfooter.value)) - Div(id = "footer", elems = Txt("Scala programming documentation. Copyright (c) 2002-2020 ") :: A(href = "https://www.epfl.ch", target = "_top", elems = Txt("EPFL")) :: Txt(" and ") :: A(href = "https://www.lightbend.com", target = "_top", elems = Txt("Lightbend")) :: Txt(".")) + Div(id = "footer", elems = Txt("Scala programming documentation. 
Copyright (c) 2002-2021 ") :: A(href = "https://www.epfl.ch", target = "_top", elems = Txt("EPFL")) :: Txt(" and ") :: A(href = "https://www.lightbend.com", target = "_top", elems = Txt("Lightbend")) :: Txt(".")) else Div(id = "footer", elems = Txt(tpl.universe.settings.docfooter.value))) diff --git a/src/scalap/decoder.properties b/src/scalap/decoder.properties index 3607f029f024..9ac03dd79c51 100644 --- a/src/scalap/decoder.properties +++ b/src/scalap/decoder.properties @@ -1,2 +1,2 @@ version.number=2.0.1 -copyright.string=(c) 2002-2020 LAMP/EPFL +copyright.string=(c) 2002-2021 LAMP/EPFL From b80853511a12923eb574f6ecab1b3ae776c3850b Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Fri, 23 Apr 2021 14:58:00 +0100 Subject: [PATCH 148/769] Benchmark and simplify AlmostFinalValue (cherry picked from commit 807beb63be1260d08c28b6c520ec8d6d98f5ca99) --- build.sbt | 3 +- .../internal/util/AlmostFinalValue.java | 104 ++++-------------- .../reflect/internal/util/Statistics.scala | 4 +- .../internal/util/StatisticsStatics.java | 48 ++------ .../AlmostFinalValueBenchmarkStatics.java | 12 ++ .../util/AlmostFinalValueBenchmark.scala | 56 ++++++++++ 6 files changed, 103 insertions(+), 124 deletions(-) create mode 100644 test/benchmarks/src/main/java/scala/reflect/internal/util/AlmostFinalValueBenchmarkStatics.java create mode 100644 test/benchmarks/src/main/scala/scala/reflect/internal/util/AlmostFinalValueBenchmark.scala diff --git a/build.sbt b/build.sbt index 02fad2c5148b..40a5311aff18 100644 --- a/build.sbt +++ b/build.sbt @@ -659,7 +659,8 @@ lazy val bench = project.in(file("test") / "benchmarks") .settings( name := "test-benchmarks", libraryDependencies += "org.openjdk.jol" % "jol-core" % "0.6", - scalacOptions ++= Seq("-feature", "-opt:l:inline", "-opt-inline-from:**") + compileOrder := CompileOrder.JavaThenScala, // to allow inlining from Java ("... is defined in a Java source (mixed compilation), no bytecode is available") + scalacOptions ++= Seq("-feature", "-opt:l:inline", "-opt-inline-from:scala/**", "-opt-warnings"), ) lazy val junit = project.in(file("test") / "junit") diff --git a/src/reflect/scala/reflect/internal/util/AlmostFinalValue.java b/src/reflect/scala/reflect/internal/util/AlmostFinalValue.java index 415f91f9a8ff..f9bb24f00a85 100644 --- a/src/reflect/scala/reflect/internal/util/AlmostFinalValue.java +++ b/src/reflect/scala/reflect/internal/util/AlmostFinalValue.java @@ -14,93 +14,35 @@ import java.lang.invoke.MethodHandle; import java.lang.invoke.MethodHandles; -import java.lang.invoke.MethodType; import java.lang.invoke.MutableCallSite; -import java.lang.invoke.SwitchPoint; /** * Represents a value that is wrapped with JVM machinery to allow the JVM - * to speculate on its content and effectively optimize it as if it was final. - * - * This file has been drawn from JSR292 cookbook created by Rémi Forax. - * https://code.google.com/archive/p/jsr292-cookbook/. The explanation of the strategy - * can be found in https://community.oracle.com/blogs/forax/2011/12/17/jsr-292-goodness-almost-static-final-field. - * - * Before copying this file to the repository, I tried to adapt the most important - * parts of this implementation and special case it for `Statistics`, but that - * caused an important performance penalty (~10%). This performance penalty is - * due to the fact that using `static`s for the method handles and all the other + * to speculate on its content and effectively optimize it as if it was a constant. 
+ * + * Originally from the JSR-292 cookbook created by Rémi Forax: + * https://code.google.com/archive/p/jsr292-cookbook/. + * + * Implemented in Java because using `static`s for the method handles and all the other * fields is extremely important for the JVM to correctly optimize the code, and * we cannot do that if we make `Statistics` an object extending `MutableCallSite` - * in Scala. We instead rely on the Java implementation that uses a boxed representation. + * in Scala. + * + * Subsequently specialised for booleans, to avoid needless Boolean boxing. + * + * Finally reworked to default to false and only allow for the value to be toggled on, + * using Rémi Forax's newer "MostlyConstant" as inspiration, in https://github.com/forax/exotic. */ -public class AlmostFinalValue { - private final AlmostFinalCallSite callsite = - new AlmostFinalCallSite(this); - - protected boolean initialValue() { - return false; - } - - public MethodHandle createGetter() { - return callsite.dynamicInvoker(); - } - - public void setValue(boolean value) { - callsite.setValue(value); - } - - private static class AlmostFinalCallSite extends MutableCallSite { - private Boolean value; - private SwitchPoint switchPoint; - private final AlmostFinalValue volatileFinalValue; - private final MethodHandle fallback; - private final Object lock; - - private static final Boolean NONE = null; - private static final MethodHandle FALLBACK; - static { - try { - FALLBACK = MethodHandles.lookup().findVirtual(AlmostFinalCallSite.class, "fallback", - MethodType.methodType(Boolean.TYPE)); - } catch (NoSuchMethodException|IllegalAccessException e) { - throw new AssertionError(e.getMessage(), e); - } - } - - AlmostFinalCallSite(AlmostFinalValue volatileFinalValue) { - super(MethodType.methodType(Boolean.TYPE)); - Object lock = new Object(); - MethodHandle fallback = FALLBACK.bindTo(this); - synchronized(lock) { - value = null; - switchPoint = new SwitchPoint(); - setTarget(fallback); - } - this.volatileFinalValue = volatileFinalValue; - this.lock = lock; - this.fallback = fallback; - } +final class AlmostFinalValue { + private static final MethodHandle K_FALSE = MethodHandles.constant(boolean.class, false); + private static final MethodHandle K_TRUE = MethodHandles.constant(boolean.class, true); + + private final MutableCallSite callsite = new MutableCallSite(K_FALSE); + final MethodHandle invoker = callsite.dynamicInvoker(); - boolean fallback() { - synchronized(lock) { - Boolean value = this.value; - if (value == NONE) { - value = volatileFinalValue.initialValue(); - } - MethodHandle target = switchPoint.guardWithTest(MethodHandles.constant(Boolean.TYPE, value), fallback); - setTarget(target); - return value; - } - } - - void setValue(boolean value) { - synchronized(lock) { - SwitchPoint switchPoint = this.switchPoint; - this.value = value; - this.switchPoint = new SwitchPoint(); - SwitchPoint.invalidateAll(new SwitchPoint[] {switchPoint}); - } - } + void toggleOnAndDeoptimize() { + if (callsite.getTarget() == K_TRUE) return; + callsite.setTarget(K_TRUE); + MutableCallSite.syncAll(new MutableCallSite[] { callsite }); } -} \ No newline at end of file +} diff --git a/src/reflect/scala/reflect/internal/util/Statistics.scala b/src/reflect/scala/reflect/internal/util/Statistics.scala index f3dc3cc57cae..413804a67ad4 100644 --- a/src/reflect/scala/reflect/internal/util/Statistics.scala +++ b/src/reflect/scala/reflect/internal/util/Statistics.scala @@ -296,7 +296,7 @@ quant) @inline final def enabled: Boolean = 
areColdStatsLocallyEnabled def enabled_=(cond: Boolean) = { if (cond && !enabled) { - StatisticsStatics.enableColdStats() + StatisticsStatics.enableColdStatsAndDeoptimize() areColdStatsLocallyEnabled = true } } @@ -305,7 +305,7 @@ quant) @inline final def hotEnabled: Boolean = enabled && areHotStatsLocallyEnabled def hotEnabled_=(cond: Boolean) = { if (cond && enabled && !areHotStatsLocallyEnabled) { - StatisticsStatics.enableHotStats() + StatisticsStatics.enableHotStatsAndDeoptimize() areHotStatsLocallyEnabled = true } } diff --git a/src/reflect/scala/reflect/internal/util/StatisticsStatics.java b/src/reflect/scala/reflect/internal/util/StatisticsStatics.java index dc9021471d87..d2d27a7af6c7 100644 --- a/src/reflect/scala/reflect/internal/util/StatisticsStatics.java +++ b/src/reflect/scala/reflect/internal/util/StatisticsStatics.java @@ -12,7 +12,6 @@ package scala.reflect.internal.util; -import scala.reflect.internal.util.AlmostFinalValue; import java.lang.invoke.MethodHandle; /** @@ -22,46 +21,15 @@ * which helps performance (see docs to find out why). */ public final class StatisticsStatics { - private static final AlmostFinalValue COLD_STATS = new AlmostFinalValue() { - @Override - protected boolean initialValue() { - return false; - } - }; + private static final AlmostFinalValue COLD_STATS = new AlmostFinalValue(); + private static final AlmostFinalValue HOT_STATS = new AlmostFinalValue(); - private static final AlmostFinalValue HOT_STATS = new AlmostFinalValue() { - @Override - protected boolean initialValue() { - return false; - } - }; + private static final MethodHandle COLD_STATS_GETTER = COLD_STATS.invoker; + private static final MethodHandle HOT_STATS_GETTER = HOT_STATS.invoker; - private static final MethodHandle COLD_STATS_GETTER = COLD_STATS.createGetter(); - private static final MethodHandle HOT_STATS_GETTER = HOT_STATS.createGetter(); - - public static boolean areSomeColdStatsEnabled() throws Throwable { - return (boolean) COLD_STATS_GETTER.invokeExact(); - } + public static boolean areSomeColdStatsEnabled() throws Throwable { return (boolean) COLD_STATS_GETTER.invokeExact(); } + public static boolean areSomeHotStatsEnabled() throws Throwable { return (boolean) HOT_STATS_GETTER.invokeExact(); } - public static boolean areSomeHotStatsEnabled() throws Throwable { - return (boolean) HOT_STATS_GETTER.invokeExact(); - } - - public static void enableColdStats() throws Throwable { - if (!areSomeColdStatsEnabled()) - COLD_STATS.setValue(true); - } - - public static void disableColdStats() { - COLD_STATS.setValue(false); - } - - public static void enableHotStats() throws Throwable { - if (!areSomeHotStatsEnabled()) - HOT_STATS.setValue(true); - } - - public static void disableHotStats() { - HOT_STATS.setValue(false); - } + public static void enableColdStatsAndDeoptimize() { COLD_STATS.toggleOnAndDeoptimize(); } + public static void enableHotStatsAndDeoptimize() { HOT_STATS.toggleOnAndDeoptimize(); } } diff --git a/test/benchmarks/src/main/java/scala/reflect/internal/util/AlmostFinalValueBenchmarkStatics.java b/test/benchmarks/src/main/java/scala/reflect/internal/util/AlmostFinalValueBenchmarkStatics.java new file mode 100644 index 000000000000..966adedb44e1 --- /dev/null +++ b/test/benchmarks/src/main/java/scala/reflect/internal/util/AlmostFinalValueBenchmarkStatics.java @@ -0,0 +1,12 @@ +package scala.reflect.internal.util; + +import java.lang.invoke.MethodHandle; + +final class AlmostFinalValueBenchmarkStatics { + static final boolean STATIC_FINAL_FALSE = false; + + private 
static final AlmostFinalValue ALMOST_FINAL_FALSE = new AlmostFinalValue(); + private static final MethodHandle ALMOST_FINAL_FALSE_GETTER = ALMOST_FINAL_FALSE.invoker; + + static boolean isTrue() throws Throwable { return (boolean) ALMOST_FINAL_FALSE_GETTER.invokeExact(); } +} diff --git a/test/benchmarks/src/main/scala/scala/reflect/internal/util/AlmostFinalValueBenchmark.scala b/test/benchmarks/src/main/scala/scala/reflect/internal/util/AlmostFinalValueBenchmark.scala new file mode 100644 index 000000000000..70d69178cb19 --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/reflect/internal/util/AlmostFinalValueBenchmark.scala @@ -0,0 +1,56 @@ +package scala.reflect.internal.util + +import java.util.concurrent.TimeUnit + +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra.Blackhole + +class AlmostFinalValueBenchSettings extends scala.reflect.runtime.Settings { + val flag = new BooleanSetting(false) + + @inline final def isTrue2: Boolean = AlmostFinalValueBenchmarkStatics.isTrue && flag +} + +object AlmostFinalValueBenchSettings { + implicit class SettingsOps(private val settings: AlmostFinalValueBenchSettings) extends AnyVal { + @inline final def isTrue3: Boolean = AlmostFinalValueBenchmarkStatics.isTrue && settings.flag + } + + @inline def isTrue4(settings: AlmostFinalValueBenchSettings): Boolean = + AlmostFinalValueBenchmarkStatics.isTrue && settings.flag +} + +@Warmup(iterations = 10, time = 1, timeUnit = TimeUnit.SECONDS) +@Measurement(iterations = 10, time = 1, timeUnit = TimeUnit.SECONDS) +@Fork(3) +@BenchmarkMode(Array(Mode.AverageTime)) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class AlmostFinalValueBenchmark { + import AlmostFinalValueBenchmarkStatics.STATIC_FINAL_FALSE + val settings = new AlmostFinalValueBenchSettings(); import settings._ + + private def pretendToWorkHard() = Blackhole.consumeCPU(3) + + @Benchmark def bench0_unit = () + @Benchmark def bench0_usingStaticFinalFalse = if (STATIC_FINAL_FALSE && flag) pretendToWorkHard() + @Benchmark def bench0_workingHard = pretendToWorkHard() + + @Benchmark def bench1_usingAlmostFinalFalse = if (AlmostFinalValueBenchmarkStatics.isTrue && flag) pretendToWorkHard() + @Benchmark def bench2_usingInlineMethod = if (settings.isTrue2) pretendToWorkHard() + @Benchmark def bench3_usingExtMethod = if (settings.isTrue3) pretendToWorkHard() + @Benchmark def bench4_usingObjectMethod = if (AlmostFinalValueBenchSettings.isTrue4(settings)) pretendToWorkHard() + +/* + This benchmark is measuring two things: + 1. verifying that using AlmostFinalValue in an if block makes the block a no-op + 2. verifying and comparing which ergonomic wrapper around AlmostFinalValue maintains that + + The first point is satisfied. + + For the second: + 1. inline instance methods add a null-check overhead, slowing it down + 2. extension methods perform as quickly, are very ergonomic and so are the best choice + 3. 
object methods also perform as quickly, but can be less ergonomic if it requires an import +*/ +} From 3b6d6c79117f8e938991f531763349131df5fa5f Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Mon, 26 Apr 2021 17:47:49 +0100 Subject: [PATCH 149/769] Fix outer test making vs annotated types Fixes scala/community-build#1400 --- .../nsc/transform/patmat/MatchTreeMaking.scala | 1 - test/files/run/t11534c.scala | 18 ++++++++++++++++++ 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala index 2d3299112420..27749a6035d8 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala @@ -369,7 +369,6 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { withOuterTest(withOuterTest(orig)(testedBinder, parent))(testedBinder, copyRefinedType(rt, rest, scope)) case expectedTp => val expectedClass = expectedTp.typeSymbol - assert(!expectedClass.isRefinementClass, orig) // .typeSymbol dealiases, so look at the prefix of the base type at the dealiased symbol, // not of expectedTp itself. val expectedPrefix = expectedTp.baseType(expectedClass).prefix diff --git a/test/files/run/t11534c.scala b/test/files/run/t11534c.scala index 4fb201c64b4d..a1fbaf0d72e6 100644 --- a/test/files/run/t11534c.scala +++ b/test/files/run/t11534c.scala @@ -85,6 +85,19 @@ object Test { case _ => false } + + trait ScalaProvider { def loader: Int } + type ScalaProvider2 = { def loaderLibraryOnly: Int } + import scala.language.reflectiveCalls + + def cb1400(provider: ScalaProvider) = try { + provider match { + case p: ScalaProvider2 @unchecked => p.loaderLibraryOnly + } + } catch { + case _: NoSuchMethodException => provider.loader + } + def assertOuter(expected: Int, actual: Int): Unit = { if (expected != actual) throw WrongOuter(expected, actual) } @@ -113,5 +126,10 @@ object Test { assert(pat5(new m1.B2)) assert(pat5(new m2.B2)) + + class SP1 extends ScalaProvider { def loader = 1 } + class SP2 extends ScalaProvider { def loader = 1; def loaderLibraryOnly = 2 } + assert(cb1400(new SP1()) == 1) + assert(cb1400(new SP2()) == 2) } } From 3bd24299fc34e5c3a480206c9798c055ca3a3439 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Mon, 26 Apr 2021 11:05:23 +0100 Subject: [PATCH 150/769] Rework Statistics to be faster & avoid stale state ... by reusing settings, and using their postSetHook to sync their AlmostFinalValue. And use a value class extension method as its API. 
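As a rough illustration of that shape (made-up names only; the real change is in the
MutableSettings and StatisticsStatics diffs below, and a plain volatile var stands in
here for the MethodHandle-backed AlmostFinalValue):

    object SettingsOpsSketch {
      trait MySettings { def statisticsRequested: Boolean }

      // Stand-in for StatisticsStatics: the real flag is a MethodHandle constant
      // the JIT can fold; this sketch just uses a volatile var.
      object Statics { @volatile var someStatsEnabled: Boolean = false }

      // A value class extension method gives `settings.areStatisticsEnabled` syntax
      // without allocating a wrapper; the global flag is checked before the
      // per-run setting.
      implicit class SettingsOps(private val settings: MySettings) extends AnyVal {
        @inline final def areStatisticsEnabled: Boolean =
          Statics.someStatsEnabled && settings.statisticsRequested
      }
    }

    // usage sketch: if (settings.areStatisticsEnabled) statistics.incCounter(c)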
(cherry picked from commit 518e6e076b0a75c4977a876b8ff3d7869f29dcf7) --- src/compiler/scala/tools/nsc/Global.scala | 9 +-- src/compiler/scala/tools/nsc/MainBench.scala | 5 +- .../nsc/backend/jvm/ClassfileWriters.scala | 4 +- .../backend/jvm/GeneratedClassHandler.scala | 2 +- .../tools/nsc/settings/ScalaSettings.scala | 6 +- .../scala/tools/nsc/typechecker/Typers.scala | 2 +- .../internal/settings/MutableSettings.scala | 6 ++ .../reflect/internal/util/Statistics.scala | 61 +++++-------------- .../scala/reflect/runtime/Settings.scala | 1 + 9 files changed, 37 insertions(+), 59 deletions(-) diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index f2f10792e7d6..c9dde2828f5c 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -1272,11 +1272,8 @@ class Global(var currentSettings: Settings, reporter0: Reporter) checkPhaseSettings(including = true, inclusions.toSeq: _*) checkPhaseSettings(including = false, exclusions map (_.value): _*) - // Enable or disable depending on the current setting -- useful for interactive behaviour - statistics.initFromSettings(settings) - // Report the overhead of statistics measurements per every run - if (statistics.areStatisticsLocallyEnabled) + if (settings.areStatisticsEnabled) statistics.reportStatisticsOverhead(reporter) phase = first //parserPhase @@ -1505,7 +1502,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) warnDeprecatedAndConflictingSettings() globalPhase = fromPhase - val timePhases = statistics.areStatisticsLocallyEnabled + val timePhases = settings.areStatisticsEnabled val startTotal = if (timePhases) statistics.startTimer(totalCompileTime) else null while (globalPhase.hasNext && !reporter.hasErrors) { @@ -1552,7 +1549,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) runCheckers() // output collected statistics - if (settings.YstatisticsEnabled && settings.Ystatistics.contains(phase.name)) + if (settings.areStatisticsEnabled && settings.Ystatistics.contains(phase.name)) printStatisticsFor(phase) if (!globalPhase.hasNext || reporter.hasErrors) diff --git a/src/compiler/scala/tools/nsc/MainBench.scala b/src/compiler/scala/tools/nsc/MainBench.scala index d84a2eee6ec8..faeea4e99ace 100644 --- a/src/compiler/scala/tools/nsc/MainBench.scala +++ b/src/compiler/scala/tools/nsc/MainBench.scala @@ -30,9 +30,8 @@ object MainBench extends Driver with EvalLoop { var start = System.nanoTime() for (i <- 0 until NIter) { if (i == NIter-1) { - theCompiler.settings.Ystatistics.value = List("all") - theCompiler.statistics.enabled = true - theCompiler.statistics.hotEnabled = true + theCompiler.settings.Ystatistics.value = List("all") + theCompiler.settings.YhotStatistics.value = true } process(args) val end = System.nanoTime() diff --git a/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriters.scala b/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriters.scala index d8bf14db0557..82dabf72cdd8 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriters.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriters.scala @@ -63,7 +63,7 @@ abstract class ClassfileWriters { def apply(global: Global): ClassfileWriter = { //Note dont import global._ - its too easy to leak non threadsafe structures - import global.{cleanup, log, settings, statistics} + import global.{ cleanup, log, settings } def jarManifestMainClass: Option[String] = settings.mainClass.valueSetByUser.orElse { cleanup.getEntryPoints match { 
case List(name) => Some(name) @@ -91,7 +91,7 @@ abstract class ClassfileWriters { new DebugClassWriter(basicClassWriter, asmp, dump) } - val enableStats = statistics.enabled && settings.YaddBackendThreads.value == 1 + val enableStats = settings.areStatisticsEnabled && settings.YaddBackendThreads.value == 1 if (enableStats) new WithStatsWriter(withAdditionalFormats) else withAdditionalFormats } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala b/src/compiler/scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala index f057544a4333..cf25c348dfbe 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala @@ -59,7 +59,7 @@ private[jvm] object GeneratedClassHandler { new SyncWritingClassHandler(postProcessor) case maxThreads => - if (statistics.enabled) + if (settings.areStatisticsEnabled) runReporting.warning(NoPosition, "jvm statistics are not reliable with multi-threaded jvm class writing", WarningCategory.Other, site = "") val additionalThreads = maxThreads - 1 // The thread pool queue is limited in size. When it's full, the `CallerRunsPolicy` causes diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index 46e9497cebc5..a769f3bb09ed 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -23,6 +23,7 @@ import scala.language.existentials import scala.annotation.elidable import scala.tools.util.PathResolver.Defaults import scala.collection.mutable +import scala.reflect.internal.util.StatisticsStatics import scala.tools.nsc.util.DefaultJarFactory trait ScalaSettings extends StandardScalaSettings with Warnings { _: MutableSettings => @@ -426,9 +427,12 @@ trait ScalaSettings extends StandardScalaSettings with Warnings { _: MutableSett val YoptLogInline = StringSetting("-Yopt-log-inline", "package/Class.method", "Print a summary of inliner activity; `_` to print all, prefix match to select.", "") val Ystatistics = PhasesSetting("-Ystatistics", "Print compiler statistics for specific phases", "parser,typer,patmat,erasure,cleanup,jvm") - override def YstatisticsEnabled = Ystatistics.value.nonEmpty + .withPostSetHook(s => if (s.value.nonEmpty) StatisticsStatics.enableColdStatsAndDeoptimize()) val YhotStatistics = BooleanSetting("-Yhot-statistics-enabled", s"Enable `${Ystatistics.name}` to print hot statistics.") + .withPostSetHook(s => if (s && YstatisticsEnabled) StatisticsStatics.enableHotStatsAndDeoptimize()) + + override def YstatisticsEnabled = Ystatistics.value.nonEmpty override def YhotStatisticsEnabled = YhotStatistics.value val YprofileEnabled = BooleanSetting("-Yprofile-enabled", "Enable profiling.") diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index a0bc729890eb..d6bf80ccf0db 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -5756,7 +5756,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def typed(tree: Tree, mode: Mode, pt: Type): Tree = { lastTreeToTyper = tree - val statsEnabled = StatisticsStatics.areSomeHotStatsEnabled() && statistics.areHotStatsLocallyEnabled + val statsEnabled = StatisticsStatics.areSomeHotStatsEnabled && settings.areStatisticsEnabled && settings.YhotStatisticsEnabled val startByType = 
if (statsEnabled) statistics.pushTimer(byTypeStack, byTypeNanos(tree.getClass)) else null if (statsEnabled) statistics.incCounter(visitsByType, tree.getClass) val shouldPrintTyping = printTypings && !phase.erasedTypes && !noPrintTyping(tree) diff --git a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala index 6d50c6ab4176..6e810d6c6970 100644 --- a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala +++ b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala @@ -16,6 +16,8 @@ package scala package reflect.internal package settings +import scala.reflect.internal.util.StatisticsStatics + /** A mutable Settings object. */ abstract class MutableSettings extends AbsSettings { @@ -82,4 +84,8 @@ object MutableSettings { import scala.language.implicitConversions /** Support the common use case, `if (settings.debug) println("Hello, martin.")` */ @inline implicit def reflectSettingToBoolean(s: MutableSettings#BooleanSetting): Boolean = s.value + + implicit class SettingsOps(private val settings: MutableSettings) extends AnyVal { + @inline final def areStatisticsEnabled = StatisticsStatics.areSomeColdStatsEnabled && settings.YstatisticsEnabled + } } diff --git a/src/reflect/scala/reflect/internal/util/Statistics.scala b/src/reflect/scala/reflect/internal/util/Statistics.scala index 413804a67ad4..94cc82d8bc7c 100644 --- a/src/reflect/scala/reflect/internal/util/Statistics.scala +++ b/src/reflect/scala/reflect/internal/util/Statistics.scala @@ -21,57 +21,49 @@ import java.util.concurrent.atomic.{AtomicInteger, AtomicLong} import scala.runtime.LongRef abstract class Statistics(val symbolTable: SymbolTable, settings: MutableSettings) { - - initFromSettings(settings) - - def initFromSettings(currentSettings: MutableSettings): Unit = { - enabled = currentSettings.YstatisticsEnabled - hotEnabled = currentSettings.YhotStatisticsEnabled - } - type TimerSnapshot = (Long, Long) /** If enabled, increment counter by one */ - @inline final def incCounter(c: Counter) { - if (areStatisticsLocallyEnabled && c != null) c.value += 1 + @inline final def incCounter(c: Counter): Unit = { + if (enabled && c != null) c.value += 1 } /** If enabled, increment counter by given delta */ - @inline final def incCounter(c: Counter, delta: Int) { - if (areStatisticsLocallyEnabled && c != null) c.value += delta + @inline final def incCounter(c: Counter, delta: Int): Unit = { + if (enabled && c != null) c.value += delta } /** If enabled, increment counter in map `ctrs` at index `key` by one */ @inline final def incCounter[K](ctrs: QuantMap[K, Counter], key: K) = - if (areStatisticsLocallyEnabled && ctrs != null) ctrs(key).value += 1 + if (enabled && ctrs != null) ctrs(key).value += 1 /** If enabled, start subcounter. While active it will track all increments of * its base counter. */ @inline final def startCounter(sc: SubCounter): (Int, Int) = - if (areStatisticsLocallyEnabled && sc != null) sc.start() else null + if (enabled && sc != null) sc.start() else null /** If enabled, stop subcounter from tracking its base counter. 
*/ - @inline final def stopCounter(sc: SubCounter, start: (Int, Int)) { - if (areStatisticsLocallyEnabled && sc != null) sc.stop(start) + @inline final def stopCounter(sc: SubCounter, start: (Int, Int)): Unit = { + if (enabled && sc != null) sc.stop(start) } /** If enabled, start timer */ @inline final def startTimer(tm: Timer): TimerSnapshot = - if (areStatisticsLocallyEnabled && tm != null) tm.start() else null + if (enabled && tm != null) tm.start() else null /** If enabled, stop timer */ - @inline final def stopTimer(tm: Timer, start: TimerSnapshot) { - if (areStatisticsLocallyEnabled && tm != null) tm.stop(start) + @inline final def stopTimer(tm: Timer, start: TimerSnapshot): Unit = { + if (enabled && tm != null) tm.stop(start) } /** If enabled, push and start a new timer in timer stack */ @inline final def pushTimer(timers: TimerStack, timer: => StackableTimer): TimerSnapshot = - if (areStatisticsLocallyEnabled && timers != null) timers.push(timer) else null + if (enabled && timers != null) timers.push(timer) else null /** If enabled, stop and pop timer from timer stack */ - @inline final def popTimer(timers: TimerStack, prev: TimerSnapshot) { - if (areStatisticsLocallyEnabled && timers != null) timers.pop(prev) + @inline final def popTimer(timers: TimerStack, prev: TimerSnapshot): Unit = { + if (enabled && timers != null) timers.pop(prev) } /** Create a new counter that shows as `prefix` and is active in given phases */ @@ -288,30 +280,9 @@ quant) } } - private val qs = new mutable.HashMap[String, Quantity] - private[scala] var areColdStatsLocallyEnabled: Boolean = false - private[scala] var areHotStatsLocallyEnabled: Boolean = false - - /** Represents whether normal statistics can or cannot be enabled. */ - @inline final def enabled: Boolean = areColdStatsLocallyEnabled - def enabled_=(cond: Boolean) = { - if (cond && !enabled) { - StatisticsStatics.enableColdStatsAndDeoptimize() - areColdStatsLocallyEnabled = true - } - } - - /** Represents whether hot statistics can or cannot be enabled. */ - @inline final def hotEnabled: Boolean = enabled && areHotStatsLocallyEnabled - def hotEnabled_=(cond: Boolean) = { - if (cond && enabled && !areHotStatsLocallyEnabled) { - StatisticsStatics.enableHotStatsAndDeoptimize() - areHotStatsLocallyEnabled = true - } - } + private[this] val qs = new mutable.HashMap[String, Quantity] - /** Tells whether statistics should be definitely reported to the user for this `Global` instance. */ - @inline final def areStatisticsLocallyEnabled: Boolean = areColdStatsLocallyEnabled + @inline final def enabled: Boolean = settings.areStatisticsEnabled import scala.reflect.internal.Reporter /** Reports the overhead of measuring statistics via the nanoseconds variation. */ diff --git a/src/reflect/scala/reflect/runtime/Settings.scala b/src/reflect/scala/reflect/runtime/Settings.scala index d36e6c8bc6e8..3f4cfa0e1c5a 100644 --- a/src/reflect/scala/reflect/runtime/Settings.scala +++ b/src/reflect/scala/reflect/runtime/Settings.scala @@ -15,6 +15,7 @@ package reflect package runtime import scala.reflect.internal.settings.MutableSettings +import scala.reflect.internal.util.StatisticsStatics /** The Settings class for runtime reflection. 
* This should be refined, so that settings are settable via command From 068859e8afd83fc7e0c94879afc288b28a17d45d Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Thu, 15 Apr 2021 09:08:14 +0100 Subject: [PATCH 151/769] Put all debug/developer behind an AlmostFinalValue false (cherry picked from commit 97ca3aaae3cf2f1dd1d1c0351e2a7c3d98e78f9b) --- .../scala/tools/nsc/CompilerCommand.scala | 2 +- src/compiler/scala/tools/nsc/Global.scala | 18 ++++++------- .../scala/tools/nsc/MainTokenMetric.scala | 2 +- .../scala/tools/nsc/ast/Positions.scala | 2 +- .../nsc/backend/jvm/BCodeSkelBuilder.scala | 2 +- .../nsc/backend/jvm/BTypesFromSymbols.scala | 2 +- .../scala/tools/nsc/backend/jvm/CodeGen.scala | 2 +- .../jvm/PostProcessorFrontendAccess.scala | 2 +- .../scala/tools/nsc/plugins/Plugins.scala | 2 +- .../scala/tools/nsc/reporters/Reporter.scala | 2 +- .../tools/nsc/settings/ScalaSettings.scala | 4 +-- .../tools/nsc/symtab/SymbolLoaders.scala | 6 ++--- .../tools/nsc/symtab/SymbolTrackers.scala | 2 +- .../symtab/classfile/ClassfileParser.scala | 11 ++++---- .../tools/nsc/symtab/classfile/Pickler.scala | 2 +- .../scala/tools/nsc/transform/CleanUp.scala | 2 +- .../scala/tools/nsc/transform/Erasure.scala | 4 +-- .../scala/tools/nsc/transform/Mixin.scala | 2 +- .../transform/TypeAdaptingTransformer.scala | 2 +- .../nsc/transform/async/AsyncPhase.scala | 2 +- .../tools/nsc/typechecker/Implicits.scala | 4 +-- .../scala/tools/nsc/typechecker/Infer.scala | 2 +- .../scala/tools/nsc/typechecker/Namers.scala | 9 ++++--- .../tools/nsc/typechecker/RefChecks.scala | 6 ++--- .../tools/nsc/typechecker/TreeCheckers.scala | 8 +++--- .../nsc/typechecker/TypeDiagnostics.scala | 2 +- .../tools/nsc/typechecker/TypeStrings.scala | 2 +- .../scala/tools/nsc/typechecker/Typers.scala | 4 +-- .../nsc/typechecker/TypersTracking.scala | 6 ++--- .../scala/tools/reflect/ToolBoxFactory.scala | 4 +-- .../scala/tools/nsc/interactive/REPL.scala | 2 +- .../scala/reflect/internal/Kinds.scala | 4 +-- .../scala/reflect/internal/Mirrors.scala | 2 +- .../scala/reflect/internal/Printers.scala | 4 +-- .../scala/reflect/internal/SymbolTable.scala | 7 ++--- .../scala/reflect/internal/Symbols.scala | 18 ++++++------- .../scala/reflect/internal/Trees.scala | 2 +- .../scala/reflect/internal/Types.scala | 26 +++++++++---------- .../reflect/internal/pickling/UnPickler.scala | 2 -- .../internal/settings/MutableSettings.scala | 2 ++ .../scala/reflect/internal/tpe/GlbLubs.scala | 2 +- .../reflect/internal/tpe/TypeComparers.scala | 2 +- .../internal/tpe/TypeConstraints.scala | 4 +-- .../reflect/internal/tpe/TypeToStrings.scala | 2 +- .../internal/util/StatisticsStatics.java | 8 ++++++ .../scala/reflect/runtime/JavaMirrors.scala | 2 +- .../scala/reflect/runtime/Settings.scala | 4 +-- .../scala/reflect/runtime/SymbolTable.scala | 2 +- .../scala/tools/nsc/interpreter/ILoop.scala | 4 +-- src/scaladoc/scala/tools/nsc/ScalaDoc.scala | 2 +- .../tools/nsc/doc/ScaladocAnalyzer.scala | 2 +- .../scala/tools/nsc/doc/Uncompilable.scala | 4 ++- .../files/run/t11802-pluginsdir/ploogin.scala | 2 +- .../run/t4841-isolate-plugins/ploogin.scala | 2 +- 54 files changed, 122 insertions(+), 109 deletions(-) diff --git a/src/compiler/scala/tools/nsc/CompilerCommand.scala b/src/compiler/scala/tools/nsc/CompilerCommand.scala index 6b952b5a1b85..6da3b3cb20b8 100644 --- a/src/compiler/scala/tools/nsc/CompilerCommand.scala +++ b/src/compiler/scala/tools/nsc/CompilerCommand.scala @@ -105,7 +105,7 @@ class CompilerCommand(arguments: List[String], val settings: Settings) { else 
if (Yhelp) yusageMsg else if (showPlugins) global.pluginDescriptions else if (showPhases) global.phaseDescriptions + ( - if (debug) "\n" + global.phaseFlagDescriptions else "" + if (settings.isDebug) "\n" + global.phaseFlagDescriptions else "" ) else if (genPhaseGraph.isSetByUser) { val components = global.phaseNames // global.phaseDescriptors // one initializes diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index c9dde2828f5c..c04e79c61160 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -280,8 +280,8 @@ class Global(var currentSettings: Settings, reporter0: Reporter) // ------------------ Debugging ------------------------------------- - @inline final def ifDebug(body: => Unit) { - if (settings.debug) + @inline final def ifDebug(body: => Unit): Unit = { + if (settings.isDebug) body } @@ -311,8 +311,8 @@ class Global(var currentSettings: Settings, reporter0: Reporter) inform(s"[log $globalPhase$atPhaseStackMessage] $msg") } - @inline final override def debuglog(msg: => String) { - if (settings.debug) + @inline final override def debuglog(msg: => String): Unit = { + if (settings.isDebug) log(msg) } @@ -417,7 +417,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) if ((unit ne null) && unit.exists) lastSeenSourceFile = unit.source - if (settings.debug && (settings.verbose || currentRun.size < 5)) + if (settings.isDebug && (settings.verbose || currentRun.size < 5)) inform("[running phase " + name + " on " + unit + "]") } @@ -710,7 +710,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) protected def computePhaseDescriptors: List[SubComponent] = { /** Allow phases to opt out of the phase assembly. */ def cullPhases(phases: List[SubComponent]) = { - val enabled = if (settings.debug && settings.isInfo) phases else phases filter (_.enabled) + val enabled = if (settings.isDebug && settings.isInfo) phases else phases filter (_.enabled) def isEnabled(q: String) = enabled exists (_.phaseName == q) val (satisfied, unhappy) = enabled partition (_.requires forall isEnabled) unhappy foreach (u => globalError(s"Phase '${u.phaseName}' requires: ${u.requires filterNot isEnabled}")) @@ -741,7 +741,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) } /** A description of the phases that will run in this configuration, or all if -Ydebug. */ - def phaseDescriptions: String = phaseHelp("description", elliptically = !settings.debug, phasesDescMap) + def phaseDescriptions: String = phaseHelp("description", elliptically = !settings.isDebug, phasesDescMap) /** Summary of the per-phase values of nextFlags and newFlags, shown under -Xshow-phases -Ydebug. */ def phaseFlagDescriptions: String = { @@ -752,7 +752,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) else if (ph.phaseNewFlags != 0L && ph.phaseNextFlags != 0L) fstr1 + " " + fstr2 else fstr1 + fstr2 } - phaseHelp("new flags", elliptically = !settings.debug, fmt) + phaseHelp("new flags", elliptically = !settings.isDebug, fmt) } /** Emit a verbose phase table. @@ -1102,7 +1102,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) def echoPhaseSummary(ph: Phase) = { /* Only output a summary message under debug if we aren't echoing each file. 
*/ - if (settings.debug && !(settings.verbose || currentRun.size < 5)) + if (settings.isDebug && !(settings.verbose || currentRun.size < 5)) inform("[running phase " + ph.name + " on " + currentRun.size + " compilation units]") } diff --git a/src/compiler/scala/tools/nsc/MainTokenMetric.scala b/src/compiler/scala/tools/nsc/MainTokenMetric.scala index 7fb1677420c7..19da94f879dd 100644 --- a/src/compiler/scala/tools/nsc/MainTokenMetric.scala +++ b/src/compiler/scala/tools/nsc/MainTokenMetric.scala @@ -50,7 +50,7 @@ object MainTokenMetric { tokenMetric(compiler, command.files) } catch { case ex @ FatalError(msg) => - if (command.settings.debug) + if (command.settings.isDebug) ex.printStackTrace() reporter.error(null, "fatal error: " + msg) } diff --git a/src/compiler/scala/tools/nsc/ast/Positions.scala b/src/compiler/scala/tools/nsc/ast/Positions.scala index ae5378c55dd3..ceab1abdcff9 100644 --- a/src/compiler/scala/tools/nsc/ast/Positions.scala +++ b/src/compiler/scala/tools/nsc/ast/Positions.scala @@ -39,6 +39,6 @@ trait Positions extends scala.reflect.internal.Positions { } override protected[this] lazy val posAssigner: PosAssigner = - if (settings.Yrangepos && settings.debug || settings.Yposdebug) new ValidatingPosAssigner + if (settings.Yrangepos && settings.isDebug || settings.Yposdebug) new ValidatingPosAssigner else new DefaultPosAssigner } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala index a4eeb800505e..5bd3c080ffc7 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala @@ -613,7 +613,7 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { case Return(_) | Block(_, Return(_)) | Throw(_) | Block(_, Throw(_)) => () case EmptyTree => globalError("Concrete method has no definition: " + dd + ( - if (settings.debug) "(found: " + methSymbol.owner.info.decls.toList.mkString(", ") + ")" + if (settings.isDebug) "(found: " + methSymbol.owner.info.decls.toList.mkString(", ") + ")" else "")) case _ => bc emitRETURN returnType diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala index 9ebec8891631..a49c8604bc22 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala @@ -91,7 +91,7 @@ abstract class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { assert(classSym != NoSymbol, "Cannot create ClassBType from NoSymbol") assert(classSym.isClass, s"Cannot create ClassBType from non-class symbol $classSym") - if (global.settings.debug) { + if (global.settings.isDebug) { // OPT these assertions have too much performance overhead to run unconditionally assertClassNotArrayNotPrimitive(classSym) assert(!primitiveTypeToBType.contains(classSym) || isCompilingPrimitive, s"Cannot create ClassBType for primitive class symbol $classSym") diff --git a/src/compiler/scala/tools/nsc/backend/jvm/CodeGen.scala b/src/compiler/scala/tools/nsc/backend/jvm/CodeGen.scala index 16ee1d2ca625..28ff39917c02 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/CodeGen.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/CodeGen.scala @@ -56,7 +56,7 @@ abstract class CodeGen[G <: Global](val global: G) extends PerRunInit { } catch { case ex: InterruptedException => throw ex case ex: Throwable => - if (settings.debug) ex.printStackTrace() + if 
(settings.isDebug) ex.printStackTrace() globalError(s"Error while emitting ${unit.source}\n${ex.getMessage}") } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/PostProcessorFrontendAccess.scala b/src/compiler/scala/tools/nsc/backend/jvm/PostProcessorFrontendAccess.scala index 52fb9ff3b36d..2853fec3fe37 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/PostProcessorFrontendAccess.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/PostProcessorFrontendAccess.scala @@ -182,7 +182,7 @@ object PostProcessorFrontendAccess { private def buildCompilerSettings(): CompilerSettings = new CompilerSettings { import global.{settings => s} - val debug: Boolean = s.debug + @inline def debug: Boolean = s.isDebug val target: String = s.target.value diff --git a/src/compiler/scala/tools/nsc/plugins/Plugins.scala b/src/compiler/scala/tools/nsc/plugins/Plugins.scala index 22d397a469a8..289436f48892 100644 --- a/src/compiler/scala/tools/nsc/plugins/Plugins.scala +++ b/src/compiler/scala/tools/nsc/plugins/Plugins.scala @@ -159,7 +159,7 @@ trait Plugins { global: Global => } globalError("bad option: -P:" + opt) // Plugins may opt out, unless we just want to show info - plugs filter (p => p.init(p.options, globalError) || (settings.debug && settings.isInfo)) + plugs filter (p => p.init(p.options, globalError) || (settings.isDebug && settings.isInfo)) } lazy val plugins: List[Plugin] = loadPlugins() diff --git a/src/compiler/scala/tools/nsc/reporters/Reporter.scala b/src/compiler/scala/tools/nsc/reporters/Reporter.scala index 51252abdbc6f..15be43940c9e 100644 --- a/src/compiler/scala/tools/nsc/reporters/Reporter.scala +++ b/src/compiler/scala/tools/nsc/reporters/Reporter.scala @@ -107,7 +107,7 @@ abstract class FilteringReporter extends Reporter { // Invoked when an error or warning is filtered by position. 
@inline def suppress = { if (settings.prompt) doReport(pos, msg, severity) - else if (settings.debug) doReport(pos, s"[ suppressed ] $msg", severity) + else if (settings.isDebug) doReport(pos, s"[ suppressed ] $msg", severity) Suppress } if (!duplicateOk(pos, severity, msg)) suppress else if (!maxOk) Count else Display diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index a769f3bb09ed..4da321c33885 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -116,7 +116,7 @@ trait ScalaSettings extends StandardScalaSettings with Warnings { _: MutableSett val Xhelp = BooleanSetting ("-X", "Print a synopsis of advanced options.") val async = BooleanSetting ("-Xasync", "Enable the async phase for scala.async.Async.{async,await}.") val checkInit = BooleanSetting ("-Xcheckinit", "Wrap field accessors to throw an exception on uninitialized access.") - val developer = BooleanSetting ("-Xdev", "Indicates user is a developer - issue warnings about anything which seems amiss") + val developer = BooleanSetting ("-Xdev", "Indicates user is a developer - issue warnings about anything which seems amiss").withPostSetHook(s => if (s.value) StatisticsStatics.enableDeveloperAndDeoptimize()) val noassertions = BooleanSetting ("-Xdisable-assertions", "Generate no assertions or assumptions.") andThen (flag => if (flag) elidebelow.value = elidable.ASSERTION + 1) val elidebelow = IntSetting ("-Xelide-below", "Calls to @elidable methods are omitted if method priority is lower than argument", @@ -208,7 +208,7 @@ trait ScalaSettings extends StandardScalaSettings with Warnings { _: MutableSett val Yshow = PhasesSetting ("-Yshow", "(Requires -Xshow-class or -Xshow-object) Show after") val Ycompacttrees = BooleanSetting ("-Ycompact-trees", "Use compact tree printer when displaying trees.") val noCompletion = BooleanSetting ("-Yno-completion", "Disable tab-completion in the REPL.") - val debug = BooleanSetting ("-Ydebug", "Increase the quantity of debugging output.") + val debug = BooleanSetting ("-Ydebug", "Increase the quantity of debugging output.").withPostSetHook(s => if (s.value) StatisticsStatics.enableDebugAndDeoptimize()) val termConflict = ChoiceSetting ("-Yresolve-term-conflict", "strategy", "Resolve term conflicts.", List("package", "object", "error"), "error") val log = PhasesSetting ("-Ylog", "Log operations during") val Ylogcp = BooleanSetting ("-Ylog-classpath", "Output information about what classpath is being applied.") diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala index a96d94bb9e3b..9b1448e1ff83 100644 --- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala +++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala @@ -57,8 +57,8 @@ abstract class SymbolLoaders { member } - protected def signalError(root: Symbol, ex: Throwable) { - if (settings.debug) ex.printStackTrace() + protected def signalError(root: Symbol, ex: Throwable): Unit = { + if (settings.isDebug) ex.printStackTrace() globalError(ex.getMessage() match { case null => "i/o error while loading " + root.name case msg => "error while loading " + root.name + ", " + msg @@ -371,4 +371,4 @@ abstract class SymbolLoaders { /** used from classfile parser to avoid cycles */ var parentsLevel = 0 var pendingLoadActions: List[() => Unit] = Nil -} \ No newline at end of file +} diff --git 
a/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala b/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala index 102fe0549093..7642e496c9f2 100644 --- a/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala +++ b/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala @@ -134,7 +134,7 @@ trait SymbolTrackers { else " (" + Flags.flagsToString(masked) + ")" } def symString(sym: Symbol) = ( - if (settings.debug && sym.hasCompleteInfo) { + if (settings.isDebug && sym.hasCompleteInfo) { val s = sym.defString take 240 if (s.length == 240) s + "..." else s } diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index 6f03537aa359..1607d90f3c42 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -105,11 +105,11 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { } private def handleMissing(e: MissingRequirementError) = { - if (settings.debug) e.printStackTrace + if (settings.isDebug) e.printStackTrace throw new IOException(s"Missing dependency '${e.req}', required by $file") } private def handleError(e: Exception) = { - if (settings.debug) e.printStackTrace() + if (settings.isDebug) e.printStackTrace() throw new IOException(s"class file '$file' is broken\n(${e.getClass}/${e.getMessage})") } private def mismatchError(c: Symbol) = { @@ -399,7 +399,8 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { // - better owner than `NoSymbol` // - remove eager warning val msg = s"Class $name not found - continuing with a stub." - if ((!settings.isScaladoc) && (settings.verbose || settings.developer)) loaders.warning(NoPosition, msg, WarningCategory.OtherDebug, clazz.fullNameString) + if ((!settings.isScaladoc) && (settings.verbose || settings.isDeveloper)) + loaders.warning(NoPosition, msg, WarningCategory.OtherDebug, clazz.fullNameString) NoSymbol.newStubSymbol(name.toTypeName, msg) } @@ -450,7 +451,7 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { case ex: FatalError => // getClassByName can throw a MissingRequirementError (which extends FatalError) // definitions.getMember can throw a FatalError, for example in pos/t5165b - if (settings.debug) + if (settings.isDebug) ex.printStackTrace() stubClassSymbol(newTypeName(name)) } @@ -991,7 +992,7 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { // with a `FatalError` exception, handled above. Here you'd end up after a NPE (for example), // and that should never be swallowed silently. loaders.warning(NoPosition, s"Caught: $ex while parsing annotations in ${file}", WarningCategory.Other, clazz.fullNameString) - if (settings.debug) ex.printStackTrace() + if (settings.isDebug) ex.printStackTrace() None // ignore malformed annotations } diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala index 37bac0223db8..4b4a075af69c 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala @@ -111,7 +111,7 @@ abstract class Pickler extends SubComponent { // // OPT: do this only as a recovery after fatal error. Checking in advance was expensive. 
if (t.isErroneous) { - if (settings.debug) e.printStackTrace() + if (settings.isDebug) e.printStackTrace() reporter.error(t.pos, "erroneous or inaccessible type") return } diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala index da03bb29933a..3cec99c6f01f 100644 --- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala +++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala @@ -370,7 +370,7 @@ abstract class CleanUp extends Statics with Transform with ast.TreeDSL { /* For testing purposes, the dynamic application's condition * can be printed-out in great detail. Remove? */ - if (settings.debug) { + if (settings.isDebug) { def paramsToString(xs: Any*) = xs map (_.toString) mkString ", " val mstr = ad.symbol.tpe match { case MethodType(mparams, resType) => diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index d3cdab24d72b..fde585842b4e 100644 --- a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -91,7 +91,7 @@ abstract class Erasure extends InfoTransform } } - override protected def verifyJavaErasure = settings.Xverify || settings.debug + override protected def verifyJavaErasure = settings.Xverify || settings.isDebug private def needsJavaSig(sym: Symbol, tp: Type, throwsArgs: List[Type]) = !settings.Ynogenericsig && { def needs(tp: Type) = NeedsSigCollector(sym.isClassConstructor).collect(tp) needs(tp) || throwsArgs.exists(needs) @@ -521,7 +521,7 @@ abstract class Erasure extends InfoTransform clashErrors += Tuple2(pos, msg) } for (bc <- root.baseClasses) { - if (settings.debug) + if (settings.isDebug) exitingPostErasure(println( sm"""check bridge overrides in $bc |${bc.info.nonPrivateDecl(bridge.name)} diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala index 3c95b24f0379..4f3eed01a0d3 100644 --- a/src/compiler/scala/tools/nsc/transform/Mixin.scala +++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala @@ -109,7 +109,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL with AccessorSynthes debuglog("starting rebindsuper " + base + " " + member + ":" + member.tpe + " " + mixinClass + " " + base.info.baseClasses + "/" + bcs) while (!bcs.isEmpty && sym == NoSymbol) { - if (settings.debug) { + if (settings.isDebug) { val other = bcs.head.info.nonPrivateDecl(member.name) debuglog("rebindsuper " + bcs.head + " " + other + " " + other.tpe + " " + other.isDeferred) diff --git a/src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala b/src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala index 91af26a73a88..cc3be2be91c5 100644 --- a/src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala +++ b/src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala @@ -116,7 +116,7 @@ trait TypeAdaptingTransformer { self: TreeDSL => * @pre pt eq pt.normalize */ final def cast(tree: Tree, pt: Type): Tree = { - if (settings.debug && (tree.tpe ne null) && !(tree.tpe =:= ObjectTpe)) { + if (settings.isDebug && (tree.tpe ne null) && !(tree.tpe =:= ObjectTpe)) { def word = if (tree.tpe <:< pt) "upcast" else if (pt <:< tree.tpe) "downcast" diff --git a/src/compiler/scala/tools/nsc/transform/async/AsyncPhase.scala b/src/compiler/scala/tools/nsc/transform/async/AsyncPhase.scala index ae4c81727ca3..7aebeddcc626 100644 --- a/src/compiler/scala/tools/nsc/transform/async/AsyncPhase.scala 
+++ b/src/compiler/scala/tools/nsc/transform/async/AsyncPhase.scala @@ -191,7 +191,7 @@ abstract class AsyncPhase extends Transform with TypingTransformers with AnfTran val applyBody = atPos(asyncPos)(asyncBlock.onCompleteHandler) // Logging - if ((settings.debug.value && shouldLogAtThisPhase)) + if ((settings.isDebug && shouldLogAtThisPhase)) logDiagnostics(anfTree, asyncBlock, asyncBlock.asyncStates.map(_.toString)) // Offer async frontends a change to produce the .dot diagram transformState.dotDiagram(applySym, asyncBody).foreach(f => f(asyncBlock.toDot)) diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index 7309cf5d9f3c..d982c29da1e7 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -1420,7 +1420,7 @@ trait Implicits { if (args contains EmptyTree) EmptyTree else typedPos(tree.pos.focus) { val mani = gen.mkManifestFactoryCall(full, constructor, tparg, args.toList) - if (settings.debug) println("generated manifest: "+mani) // DEBUG + if (settings.isDebug) println("generated manifest: "+mani) // DEBUG mani } @@ -1638,7 +1638,7 @@ trait Implicits { } } - if (result.isFailure && settings.debug) // debuglog is not inlined for some reason + if (result.isFailure && settings.isDebug) // debuglog is not inlined for some reason log(s"no implicits found for ${pt} ${pt.typeSymbol.info.baseClasses} ${implicitsOfExpectedType}") result diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index dde7ff56feb3..36b10cf6f8f0 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -217,7 +217,7 @@ trait Infer extends Checkable { // When filtering sym down to the accessible alternatives leaves us empty handed. private def checkAccessibleError(tree: Tree, sym: Symbol, pre: Type, site: Tree): Tree = { - if (settings.debug) { + if (settings.isDebug) { Console.println(context) Console.println(tree) Console.println("" + pre + " " + sym.owner + " " + context.owner + " " + context.outer.enclClass.owner + " " + sym.owner.thisType + (pre =:= sym.owner.thisType)) diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 5cad833c0bcc..e4996fc5293c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -1934,11 +1934,12 @@ trait Namers extends MethodSynthesis { } class LogTransitions[S](onEnter: S => String, onExit: S => String) { - val enabled = settings.debug.value @inline final def apply[T](entity: S)(body: => T): T = { - if (enabled) log(onEnter(entity)) - try body - finally if (enabled) log(onExit(entity)) + if (settings.isDebug) { + log(onEnter(entity)) + try body + finally log(onExit(entity)) + } else body } } private val logDefinition = new LogTransitions[Symbol]( diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index 122d85d7f2eb..ea5f8295dc25 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -173,7 +173,7 @@ abstract class RefChecks extends Transform { } // This has become noisy with implicit classes. 
- if (settings.warnPolyImplicitOverload && settings.developer) { + if (settings.isDeveloper && settings.warnPolyImplicitOverload) { clazz.info.decls.foreach(sym => if (sym.isImplicit && sym.typeParams.nonEmpty) { // implicit classes leave both a module symbol and a method symbol as residue val alts = clazz.info.decl(sym.name).alternatives filterNot (_.isModule) @@ -352,7 +352,7 @@ abstract class RefChecks extends Transform { infoStringWithLocation(other), infoStringWithLocation(member) ) - else if (settings.debug) + else if (settings.isDebug) analyzer.foundReqMsg(member.tpe, other.tpe) else "" @@ -1934,7 +1934,7 @@ abstract class RefChecks extends Transform { result1 } catch { case ex: TypeError => - if (settings.debug) ex.printStackTrace() + if (settings.isDebug) ex.printStackTrace() reporter.error(tree.pos, ex.getMessage()) tree } finally { diff --git a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala index 0cff0d2098c1..17fc1592a0dc 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala @@ -130,8 +130,8 @@ abstract class TreeCheckers extends Analyzer { def reportChanges(): Unit = { // new symbols if (newSyms.nonEmpty) { - informFn(newSyms.size + " new symbols.") - val toPrint = if (settings.debug) sortedNewSyms mkString " " else "" + informFn("" + newSyms.size + " new symbols.") + val toPrint = if (settings.isDebug) sortedNewSyms mkString " " else "" newSyms.clear() if (toPrint != "") @@ -177,8 +177,8 @@ abstract class TreeCheckers extends Analyzer { def errorFn(pos: Position, msg: Any): Unit = runReporting.warning(pos, "[check: %s] %s".format(phase.prev, msg), WarningCategory.OtherDebug, site = "") def errorFn(msg: Any): Unit = errorFn(NoPosition, msg) - def informFn(msg: Any) { - if (settings.verbose || settings.debug) + def informFn(msg: Any): Unit = { + if (settings.verbose || settings.isDebug) println("[check: %s] %s".format(phase.prev, msg)) } diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala index b8ea92eb04b0..d4a5f1f6d59b 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala @@ -796,7 +796,7 @@ trait TypeDiagnostics { // but it seems that throwErrors excludes some of the errors that should actually be // buffered, causing TypeErrors to fly around again. This needs some more investigation. 
if (!context0.reportErrors) throw ex - if (settings.debug) ex.printStackTrace() + if (settings.isDebug) ex.printStackTrace() ex match { case CyclicReference(sym, info: TypeCompleter) => diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeStrings.scala b/src/compiler/scala/tools/nsc/typechecker/TypeStrings.scala index 0b0bd0910cf0..f0da3193040a 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypeStrings.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypeStrings.scala @@ -57,7 +57,7 @@ trait StructuredTypeStrings extends DestructureTypes { else block(level, grouping)(name, nodes) } private def shortClass(x: Any) = { - if (settings.debug) { + if (settings.isDebug) { val name = (x.getClass.getName split '.').last val str = if (TypeStrings.isAnonClass(x.getClass)) name else (name split '$').last diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index d6bf80ccf0db..23cc781428c0 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -1072,7 +1072,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def adaptExprNotFunMode(): Tree = { def lastTry(err: AbsTypeError = null): Tree = { debuglog("error tree = " + tree) - if (settings.debug && settings.explaintypes) explainTypes(tree.tpe, pt) + if (settings.isDebug && settings.explaintypes) explainTypes(tree.tpe, pt) if (err ne null) context.issue(err) if (tree.tpe.isErroneous || pt.isErroneous) setError(tree) else adaptMismatchedSkolems() @@ -5396,7 +5396,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper AppliedTypeNoParametersError(tree, tpt1.tpe) } else { //Console.println("\{tpt1}:\{tpt1.symbol}:\{tpt1.symbol.info}") - if (settings.debug) Console.println(tpt1+":"+tpt1.symbol+":"+tpt1.symbol.info)//debug + if (settings.isDebug) Console.println(s"$tpt1:${tpt1.symbol}:${tpt1.symbol.info}")//debug AppliedTypeWrongNumberOfArgsError(tree, tpt1, tparams) } } diff --git a/src/compiler/scala/tools/nsc/typechecker/TypersTracking.scala b/src/compiler/scala/tools/nsc/typechecker/TypersTracking.scala index 8bc1822c50d8..5e14a3ac9273 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypersTracking.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypersTracking.scala @@ -29,7 +29,7 @@ trait TypersTracking { def fullSiteString(context: Context): String = { def owner_long_s = ( - if (settings.debug.value) { + if (settings.isDebug) { def flags_s = context.owner.debugFlagString match { case "" => "" case s => " with flags " + inLightMagenta(s) @@ -70,7 +70,7 @@ trait TypersTracking { private def truncAndOneLine(s: String): String = { val s1 = s.replaceAll("\\s+", " ") - if (s1.length < 60 || settings.debug.value) s1 else s1.take(57) + "..." + if (s1.length < 60 || settings.isDebug) s1 else s1.take(57) + "..." } private class Frame(val tree: Tree) { } @@ -173,7 +173,7 @@ trait TypersTracking { // Some trees which are typed with mind-numbing frequency and // which add nothing by being printed. Did () type to Unit? Let's // gamble on yes. 
- def printingOk(t: Tree) = printTypings && (settings.debug.value || !noPrint(t)) + def printingOk(t: Tree) = printTypings && (settings.isDebug || !noPrint(t)) def noPrintTyping(t: Tree) = (t.tpe ne null) || !printingOk(t) def noPrintAdapt(tree1: Tree, tree2: Tree) = !printingOk(tree1) || ( (tree1.tpe == tree2.tpe) diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala index fa573ca00a36..89d31ec386d6 100644 --- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala +++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala @@ -52,7 +52,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf => extends ReflectGlobal(settings, reporter0, toolBoxSelf.classLoader) { import definitions._ - private val trace = scala.tools.nsc.util.trace when settings.debug.value + private val trace = scala.tools.nsc.util.trace when settings.isDebug private var wrapCount = 0 @@ -267,7 +267,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf => val msym = wrapInPackageAndCompile(mdef.name, mdef) val className = msym.fullName - if (settings.debug) println("generated: "+className) + if (settings.isDebug) println("generated: "+className) def moduleFileName(className: String) = className + "$" val jclazz = jClass.forName(moduleFileName(className), true, classLoader) val jmeth = jclazz.getDeclaredMethods.find(_.getName == wrapperMethodName).get diff --git a/src/interactive/scala/tools/nsc/interactive/REPL.scala b/src/interactive/scala/tools/nsc/interactive/REPL.scala index 3ffd8ecd3862..0782abd870dd 100644 --- a/src/interactive/scala/tools/nsc/interactive/REPL.scala +++ b/src/interactive/scala/tools/nsc/interactive/REPL.scala @@ -57,7 +57,7 @@ object REPL { } } catch { case ex @ FatalError(msg) => - if (true || command.settings.debug) // !!! + if (true || command.settings.isDebug) // !!! 
ex.printStackTrace() reporter.error(null, "fatal error: " + msg) } diff --git a/src/reflect/scala/reflect/internal/Kinds.scala b/src/reflect/scala/reflect/internal/Kinds.scala index f8fb514936c9..2a2b2511ba48 100644 --- a/src/reflect/scala/reflect/internal/Kinds.scala +++ b/src/reflect/scala/reflect/internal/Kinds.scala @@ -152,7 +152,7 @@ trait Kinds { kindErrors = f(kindErrors) } - if (settings.debug) { + if (settings.isDebug) { log("checkKindBoundsHK expected: "+ param +" with params "+ hkparams +" by definition in "+ paramowner) log("checkKindBoundsHK supplied: "+ arg +" with params "+ hkargs +" from "+ owner) log("checkKindBoundsHK under params: "+ underHKParams +" with args "+ withHKArgs) @@ -209,7 +209,7 @@ trait Kinds { else NoKindErrors } - if (settings.debug && (tparams.nonEmpty || targs.nonEmpty)) log( + if (settings.isDebug && (tparams.nonEmpty || targs.nonEmpty)) log( "checkKindBounds0(" + tparams + ", " + targs + ", " + pre + ", " + owner + ", " + explainErrors + ")" ) diff --git a/src/reflect/scala/reflect/internal/Mirrors.scala b/src/reflect/scala/reflect/internal/Mirrors.scala index bdeae9c48e8b..8a4c485a7829 100644 --- a/src/reflect/scala/reflect/internal/Mirrors.scala +++ b/src/reflect/scala/reflect/internal/Mirrors.scala @@ -56,7 +56,7 @@ trait Mirrors extends api.Mirrors { val result = if (name.isTermName) sym.suchThat(_ hasFlag MODULE) else sym if (result != NoSymbol) result else { - if (settings.debug) { log(sym.info); log(sym.info.members) }//debug + if (settings.isDebug) { log(sym.info); log(sym.info.members) }//debug thisMirror.missingHook(owner, name) orElse { MissingRequirementError.notFound((if (name.isTermName) "object " else "class ")+path+" in "+thisMirror) } diff --git a/src/reflect/scala/reflect/internal/Printers.scala b/src/reflect/scala/reflect/internal/Printers.scala index b9462c222272..fbbe192fe782 100644 --- a/src/reflect/scala/reflect/internal/Printers.scala +++ b/src/reflect/scala/reflect/internal/Printers.scala @@ -195,7 +195,7 @@ trait Printers extends api.Printers { self: SymbolTable => ) def printFlags(flags: Long, privateWithin: String) = { - val mask: Long = if (settings.debug) -1L else PrintableFlags + val mask: Long = if (settings.isDebug) -1L else PrintableFlags val s = flagsToString(flags & mask, privateWithin) if (s != "") print(s + " ") } @@ -460,7 +460,7 @@ trait Printers extends api.Printers { self: SymbolTable => case th @ This(qual) => printThis(th, symName(tree, qual)) - case Select(qual: New, name) if !settings.debug => + case Select(qual: New, name) if !settings.isDebug => print(qual) case Select(qualifier, name) => diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala index 95330eced4b1..15d337dfdd98 100644 --- a/src/reflect/scala/reflect/internal/SymbolTable.scala +++ b/src/reflect/scala/reflect/internal/SymbolTable.scala @@ -82,9 +82,10 @@ abstract class SymbolTable extends macros.Universe def shouldLogAtThisPhase = false def isPastTyper = false - final def isDeveloper: Boolean = settings.debug.value || settings.developer.value - def picklerPhase: Phase + @inline final def isDeveloper: Boolean = settings.isDebug || settings.isDeveloper + + def picklerPhase: Phase def erasurePhase: Phase def settings: MutableSettings @@ -96,7 +97,7 @@ abstract class SymbolTable extends macros.Universe def debugwarn(msg: => String): Unit = devWarning(msg) /** Override with final implementation for inlining. 
*/ - def debuglog(msg: => String): Unit = if (settings.debug) log(msg) + def debuglog(msg: => String): Unit = if (settings.isDebug) log(msg) /** dev-warns if dev-warning is enabled and `cond` is true; no-op otherwise */ @inline final def devWarningIf(cond: => Boolean)(msg: => String): Unit = diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 616e56bab198..a35eed37410a 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -291,7 +291,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => def varianceString: String = variance.symbolicString override def flagMask = - if (settings.debug && !isAbstractType) AllFlags + if (settings.isDebug && !isAbstractType) AllFlags else if (owner.isRefinementClass) ExplicitFlags & ~OVERRIDE else ExplicitFlags @@ -2684,7 +2684,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => symbolKind.abbreviation final def kindString: String = - if (settings.debug.value) accurateKindString + if (settings.isDebug) accurateKindString else sanitizedKindString /** If the name of the symbol's owner should be used when you care about @@ -2708,7 +2708,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => * If settings.Yshowsymkinds, adds abbreviated symbol kind. */ def nameString: String = { - val name_s = if (settings.debug.value) "" + unexpandedName else unexpandedName.dropLocal.decode + val name_s = if (settings.isDebug) "" + unexpandedName else unexpandedName.dropLocal.decode val kind_s = if (settings.Yshowsymkinds.value) "#" + abbreviatedKindString else "" name_s + idString + kind_s @@ -2735,7 +2735,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => * If hasMeaninglessName is true, uses the owner's name to disambiguate identity. 
*/ override def toString: String = { - val simplifyNames = !settings.debug + val simplifyNames = !settings.isDebug if (isPackageObjectOrClass && simplifyNames) s"package object ${owner.decodedName}" else { val kind = kindString @@ -2771,7 +2771,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => def isStructuralThisType = owner.isInitialized && owner.isStructuralRefinement && tp == owner.tpe // scala/bug#8158 // colon+space, preceded by an extra space if needed to prevent the colon glomming onto a symbolic name def postnominalColon: String = if (!followsParens && name.isOperatorName) " : " else ": " - def parents = if (settings.debug) parentsString(tp.parents) else briefParentsString(tp.parents) + def parents = if (settings.isDebug) parentsString(tp.parents) else briefParentsString(tp.parents) def typeRest = if (isClass) " extends " + parents else if (isAliasType) " = " + tp.resultType @@ -2825,7 +2825,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => /** String representation of existentially bound variable */ def existentialToString = - if (isSingletonExistential && !settings.debug.value) + if (isSingletonExistential && !settings.isDebug) "val " + tpnme.dropSingletonName(name) + ": " + dropSingletonType(info.upperBound) else defString } @@ -3279,7 +3279,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => owner.newTypeSkolemSymbol(name, origin, pos, newFlags) override def nameString: String = - if (settings.debug.value) (super.nameString + "&" + level) + if ((settings.isDebug)) (super.nameString + "&" + level) else super.nameString } @@ -3540,7 +3540,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => // Avoid issuing lots of redundant errors if (!hasFlag(IS_ERROR)) { globalError(pos, missingMessage) - if (settings.debug.value) + if (settings.isDebug) (new Throwable).printStackTrace this setFlag IS_ERROR @@ -3759,7 +3759,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => /** An exception for cyclic references of symbol definitions */ case class CyclicReference(sym: Symbol, info: Type) extends TypeError("illegal cyclic reference involving " + sym) { - if (settings.debug) printStackTrace() + if (settings.isDebug) printStackTrace() } /** A class for type histories */ diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala index 71d0ff0579b4..cd0545a31e53 100644 --- a/src/reflect/scala/reflect/internal/Trees.scala +++ b/src/reflect/scala/reflect/internal/Trees.scala @@ -1138,7 +1138,7 @@ trait Trees extends api.Trees { private def requireLegal(value: Any, allowed: Any, what: String) = ( if (value != allowed) { log(s"can't set $what for $self to value other than $allowed") - if (settings.debug && settings.developer) + if (settings.isDebug && settings.isDeveloper) (new Throwable).printStackTrace } ) diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 25a57eb7b64f..7c0141c62dec 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -1198,7 +1198,7 @@ trait Types override def underlying: Type = sym.typeOfThis override def isHigherKinded = sym.isRefinementClass && underlying.isHigherKinded override def prefixString = - if (settings.debug) sym.nameString + ".this." + if (settings.isDebug) sym.nameString + ".this." else if (sym.isAnonOrRefinementClass) "this." else if (sym.isOmittablePrefix) "" else if (sym.isModuleClass) sym.fullNameString + "." 
@@ -1446,7 +1446,7 @@ trait Types override def isStructuralRefinement: Boolean = typeSymbol.isAnonOrRefinementClass && (decls exists symbolIsPossibleInRefinement) - protected def shouldForceScope = settings.debug || parents.isEmpty || !decls.isEmpty + protected def shouldForceScope = settings.isDebug || parents.isEmpty || !decls.isEmpty protected def initDecls = fullyInitializeScope(decls) protected def scopeString = if (shouldForceScope) initDecls.mkString("{", "; ", "}") else "" override def safeToString = parentsString(parents) + scopeString @@ -1802,7 +1802,7 @@ trait Types /** A nicely formatted string with newlines and such. */ def formattedToString = parents.mkString("\n with ") + scopeString - override protected def shouldForceScope = settings.debug || decls.size > 1 + override protected def shouldForceScope = settings.isDebug || decls.size > 1 override protected def scopeString = initDecls.mkString(" {\n ", "\n ", "\n}") override def safeToString = if (shouldForceScope) formattedToString else super.safeToString } @@ -2305,7 +2305,7 @@ trait Types } // ensure that symbol is not a local copy with a name coincidence private def needsPreString = ( - settings.debug + settings.isDebug || !shorthands(sym.fullName) || (sym.ownersIterator exists (s => !s.isClass)) ) @@ -2381,12 +2381,12 @@ trait Types "" } override def safeToString = { - val custom = if (settings.debug) "" else customToString + val custom = if (settings.isDebug) "" else customToString if (custom != "") custom else finishPrefix(preString + sym.nameString + argsString) } override def prefixString = "" + ( - if (settings.debug) + if (settings.isDebug) super.prefixString else if (sym.isOmittablePrefix) "" @@ -2772,11 +2772,11 @@ trait Types } override def nameAndArgsString: String = underlying match { - case TypeRef(_, sym, args) if !settings.debug && isRepresentableWithWildcards => - sym.name + wildcardArgsString(quantified.toSet, args).mkString("[", ",", "]") + case TypeRef(_, sym, args) if !settings.isDebug && isRepresentableWithWildcards => + sym.name.toString + wildcardArgsString(quantified.toSet, args).mkString("[", ",", "]") case TypeRef(_, sym, args) => - sym.name + args.mkString("[", ",", "]") + existentialClauses - case _ => underlying.typeSymbol.name + existentialClauses + sym.name.toString + args.mkString("[", ",", "]") + existentialClauses + case _ => underlying.typeSymbol.name.toString + existentialClauses } private def existentialClauses = { @@ -2813,7 +2813,7 @@ trait Types override def safeToString: String = { underlying match { - case TypeRef(pre, sym, args) if !settings.debug && isRepresentableWithWildcards => + case TypeRef(pre, sym, args) if !settings.isDebug && isRepresentableWithWildcards => "" + TypeRef(pre, sym, Nil) + wildcardArgsString(quantified.toSet, args).mkString("[", ", ", "]") case MethodType(_, _) | NullaryMethodType(_) | PolyType(_, _) => "(" + underlying + ")" + existentialClauses @@ -4652,7 +4652,7 @@ trait Types def this(msg: String) = this(NoPosition, msg) final override def fillInStackTrace() = - if (settings.debug) super.fillInStackTrace() else this + if (settings.isDebug) super.fillInStackTrace() else this } // TODO: RecoverableCyclicReference should be separated from TypeError, @@ -4660,7 +4660,7 @@ trait Types /** An exception for cyclic references from which we can recover */ case class RecoverableCyclicReference(sym: Symbol) extends TypeError("illegal cyclic reference involving " + sym) { - if (settings.debug) printStackTrace() + if (settings.isDebug) printStackTrace() } class 
NoCommonType(tps: List[Type]) extends Throwable( diff --git a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala index 001790eb359b..6add2d84a5c6 100644 --- a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala +++ b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala @@ -61,8 +61,6 @@ abstract class UnPickler { class Scan(_bytes: Array[Byte], offset: Int, classRoot: ClassSymbol, moduleRoot: ModuleSymbol, filename: String) extends PickleBuffer(_bytes, offset, -1) { //println("unpickle " + classRoot + " and " + moduleRoot)//debug - protected def debug = settings.debug.value - checkVersion() private val loadingMirror = mirrorThatLoaded(classRoot) diff --git a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala index 6e810d6c6970..1f93b6693d5a 100644 --- a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala +++ b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala @@ -87,5 +87,7 @@ object MutableSettings { implicit class SettingsOps(private val settings: MutableSettings) extends AnyVal { @inline final def areStatisticsEnabled = StatisticsStatics.areSomeColdStatsEnabled && settings.YstatisticsEnabled + @inline final def isDebug: Boolean = StatisticsStatics.isDebug && settings.debug + @inline final def isDeveloper: Boolean = StatisticsStatics.isDeveloper && settings.developer } } diff --git a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala index a56eba05e482..9a619f6c5f61 100644 --- a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala +++ b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala @@ -386,7 +386,7 @@ private[internal] trait GlbLubs { // parameters are not handled correctly. val ok = ts forall { t => isSubType(t, lubRefined, depth) || { - if (settings.debug || printLubs) { + if (settings.isDebug || printLubs) { Console.println( "Malformed lub: " + lubRefined + "\n" + "Argument " + t + " does not conform. 
Falling back to " + lubBase diff --git a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala index 45837746771c..b8025fe16d99 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala @@ -66,7 +66,7 @@ trait TypeComparers { private def isSubPre(pre1: Type, pre2: Type, sym: Symbol) = if ((pre1 ne pre2) && (pre1 ne NoPrefix) && (pre2 ne NoPrefix) && pre1 <:< pre2) { - if (settings.debug) println(s"new isSubPre $sym: $pre1 <:< $pre2") + if (settings.isDebug) println(s"new isSubPre $sym: $pre1 <:< $pre2") true } else false diff --git a/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala b/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala index f791498aa044..e42caeaf2644 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala @@ -61,8 +61,8 @@ private[internal] trait TypeConstraints { log ::= UndoPair(tv, tv.constr.cloneInternal) } - def clear() { - if (settings.debug) + def clear(): Unit = { + if (settings.isDebug) self.log("Clearing " + log.size + " entries from the undoLog.") log = Nil } diff --git a/src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala b/src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala index 41187c9c4f75..f4acdb99150b 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala @@ -39,7 +39,7 @@ private[internal] trait TypeToStrings { // else if (toStringRecursions >= maxToStringRecursions) { devWarning("Exceeded recursion depth attempting to print " + util.shortClassOfInstance(tpe)) - if (settings.debug) + if (settings.isDebug) (new Throwable).printStackTrace "..." 
diff --git a/src/reflect/scala/reflect/internal/util/StatisticsStatics.java b/src/reflect/scala/reflect/internal/util/StatisticsStatics.java index d2d27a7af6c7..1143a64268a7 100644 --- a/src/reflect/scala/reflect/internal/util/StatisticsStatics.java +++ b/src/reflect/scala/reflect/internal/util/StatisticsStatics.java @@ -23,13 +23,21 @@ public final class StatisticsStatics { private static final AlmostFinalValue COLD_STATS = new AlmostFinalValue(); private static final AlmostFinalValue HOT_STATS = new AlmostFinalValue(); + private static final AlmostFinalValue DEBUG = new AlmostFinalValue(); + private static final AlmostFinalValue DEVELOPER = new AlmostFinalValue(); private static final MethodHandle COLD_STATS_GETTER = COLD_STATS.invoker; private static final MethodHandle HOT_STATS_GETTER = HOT_STATS.invoker; + private static final MethodHandle DEBUG_GETTER = DEBUG.invoker; + private static final MethodHandle DEVELOPER_GETTER = DEVELOPER.invoker; public static boolean areSomeColdStatsEnabled() throws Throwable { return (boolean) COLD_STATS_GETTER.invokeExact(); } public static boolean areSomeHotStatsEnabled() throws Throwable { return (boolean) HOT_STATS_GETTER.invokeExact(); } + public static boolean isDebug() throws Throwable { return (boolean) DEBUG_GETTER.invokeExact(); } + public static boolean isDeveloper() throws Throwable { return (boolean) DEVELOPER_GETTER.invokeExact(); } public static void enableColdStatsAndDeoptimize() { COLD_STATS.toggleOnAndDeoptimize(); } public static void enableHotStatsAndDeoptimize() { HOT_STATS.toggleOnAndDeoptimize(); } + public static void enableDebugAndDeoptimize() { DEBUG.toggleOnAndDeoptimize(); } + public static void enableDeveloperAndDeoptimize() { DEVELOPER.toggleOnAndDeoptimize(); } } diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala index e3045e15e0e8..21c0f5a6f45b 100644 --- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala +++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala @@ -623,7 +623,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive def markAbsent(tpe: Type) = setAllInfos(clazz, module, tpe) def handleError(ex: Exception) = { markAbsent(ErrorType) - if (settings.debug) ex.printStackTrace() + if (settings.isDebug) ex.printStackTrace() val msg = ex.getMessage() MissingRequirementError.signal( (if (msg eq null) "reflection error while loading " + clazz.name diff --git a/src/reflect/scala/reflect/runtime/Settings.scala b/src/reflect/scala/reflect/runtime/Settings.scala index 3f4cfa0e1c5a..3d96babda838 100644 --- a/src/reflect/scala/reflect/runtime/Settings.scala +++ b/src/reflect/scala/reflect/runtime/Settings.scala @@ -54,8 +54,8 @@ private[reflect] class Settings extends MutableSettings { val Yshowsymowners = new BooleanSetting(false) val Yshowsymkinds = new BooleanSetting(false) val breakCycles = new BooleanSetting(false) - val debug = new BooleanSetting(false) - val developer = new BooleanSetting(false) + val debug = new BooleanSetting(false) { override def postSetHook() = if (v) StatisticsStatics.enableDebugAndDeoptimize() } + val developer = new BooleanSetting(false) { override def postSetHook() = if (v) StatisticsStatics.enableDeveloperAndDeoptimize() } val explaintypes = new BooleanSetting(false) val overrideObjects = new BooleanSetting(false) val printtypes = new BooleanSetting(false) diff --git a/src/reflect/scala/reflect/runtime/SymbolTable.scala b/src/reflect/scala/reflect/runtime/SymbolTable.scala index 
0d8a0bfd1a9b..aae1a2a641c8 100644 --- a/src/reflect/scala/reflect/runtime/SymbolTable.scala +++ b/src/reflect/scala/reflect/runtime/SymbolTable.scala @@ -25,7 +25,7 @@ private[scala] trait SymbolTable extends internal.SymbolTable with JavaMirrors w if (settings.verbose) println("[reflect-compiler] "+msg) def debugInfo(msg: => String) = - if (settings.debug) info(msg) + if (settings.isDebug) info(msg) /** Declares that this is a runtime reflection universe. * diff --git a/src/repl/scala/tools/nsc/interpreter/ILoop.scala b/src/repl/scala/tools/nsc/interpreter/ILoop.scala index 810c89ac1ec5..b611f58d89a6 100644 --- a/src/repl/scala/tools/nsc/interpreter/ILoop.scala +++ b/src/repl/scala/tools/nsc/interpreter/ILoop.scala @@ -911,7 +911,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) extend type ReaderMaker = Completer => InteractiveReader def instantiater(className: String): ReaderMaker = completer => { - if (settings.debug) Console.println(s"Trying to instantiate an InteractiveReader from $className") + if (settings.isDebug) Console.println(s"Trying to instantiate an InteractiveReader from $className") Class.forName(className).getConstructor(classOf[Completer]). newInstance(completer). asInstanceOf[InteractiveReader] @@ -927,7 +927,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) extend val reader = (readers collect { case Success(reader) => reader } headOption) getOrElse SimpleReader() - if (settings.debug) { + if (settings.isDebug) { val readerDiags = (readerClasses, readers).zipped map { case (cls, Failure(e)) => s" - $cls --> \n\t" + scala.tools.nsc.util.stackTraceString(e) + "\n" case (cls, Success(_)) => s" - $cls OK" diff --git a/src/scaladoc/scala/tools/nsc/ScalaDoc.scala b/src/scaladoc/scala/tools/nsc/ScalaDoc.scala index 53dffac1eded..880478044c70 100644 --- a/src/scaladoc/scala/tools/nsc/ScalaDoc.scala +++ b/src/scaladoc/scala/tools/nsc/ScalaDoc.scala @@ -49,7 +49,7 @@ class ScalaDoc { try { new DocFactory(reporter, docSettings) document command.files } catch { case ex @ FatalError(msg) => - if (docSettings.debug.value) ex.printStackTrace() + if (docSettings.isDebug) ex.printStackTrace() reporter.error(null, "fatal error: " + msg) } finally reporter.finish() diff --git a/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala b/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala index a1ddd8012b54..3151ae7e0f2d 100644 --- a/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala +++ b/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala @@ -97,7 +97,7 @@ trait ScaladocAnalyzer extends Analyzer { typedStats(trees, NoSymbol) useCase.defined = context.scope.toList filterNot (useCase.aliases contains _) - if (settings.debug) + if (settings.isDebug) useCase.defined foreach (sym => println("defined use cases: %s:%s".format(sym, sym.tpe))) useCase.defined diff --git a/src/scaladoc/scala/tools/nsc/doc/Uncompilable.scala b/src/scaladoc/scala/tools/nsc/doc/Uncompilable.scala index e25c0dc4da05..86f71d102cdb 100644 --- a/src/scaladoc/scala/tools/nsc/doc/Uncompilable.scala +++ b/src/scaladoc/scala/tools/nsc/doc/Uncompilable.scala @@ -12,7 +12,9 @@ package scala.tools.nsc package doc + import scala.language.implicitConversions + import scala.reflect.internal.util.NoPosition import scala.tools.nsc.Reporting.WarningCategory @@ -45,7 +47,7 @@ trait Uncompilable { def symbols = pairs map (_._1) def templates = symbols filter (x => x.isClass || x.isTrait || x == AnyRefClass/* which is now a type alias */) toSet def comments = { - if 
(settings.debug || settings.verbose) + if (settings.isDebug || settings.verbose) inform("Found %d uncompilable files: %s".format(files.size, files mkString ", ")) if (pairs.isEmpty) diff --git a/test/files/run/t11802-pluginsdir/ploogin.scala b/test/files/run/t11802-pluginsdir/ploogin.scala index 9b0c8066673f..d48c042e7626 100644 --- a/test/files/run/t11802-pluginsdir/ploogin.scala +++ b/test/files/run/t11802-pluginsdir/ploogin.scala @@ -22,7 +22,7 @@ abstract class Ploogin(val global: Global, val name: String = "ploogin") extends class TestPhase(prev: Phase) extends StdPhase(prev) { override def description = TestComponent.this.description def apply(unit: CompilationUnit) { - if (settings.developer) inform(s"My phase name is $phaseName") + if (settings.isDeveloper) inform(s"My phase name is $phaseName") } } } diff --git a/test/files/run/t4841-isolate-plugins/ploogin.scala b/test/files/run/t4841-isolate-plugins/ploogin.scala index bd8c7275ec1e..c48e9e748935 100644 --- a/test/files/run/t4841-isolate-plugins/ploogin.scala +++ b/test/files/run/t4841-isolate-plugins/ploogin.scala @@ -23,7 +23,7 @@ class Ploogin(val global: Global, val name: String = "ploogin") extends Plugin { class TestPhase(prev: Phase) extends StdPhase(prev) { override def description = TestComponent.this.description def apply(unit: CompilationUnit) { - if (settings.developer) inform(s"My phase name is $phaseName") + if (settings.isDeveloper) inform(s"My phase name is $phaseName") } } } From d06e2cf54264bff26426ac4db87aa94397404e21 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 29 Apr 2021 11:06:25 +1000 Subject: [PATCH 152/769] Fully JIT inlinable settings/statistics enabled checks Wrapping the method handle invocation in a static method relies on that method being JIT inlined. Otherwise, an unlucky caller can still incur the machine-code subroutine call overhead to a no-op method. 
Example: ``` [info] \-> TypeProfile (34723/34723 counts) = scala/tools/nsc/Global$GlobalMirror [info] @ 1 scala.reflect.internal.Mirrors$Roots::RootClass (21 bytes) inline (hot) [info] !m @ 12 scala.reflect.internal.Mirrors$Roots::RootClass$lzycompute (49 bytes) inline (hot) [info] @ 19 scala.reflect.internal.Mirrors$Roots$RootClass::<init> (61 bytes) inline (hot) [info] @ 13 scala.reflect.internal.Mirrors$Roots::scala$reflect$internal$Mirrors$Roots$$$outer (5 bytes) accessor [info] @ 21 scala.reflect.internal.Mirrors$Roots::scala$reflect$internal$Mirrors$Roots$$$outer (5 bytes) accessor [info] @ 24 scala.reflect.internal.SymbolTable::NoPosition (5 bytes) accessor [info] @ 28 scala.reflect.internal.Mirrors$Roots::scala$reflect$internal$Mirrors$Roots$$$outer (5 bytes) accessor [info] @ 31 scala.reflect.internal.SymbolTable::tpnme (16 bytes) inline (hot) [info] !m @ 8 scala.reflect.internal.SymbolTable::tpnme$lzycompute$1 (27 bytes) inline (hot) [info] @ 15 scala.reflect.internal.StdNames$tpnme$::<init> (6 bytes) inline (hot) [info] @ 2 scala.reflect.internal.StdNames$TypeNames::<init> (757 bytes) hot method too big [info] @ 34 scala.reflect.internal.StdNames$CommonNames::ROOT (5 bytes) accessor [info] @ 40 scala.reflect.internal.Symbols$PackageClassSymbol::<init> (10 bytes) inline (hot) [info] @ 6 scala.reflect.internal.Symbols$ModuleClassSymbol::<init> (41 bytes) inline (hot) [info] @ 11 scala.reflect.internal.Symbols$ClassSymbol::<init> (164 bytes) inline (hot) [info] @ 6 scala.reflect.internal.Symbols$TypeSymbol::<init> (145 bytes) inline (hot) [info] @ 6 scala.reflect.internal.Symbols$Symbol::<init> (168 bytes) inlining too deep [info] @ 11 scala.reflect.internal.SymbolTable::NoSymbol (22 bytes) inlining too deep [info] @ 14 scala.reflect.internal.Symbols$Symbol::privateWithin_$eq (6 bytes) inlining too deep [info] @ 32 scala.reflect.internal.util.StatisticsStatics::areSomeColdStatsEnabled (7 bytes) inlining too deep ``` Instead, push the `invokeExact` into the `@inline` checker methods, and use these pervasively.
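To make that concrete, here is a minimal, self-contained sketch of the underlying "almost final" flag technique (the `AlmostFinalFlag` and `Flags` names below are illustrative stand-ins, not the compiler's actual `AlmostFinalValue`/`StatisticsStatics` sources): a `MutableCallSite` whose target is a constant-returning `MethodHandle`, with the `invokeExact` performed directly in the checker method that callers are expected to inline.

```
import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.lang.invoke.MutableCallSite;

// A flip-once flag: the call site's target is one of two constant handles.
final class AlmostFinalFlag {
    private static final MethodHandle FALSE = MethodHandles.constant(boolean.class, false);
    private static final MethodHandle TRUE  = MethodHandles.constant(boolean.class, true);

    private final MutableCallSite callSite = new MutableCallSite(FALSE);
    final MethodHandle invoker = callSite.dynamicInvoker();

    void enableAndDeoptimize() {
        callSite.setTarget(TRUE);
        // Publish the new target to compiled code; affected nmethods deoptimize.
        MutableCallSite.syncAll(new MutableCallSite[] { callSite });
    }
}

final class Flags {
    private static final AlmostFinalFlag DEBUG = new AlmostFinalFlag();
    private static final MethodHandle DEBUG_GETTER = DEBUG.invoker;

    // The checker callers should inline: the invokeExact happens right here,
    // not behind yet another static wrapper that might not get inlined.
    static boolean isDebug() {
        try {
            return (boolean) DEBUG_GETTER.invokeExact();
        } catch (Throwable t) {
            throw new AssertionError(t);
        }
    }

    static void enableDebugAndDeoptimize() { DEBUG.enableAndDeoptimize(); }
}
```

While the flag is off, a JIT that specializes on the call site's current target sees a constant `false`, so a guard like `if (Flags.isDebug()) log(...)` can collapse to a no-op; `enableDebugAndDeoptimize()` swaps in the `true` handle and `syncAll` invalidates the affected compiled code.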
(cherry picked from commit a774c4fb282fdfbd46d8c0f54b75d6f03b77c338) --- .../tools/nsc/symtab/SymbolLoaders.scala | 8 +-- .../tools/nsc/transform/patmat/Logic.scala | 6 +-- .../nsc/transform/patmat/MatchAnalysis.scala | 9 ++-- .../transform/patmat/MatchTranslation.scala | 6 +-- .../tools/nsc/transform/patmat/Solving.scala | 5 +- .../tools/nsc/typechecker/Analyzer.scala | 8 ++- .../tools/nsc/typechecker/Implicits.scala | 52 +++++++++---------- .../scala/tools/nsc/typechecker/Macros.scala | 8 +-- .../scala/tools/nsc/typechecker/Typers.scala | 45 ++++++++-------- .../scala/reflect/internal/BaseTypeSeqs.scala | 6 +-- .../scala/reflect/internal/Scopes.scala | 10 ++-- .../scala/reflect/internal/Symbols.scala | 6 +-- .../scala/reflect/internal/Types.scala | 36 ++++++------- .../internal/settings/MutableSettings.scala | 7 +-- .../reflect/internal/tpe/FindMembers.scala | 12 ++--- .../scala/reflect/internal/tpe/GlbLubs.scala | 17 +++--- .../reflect/internal/tpe/TypeComparers.scala | 3 +- .../internal/util/StatisticsStatics.java | 13 ++--- .../scala/reflect/io/AbstractFile.scala | 2 +- src/reflect/scala/reflect/io/Path.scala | 10 ++-- 20 files changed, 128 insertions(+), 141 deletions(-) diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala index 9b1448e1ff83..4aa3515ce80d 100644 --- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala +++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala @@ -19,7 +19,7 @@ import java.io.IOException import scala.reflect.internal.MissingRequirementError import scala.reflect.io.{AbstractFile, NoAbstractFile} import scala.tools.nsc.util.{ClassPath, ClassRepresentation} -import scala.reflect.internal.util.{ReusableInstance, StatisticsStatics} +import scala.reflect.internal.util.ReusableInstance import scala.tools.nsc.Reporting.WarningCategory /** This class ... 
@@ -337,12 +337,12 @@ abstract class SymbolLoaders { protected def description = "class file "+ classfile.toString - protected def doComplete(root: Symbol) { - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(statistics.classReadNanos) else null + protected def doComplete(root: Symbol): Unit = { + val start = if (settings.areStatisticsEnabled) statistics.startTimer(statistics.classReadNanos) else null classfileParser.parse(classfile, clazz, module) if (clazz.associatedFile eq NoAbstractFile) clazz.associatedFile = classfile if (module.associatedFile eq NoAbstractFile) module.associatedFile = classfile - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(statistics.classReadNanos, start) + if (settings.areStatisticsEnabled) statistics.stopTimer(statistics.classReadNanos, start) } override def sourcefile: Option[AbstractFile] = classfileParser.srcfile override def associatedFile(self: Symbol): AbstractFile = classfile diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala index ee4878406904..ec37415ae1a6 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala @@ -15,7 +15,7 @@ package tools.nsc.transform.patmat import scala.language.postfixOps import scala.collection.mutable -import scala.reflect.internal.util.{HashSet, Position, StatisticsStatics} +import scala.reflect.internal.util.{HashSet, Position} trait Logic extends Debugging { import global._ @@ -392,7 +392,7 @@ trait Logic extends Debugging { // V1 = Nil implies -(V2 = Ci) for all Ci in V2's domain (i.e., it is unassignable) // may throw an AnalysisBudget.Exception def removeVarEq(props: List[Prop], modelNull: Boolean = false): (Prop, List[Prop]) = { - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(statistics.patmatAnaVarEq) else null + val start = if (settings.areStatisticsEnabled) statistics.startTimer(statistics.patmatAnaVarEq) else null val vars = new mutable.HashSet[Var] @@ -461,7 +461,7 @@ trait Logic extends Debugging { debug.patmat(s"eqAxioms:\n${eqAxioms.mkString("\n")}") debug.patmat(s"pure:${pure.mkString("\n")}") - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(statistics.patmatAnaVarEq, start) + if (settings.areStatisticsEnabled) statistics.stopTimer(statistics.patmatAnaVarEq, start) (And(eqAxioms: _*), pure) } diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala index 6be930a3a560..bd67d273ef5c 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala @@ -13,7 +13,6 @@ package scala.tools.nsc.transform.patmat import scala.collection.mutable -import scala.reflect.internal.util.StatisticsStatics import scala.tools.nsc.Reporting.WarningCategory trait TreeAndTypeAnalysis extends Debugging { @@ -463,7 +462,7 @@ trait MatchAnalysis extends MatchApproximation { // thus, the case is unreachable if there is no model for -(-P /\ C), // or, equivalently, P \/ -C, or C => P def unreachableCase(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): Option[Int] = { - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(statistics.patmatAnaReach) else null + val start = if (settings.areStatisticsEnabled) statistics.startTimer(statistics.patmatAnaReach) else null 
// use the same approximator so we share variables, // but need different conditions depending on whether we're conservatively looking for failure or success @@ -512,7 +511,7 @@ trait MatchAnalysis extends MatchApproximation { } } - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(statistics.patmatAnaReach, start) + if (settings.areStatisticsEnabled) statistics.stopTimer(statistics.patmatAnaReach, start) if (reachable) None else Some(caseIndex) } catch { @@ -531,7 +530,7 @@ trait MatchAnalysis extends MatchApproximation { // - back off (to avoid crying exhaustive too often) when: // - there are guards --> // - there are extractor calls (that we can't secretly/soundly) rewrite - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(statistics.patmatAnaExhaust) else null + val start = if (settings.areStatisticsEnabled) statistics.startTimer(statistics.patmatAnaExhaust) else null var backoff = false val approx = new TreeMakersToProps(prevBinder) @@ -585,7 +584,7 @@ trait MatchAnalysis extends MatchApproximation { // since e.g. List(_, _) would cover List(1, _) val pruned = CounterExample.prune(counterExamples.sortBy(_.toString)).map(_.toString) - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(statistics.patmatAnaExhaust, start) + if (settings.areStatisticsEnabled) statistics.stopTimer(statistics.patmatAnaExhaust, start) pruned } catch { case ex: AnalysisBudget.Exception => diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala index b31c76136768..fa19f67136a3 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala @@ -13,8 +13,6 @@ package scala.tools.nsc.transform.patmat import scala.language.postfixOps -import scala.reflect.internal.util.StatisticsStatics - /** Translate typed Trees that represent pattern matches into the patternmatching IR, defined by TreeMakers. 
*/ @@ -211,7 +209,7 @@ trait MatchTranslation { debug.patmat("translating "+ cases.mkString("{", "\n", "}")) - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(statistics.patmatNanos) else null + val start = if (settings.areStatisticsEnabled) statistics.startTimer(statistics.patmatNanos) else null val selectorTp = repeatedToSeq(elimAnonymousClass(selector.tpe.widen.withoutAnnotations)) @@ -227,7 +225,7 @@ trait MatchTranslation { // pt = Any* occurs when compiling test/files/pos/annotDepMethType.scala with -Xexperimental val combined = combineCases(selector, selectorSym, nonSyntheticCases map translateCase(selectorSym, pt), pt, selectorPos, matchOwner, defaultOverride) - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(statistics.patmatNanos, start) + if (settings.areStatisticsEnabled) statistics.stopTimer(statistics.patmatNanos, start) combined } diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala index ef54416a032d..ba82f14d1063 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala @@ -14,7 +14,6 @@ package scala.tools.nsc.transform.patmat import scala.collection.mutable.ArrayBuffer import scala.collection.{immutable,mutable} -import scala.reflect.internal.util.StatisticsStatics // a literal is a (possibly negated) variable class Lit(val v: Int) extends AnyVal { @@ -485,11 +484,11 @@ trait Solving extends Logic { def findTseitinModelFor(clauses: Array[Clause]): TseitinModel = { debug.patmat(s"DPLL\n${cnfString(clauses)}") - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(statistics.patmatAnaDPLL) else null + val start = if (settings.areStatisticsEnabled) statistics.startTimer(statistics.patmatAnaDPLL) else null val satisfiableWithModel = findTseitinModel0((clauses, Set.empty[Lit]) :: Nil) - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(statistics.patmatAnaDPLL, start) + if (settings.areStatisticsEnabled) statistics.stopTimer(statistics.patmatAnaDPLL, start) satisfiableWithModel } diff --git a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala index 11c2f28703f7..b56fba420fe0 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala @@ -13,8 +13,6 @@ package scala.tools.nsc package typechecker -import scala.reflect.internal.util.StatisticsStatics - /** The main attribution phase. */ trait Analyzer extends AnyRef @@ -96,8 +94,8 @@ trait Analyzer extends AnyRef // Lacking a better fix, we clear it here (before the phase is created, meaning for each // compiler run). This is good enough for the resident compiler, which was the most affected. 
undoLog.clear() - override def run() { - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(statistics.typerNanos) else null + override def run(): Unit = { + val start = if (settings.areStatisticsEnabled) statistics.startTimer(statistics.typerNanos) else null global.echoPhaseSummary(this) val units = currentRun.units while (units.hasNext) { @@ -107,7 +105,7 @@ trait Analyzer extends AnyRef finishComputeParamAlias() // defensive measure in case the bookkeeping in deferred macro expansion is buggy clearDelayed() - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(statistics.typerNanos, start) + if (settings.areStatisticsEnabled) statistics.stopTimer(statistics.typerNanos, start) } def apply(unit: CompilationUnit) { diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index d982c29da1e7..b45cc86f4d91 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -24,7 +24,7 @@ import scala.collection.mutable import mutable.{LinkedHashMap, ListBuffer} import scala.util.matching.Regex import symtab.Flags._ -import scala.reflect.internal.util.{ReusableInstance, Statistics, StatisticsStatics, TriState} +import scala.reflect.internal.util.{ReusableInstance, Statistics, TriState} import scala.reflect.internal.TypesStats import scala.language.implicitConversions import scala.tools.nsc.Reporting.WarningCategory @@ -100,9 +100,9 @@ trait Implicits { // Note that the isInvalidConversionTarget seems to make a lot more sense right here, before all the // work is performed, than at the point where it presently exists. val shouldPrint = printTypings && !context.undetparams.isEmpty - val findMemberStart = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startCounter(findMemberImpl) else null - val subtypeStart = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startCounter(subtypeImpl) else null - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(implicitNanos) else null + val findMemberStart = if (settings.areStatisticsEnabled) statistics.startCounter(findMemberImpl) else null + val subtypeStart = if (settings.areStatisticsEnabled) statistics.startCounter(subtypeImpl) else null + val start = if (settings.areStatisticsEnabled) statistics.startTimer(implicitNanos) else null if (shouldPrint) typingStack.printTyping(tree, "typing implicit: %s %s".format(tree, context.undetparamsString)) val implicitSearchContext = context.makeImplicit(reportAmbiguous) @@ -120,9 +120,9 @@ trait Implicits { // and then filter out any which *were* inferred and are part of the substitutor in the implicit search result. 
context.undetparams = ((context.undetparams ++ result.undetparams) filterNot result.subst.from.contains).distinct - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(implicitNanos, start) - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopCounter(findMemberImpl, findMemberStart) - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopCounter(subtypeImpl, subtypeStart) + if (settings.areStatisticsEnabled) statistics.stopTimer(implicitNanos, start) + if (settings.areStatisticsEnabled) statistics.stopCounter(findMemberImpl, findMemberStart) + if (settings.areStatisticsEnabled) statistics.stopCounter(subtypeImpl, subtypeStart) if (result.isSuccess && settings.warnSelfImplicit && result.tree.symbol != null) { val s = @@ -405,7 +405,7 @@ trait Implicits { } import infer._ - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(implicitSearchCount) + if (settings.areStatisticsEnabled) statistics.incCounter(implicitSearchCount) /** The type parameters to instantiate */ val undetParams = if (isView) Nil else context.outer.undetparams @@ -435,12 +435,12 @@ trait Implicits { /** Is implicit info `info1` better than implicit info `info2`? */ def improves(info1: ImplicitInfo, info2: ImplicitInfo) = { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(improvesCount) + if (settings.areStatisticsEnabled) statistics.incCounter(improvesCount) (info2 == NoImplicitInfo) || (info1 != NoImplicitInfo) && { if (info1.sym.isStatic && info2.sym.isStatic) { improvesCache get ((info1, info2)) match { - case Some(b) => if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(improvesCachedCount); b + case Some(b) => if (settings.areStatisticsEnabled) statistics.incCounter(improvesCachedCount); b case None => val result = isStrictlyMoreSpecific(info1.tpe, info2.tpe, info1.sym, info2.sym) improvesCache((info1, info2)) = result @@ -560,14 +560,14 @@ trait Implicits { * This method is performance critical: 5-8% of typechecking time. 
*/ private def matchesPt(tp: Type, pt: Type, undet: List[Symbol]): Boolean = { - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(matchesPtNanos) else null + val start = if (settings.areStatisticsEnabled) statistics.startTimer(matchesPtNanos) else null val result = normSubType(tp, pt) || isView && { pt match { case Function1(arg1, arg2) => matchesPtView(tp, arg1, arg2, undet) case _ => false } } - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(matchesPtNanos, start) + if (settings.areStatisticsEnabled) statistics.stopTimer(matchesPtNanos, start) result } private def matchesPt(info: ImplicitInfo): Boolean = ( @@ -594,7 +594,7 @@ trait Implicits { } private def matchesPtInst(info: ImplicitInfo): Boolean = { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(matchesPtInstCalls) + if (settings.areStatisticsEnabled) statistics.incCounter(matchesPtInstCalls) info.tpe match { case PolyType(tparams, restpe) => try { @@ -603,14 +603,14 @@ trait Implicits { val tp = ApproximateDependentMap(restpe) val tpInstantiated = tp.instantiateTypeParams(allUndetparams, tvars) if(!matchesPt(tpInstantiated, wildPt, allUndetparams)) { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(matchesPtInstMismatch1) + if (settings.areStatisticsEnabled) statistics.incCounter(matchesPtInstMismatch1) false } else { val targs = solvedTypes(tvars, allUndetparams, varianceInType(wildPt), upper = false, lubDepth(tpInstantiated :: wildPt :: Nil)) val adjusted = adjustTypeArgs(allUndetparams, tvars, targs) val tpSubst = deriveTypeWithWildcards(adjusted.undetParams)(tp.instantiateTypeParams(adjusted.okParams, adjusted.okArgs)) if(!matchesPt(tpSubst, wildPt, adjusted.undetParams)) { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(matchesPtInstMismatch2) + if (settings.areStatisticsEnabled) statistics.incCounter(matchesPtInstMismatch2) false } else true } @@ -707,7 +707,7 @@ trait Implicits { } private def typedImplicit0(info: ImplicitInfo, ptChecked: Boolean, isLocalToCallsite: Boolean): SearchResult = { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(plausiblyCompatibleImplicits) + if (settings.areStatisticsEnabled) statistics.incCounter(plausiblyCompatibleImplicits) val ok = ptChecked || matchesPt(info) && { def word = if (isLocalToCallsite) "local " else "" typingLog("match", s"$word$info") @@ -717,7 +717,7 @@ trait Implicits { } private def typedImplicit1(info: ImplicitInfo, isLocalToCallsite: Boolean): SearchResult = { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(matchingImplicits) + if (settings.areStatisticsEnabled) statistics.incCounter(matchingImplicits) // workaround for deficient context provided by ModelFactoryImplicitSupport#makeImplicitConstraints val isScaladoc = context.tree == EmptyTree @@ -773,7 +773,7 @@ trait Implicits { case None => } - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(typedImplicits) + if (settings.areStatisticsEnabled) statistics.incCounter(typedImplicits) val itree3 = if (isView) treeInfo.dissectApplied(itree2).callee else adapt(itree2, EXPRmode, wildPt) @@ -863,7 +863,7 @@ trait Implicits { fail("typing TypeApply reported errors for the implicit tree: " + err.errMsg) case None => val result = new SearchResult(unsuppressMacroExpansion(itree3), subst, context.undetparams) - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(foundImplicits) + if (settings.areStatisticsEnabled) 
statistics.incCounter(foundImplicits) typingLog("success", s"inferred value of type $ptInstantiated is $result") result } @@ -1180,11 +1180,11 @@ trait Implicits { * @return map from infos to search results */ def applicableInfos(iss: Infoss, isLocalToCallsite: Boolean): mutable.LinkedHashMap[ImplicitInfo, SearchResult] = { - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startCounter(subtypeAppInfos) else null + val start = if (settings.areStatisticsEnabled) statistics.startCounter(subtypeAppInfos) else null val computation = new ImplicitComputation(iss, isLocalToCallsite) { } val applicable = computation.findAll() - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopCounter(subtypeAppInfos, start) + if (settings.areStatisticsEnabled) statistics.stopCounter(subtypeAppInfos, start) applicable } @@ -1323,13 +1323,13 @@ trait Implicits { * such that some part of `tp` has C as one of its superclasses. */ private def implicitsOfExpectedType: Infoss = { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(implicitCacheAccs) + if (settings.areStatisticsEnabled) statistics.incCounter(implicitCacheAccs) implicitsCache get pt match { case Some(implicitInfoss) => - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(implicitCacheHits) + if (settings.areStatisticsEnabled) statistics.incCounter(implicitCacheHits) implicitInfoss case None => - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(subtypeETNanos) else null + val start = if (settings.areStatisticsEnabled) statistics.startTimer(subtypeETNanos) else null // val implicitInfoss = companionImplicits(pt) val implicitInfoss1 = companionImplicitMap(pt).valuesIterator.toList // val is1 = implicitInfoss.flatten.toSet @@ -1338,7 +1338,7 @@ trait Implicits { // if (!(is2 contains i)) println("!!! implicit infos of "+pt+" differ, new does not contain "+i+",\nold: "+implicitInfoss+",\nnew: "+implicitInfoss1) // for (i <- is2) // if (!(is1 contains i)) println("!!! 
implicit infos of "+pt+" differ, old does not contain "+i+",\nold: "+implicitInfoss+",\nnew: "+implicitInfoss1) - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(subtypeETNanos, start) + if (settings.areStatisticsEnabled) statistics.stopTimer(subtypeETNanos, start) implicitsCache(pt) = implicitInfoss1 if (implicitsCache.size >= sizeLimit) implicitsCache -= implicitsCache.keysIterator.next @@ -1565,7 +1565,7 @@ trait Implicits { * If all fails return SearchFailure */ def bestImplicit: SearchResult = { - val stats = StatisticsStatics.areSomeColdStatsEnabled + val stats = settings.areStatisticsEnabled val failstart = if (stats) statistics.startTimer(inscopeFailNanos) else null val succstart = if (stats) statistics.startTimer(inscopeSucceedNanos) else null diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala index f9a047c3c28a..ee5ada60b355 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala @@ -17,7 +17,7 @@ import java.lang.Math.min import symtab.Flags._ import scala.reflect.internal.util.ScalaClassLoader import scala.reflect.runtime.ReflectionUtils -import scala.reflect.internal.util.{Statistics, StatisticsStatics} +import scala.reflect.internal.util.Statistics import scala.reflect.internal.TypesStats import scala.reflect.macros.util._ import scala.util.control.ControlThrowable @@ -615,8 +615,8 @@ trait Macros extends MacroRuntimes with Traces with Helpers { if (macroDebugVerbose) println(s"macroExpand: ${summary()}") linkExpandeeAndDesugared(expandee, desugared) - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(statistics.macroExpandNanos) else null - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(statistics.macroExpandCount) + val start = if (settings.areStatisticsEnabled) statistics.startTimer(statistics.macroExpandNanos) else null + if (settings.areStatisticsEnabled) statistics.incCounter(statistics.macroExpandCount) try { withInfoLevel(nodePrinters.InfoLevel.Quiet) { // verbose printing might cause recursive macro expansions if (expandee.symbol.isErroneous || (expandee exists (_.isErroneous))) { @@ -649,7 +649,7 @@ trait Macros extends MacroRuntimes with Traces with Helpers { } } } finally { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(statistics.macroExpandNanos, start) + if (settings.areStatisticsEnabled) statistics.stopTimer(statistics.macroExpandNanos, start) } } } diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 23cc781428c0..a1042d400f96 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -21,7 +21,7 @@ package tools.nsc package typechecker import scala.collection.mutable -import scala.reflect.internal.util.{FreshNameCreator, ListOfNil, Statistics, StatisticsStatics} +import scala.reflect.internal.util.{FreshNameCreator, ListOfNil, Statistics} import scala.reflect.internal.TypesStats import mutable.ListBuffer import symtab.Flags._ @@ -688,13 +688,13 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def silent[T](op: Typer => T, reportAmbiguousErrors: Boolean = context.ambiguousErrors, newtree: Tree = context.tree): SilentResult[T] = { - val findMemberStart = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startCounter(findMemberFailed) else 
null - val subtypeStart = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startCounter(subtypeFailed) else null - val failedSilentStart = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(failedSilentNanos) else null + val findMemberStart = if (settings.areStatisticsEnabled) statistics.startCounter(findMemberFailed) else null + val subtypeStart = if (settings.areStatisticsEnabled) statistics.startCounter(subtypeFailed) else null + val failedSilentStart = if (settings.areStatisticsEnabled) statistics.startTimer(failedSilentNanos) else null def stopStats() = { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopCounter(findMemberFailed, findMemberStart) - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopCounter(subtypeFailed, subtypeStart) - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(failedSilentNanos, failedSilentStart) + if (settings.areStatisticsEnabled) statistics.stopCounter(findMemberFailed, findMemberStart) + if (settings.areStatisticsEnabled) statistics.stopCounter(subtypeFailed, subtypeStart) + if (settings.areStatisticsEnabled) statistics.stopTimer(failedSilentNanos, failedSilentStart) } @inline def wrapResult(reporter: ContextReporter, result: T) = if (reporter.hasErrors) { @@ -4063,9 +4063,9 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def isCapturedExistential(sym: Symbol) = ( (sym hasAllFlags EXISTENTIAL | CAPTURED) && { - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(isReferencedNanos) else null + val start = if (settings.areStatisticsEnabled) statistics.startTimer(isReferencedNanos) else null try !isReferencedFrom(context, sym) - finally if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(isReferencedNanos, start) + finally if (settings.areStatisticsEnabled) statistics.stopTimer(isReferencedNanos, start) } ) @@ -4761,10 +4761,10 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper * insert an implicit conversion. */ def tryTypedApply(fun: Tree, args: List[Tree]): Tree = { - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(failedApplyNanos) else null + val start = if (settings.areStatisticsEnabled) statistics.startTimer(failedApplyNanos) else null def onError(typeErrors: Seq[AbsTypeError], warnings: Seq[(Position, String, WarningCategory, Symbol)]): Tree = { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(failedApplyNanos, start) + if (settings.areStatisticsEnabled) statistics.stopTimer(failedApplyNanos, start) // If the problem is with raw types, convert to existentials and try again. 
// See #4712 for a case where this situation arises, @@ -4833,8 +4833,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // TODO: replace `fun.symbol.isStable` by `treeInfo.isStableIdentifierPattern(fun)` val stableApplication = (fun.symbol ne null) && fun.symbol.isMethod && fun.symbol.isStable val funpt = if (mode.inPatternMode) pt else WildcardType - val appStart = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(failedApplyNanos) else null - val opeqStart = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(failedOpEqNanos) else null + val appStart = if (settings.areStatisticsEnabled) statistics.startTimer(failedApplyNanos) else null + val opeqStart = if (settings.areStatisticsEnabled) statistics.startTimer(failedOpEqNanos) else null def isConversionCandidate(qual: Tree, name: Name): Boolean = !mode.inPatternMode && nme.isOpAssignmentName(TermName(name.decode)) && !qual.exists(_.isErroneous) @@ -4864,7 +4864,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper case Select(qual, name) if isConversionCandidate(qual, name) => val qual1 = typedQualifier(qual) if (treeInfo.isVariableOrGetter(qual1)) { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(failedOpEqNanos, opeqStart) + if (settings.areStatisticsEnabled) statistics.stopTimer(failedOpEqNanos, opeqStart) val erred = qual1.exists(_.isErroneous) || args.exists(_.isErroneous) if (erred) reportError(error) else { val convo = convertToAssignment(fun, qual1, name, args) @@ -4876,7 +4876,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } } } else { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(failedApplyNanos, appStart) + if (settings.areStatisticsEnabled) statistics.stopTimer(failedApplyNanos, appStart) val Apply(Select(qual2, _), args2) = tree val erred = qual2.exists(_.isErroneous) || args2.exists(_.isErroneous) reportError { @@ -4884,7 +4884,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } } case _ => - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(failedApplyNanos, appStart) + if (settings.areStatisticsEnabled) statistics.stopTimer(failedApplyNanos, appStart) reportError(error) } val silentResult = silent( @@ -4895,7 +4895,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper silentResult match { case SilentResultValue(fun1) => val fun2 = if (stableApplication) stabilizeFun(fun1, mode, pt) else fun1 - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(typedApplyCount) + if (settings.areStatisticsEnabled) statistics.incCounter(typedApplyCount) val noSecondTry = ( isPastTyper || context.inSecondTry @@ -5192,7 +5192,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper else UnstableTreeError(qualTyped) typedSelect(tree, qualStableOrError, name) } else { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(typedSelectCount) + if (settings.areStatisticsEnabled) statistics.incCounter(typedSelectCount) val qualTyped = checkDead(context, typedQualifier(qual, mode)) val tree1 = typedSelect(tree, qualTyped, name) @@ -5293,7 +5293,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def typedIdentOrWildcard(tree: Ident) = { val name = tree.name - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(typedIdentCount) + if (settings.areStatisticsEnabled) 
statistics.incCounter(typedIdentCount) if ((name == nme.WILDCARD && mode.typingPatternNotConstructor) || (name == tpnme.WILDCARD && mode.inTypeMode)) tree setType makeFullyDefined(pt) @@ -5756,9 +5756,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def typed(tree: Tree, mode: Mode, pt: Type): Tree = { lastTreeToTyper = tree - val statsEnabled = StatisticsStatics.areSomeHotStatsEnabled && settings.areStatisticsEnabled && settings.YhotStatisticsEnabled - val startByType = if (statsEnabled) statistics.pushTimer(byTypeStack, byTypeNanos(tree.getClass)) else null - if (statsEnabled) statistics.incCounter(visitsByType, tree.getClass) + val startByType = if (settings.areHotStatisticsEnabled) statistics.pushTimer(byTypeStack, byTypeNanos(tree.getClass)) else null + if (settings.areHotStatisticsEnabled) statistics.incCounter(visitsByType, tree.getClass) val shouldPrintTyping = printTypings && !phase.erasedTypes && !noPrintTyping(tree) val shouldPopTypingStack = shouldPrintTyping && typingStack.beforeNextTyped(tree, mode, pt, context) try { @@ -5828,7 +5827,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper throw ex } finally { if (shouldPopTypingStack) typingStack.pop(tree) - if (statsEnabled) statistics.popTimer(byTypeStack, startByType) + if (settings.areHotStatisticsEnabled) statistics.popTimer(byTypeStack, startByType) } } diff --git a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala index 6f92ef99d44c..8f373391bce2 100644 --- a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala +++ b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala @@ -16,7 +16,7 @@ package internal // todo implement in terms of BitSet import scala.collection.mutable -import util.{Statistics, StatisticsStatics} +import util.Statistics /** A base type sequence (BaseTypeSeq) is an ordered sequence spanning all the base types * of a type. 
It characterized by the following two laws: @@ -50,8 +50,8 @@ trait BaseTypeSeqs { */ class BaseTypeSeq protected[reflect] (private[BaseTypeSeqs] val parents: List[Type], private[BaseTypeSeqs] val elems: Array[Type]) { self => - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(baseTypeSeqCount) - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(baseTypeSeqLenTotal, elems.length) + if (settings.areStatisticsEnabled) statistics.incCounter(baseTypeSeqCount) + if (settings.areStatisticsEnabled) statistics.incCounter(baseTypeSeqLenTotal, elems.length) private[this] val typeSymbols = { val tmp = new Array[Int](elems.length) var i = 0 diff --git a/src/reflect/scala/reflect/internal/Scopes.scala b/src/reflect/scala/reflect/internal/Scopes.scala index cd8e6b49bbca..b75e00ecccc1 100644 --- a/src/reflect/scala/reflect/internal/Scopes.scala +++ b/src/reflect/scala/reflect/internal/Scopes.scala @@ -16,7 +16,7 @@ package internal import scala.annotation.tailrec import scala.collection.generic.Clearable -import scala.reflect.internal.util.{Statistics, StatisticsStatics} +import scala.reflect.internal.util.Statistics trait Scopes extends api.Scopes { self: SymbolTable => @@ -496,22 +496,22 @@ trait Scopes extends api.Scopes { self: SymbolTable => /** Create a new scope nested in another one with which it shares its elements */ final def newNestedScope(outer: Scope): Scope = { - val startTime = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(statistics.scopePopulationTime) else null + val startTime = if (settings.areStatisticsEnabled) statistics.startTimer(statistics.scopePopulationTime) else null val nested = newScope // not `new Scope`, we must allow the runtime reflection universe to mixin SynchronizedScopes! 
nested.elems = outer.elems nested.nestinglevel = outer.nestinglevel + 1 if (outer.hashtable ne null) nested.hashtable = java.util.Arrays.copyOf(outer.hashtable, outer.hashtable.length) - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(statistics.scopePopulationTime, startTime) + if (settings.areStatisticsEnabled) statistics.stopTimer(statistics.scopePopulationTime, startTime) nested } /** Create a new scope with given initial elements */ def newScopeWith(elems: Symbol*): Scope = { - val startTime = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(statistics.scopePopulationTime) else null + val startTime = if (settings.areStatisticsEnabled) statistics.startTimer(statistics.scopePopulationTime) else null val scope = newScope elems foreach scope.enter - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(statistics.scopePopulationTime, startTime) + if (settings.areStatisticsEnabled) statistics.stopTimer(statistics.scopePopulationTime, startTime) scope } diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index a35eed37410a..d786b8ef61cb 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -21,7 +21,7 @@ package internal import scala.collection.immutable import scala.collection.mutable.ListBuffer -import util.{ Statistics, shortClassOfInstance, StatisticsStatics } +import util.{ Statistics, shortClassOfInstance } import Flags._ import scala.annotation.tailrec import scala.reflect.io.{AbstractFile, NoAbstractFile} @@ -3229,7 +3229,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => * info for T in Test1 should be >: Nothing <: Test3[_] */ - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(typeSymbolCount) + if (settings.areStatisticsEnabled) statistics.incCounter(typeSymbolCount) } implicit val TypeSymbolTag = ClassTag[TypeSymbol](classOf[TypeSymbol]) @@ -3441,7 +3441,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => else super.toString ) - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(classSymbolCount) + if (settings.areStatisticsEnabled) statistics.incCounter(classSymbolCount) } implicit val ClassSymbolTag = ClassTag[ClassSymbol](classOf[ClassSymbol]) diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 7c0141c62dec..64d118208c58 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -22,7 +22,7 @@ import mutable.{ListBuffer, LinkedHashSet} import Flags._ import scala.util.control.ControlThrowable import scala.annotation.tailrec -import util.{Statistics, StatisticsStatics} +import util.Statistics import util.ThreeValues._ import Variance._ import Depth._ @@ -691,7 +691,7 @@ trait Types * = Int */ def asSeenFrom(pre: Type, clazz: Symbol): Type = { - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.pushTimer(typeOpsStack, asSeenFromNanos) else null + val start = if (settings.areStatisticsEnabled) statistics.pushTimer(typeOpsStack, asSeenFromNanos) else null try { val trivial = ( this.isTrivial @@ -707,7 +707,7 @@ trait Types if (m.capturedSkolems.isEmpty) tp1 else deriveType(m.capturedSkolems, _.cloneSymbol setFlag CAPTURED)(tp1) } - } finally if (StatisticsStatics.areSomeColdStatsEnabled) statistics.popTimer(typeOpsStack, start) + } finally if (settings.areStatisticsEnabled) statistics.popTimer(typeOpsStack, 
start) } /** The info of `sym`, seen as a member of this type. @@ -813,7 +813,7 @@ trait Types /** Is this type a subtype of that type? */ def <:<(that: Type): Boolean = { - if (StatisticsStatics.areSomeColdStatsEnabled) stat_<:<(that) + if (settings.areStatisticsEnabled) stat_<:<(that) else { (this eq that) || (if (explainSwitch) explain("<:", isSubType(_: Type, _: Type), this, that) @@ -845,26 +845,26 @@ trait Types }) def stat_<:<(that: Type): Boolean = { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(subtypeCount) - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.pushTimer(typeOpsStack, subtypeNanos) else null + if (settings.areStatisticsEnabled) statistics.incCounter(subtypeCount) + val start = if (settings.areStatisticsEnabled) statistics.pushTimer(typeOpsStack, subtypeNanos) else null val result = (this eq that) || (if (explainSwitch) explain("<:", isSubType(_: Type, _: Type), this, that) else isSubType(this, that)) - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.popTimer(typeOpsStack, start) + if (settings.areStatisticsEnabled) statistics.popTimer(typeOpsStack, start) result } /** Is this type a weak subtype of that type? True also for numeric types, i.e. Int weak_<:< Long. */ def weak_<:<(that: Type): Boolean = { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(subtypeCount) - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.pushTimer(typeOpsStack, subtypeNanos) else null + if (settings.areStatisticsEnabled) statistics.incCounter(subtypeCount) + val start = if (settings.areStatisticsEnabled) statistics.pushTimer(typeOpsStack, subtypeNanos) else null val result = ((this eq that) || (if (explainSwitch) explain("weak_<:", isWeakSubType, this, that) else isWeakSubType(this, that))) - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.popTimer(typeOpsStack, start) + if (settings.areStatisticsEnabled) statistics.popTimer(typeOpsStack, start) result } @@ -1517,8 +1517,8 @@ trait Types tpe.baseTypeSeqCache = tpWithoutTypeVars.baseTypeSeq lateMap paramToVar } else { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(compoundBaseTypeSeqCount) - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.pushTimer(typeOpsStack, baseTypeSeqNanos) else null + if (settings.areStatisticsEnabled) statistics.incCounter(compoundBaseTypeSeqCount) + val start = if (settings.areStatisticsEnabled) statistics.pushTimer(typeOpsStack, baseTypeSeqNanos) else null try { tpe.baseTypeSeqCache = undetBaseTypeSeq tpe.baseTypeSeqCache = @@ -1527,7 +1527,7 @@ trait Types else compoundBaseTypeSeq(tpe) } finally { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.popTimer(typeOpsStack, start) + if (settings.areStatisticsEnabled) statistics.popTimer(typeOpsStack, start) } // [Martin] suppressing memoization solves the problem with "same type after erasure" errors // when compiling with @@ -1550,13 +1550,13 @@ trait Types if (period != currentPeriod) { tpe.baseClassesPeriod = currentPeriod if (!isValidForBaseClasses(period)) { - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.pushTimer(typeOpsStack, baseClassesNanos) else null + val start = if (settings.areStatisticsEnabled) statistics.pushTimer(typeOpsStack, baseClassesNanos) else null try { tpe.baseClassesCache = null tpe.baseClassesCache = tpe.memo(computeBaseClasses(tpe))(tpe.typeSymbol :: _.baseClasses.tail) } finally { - if (StatisticsStatics.areSomeColdStatsEnabled) 
statistics.popTimer(typeOpsStack, start) + if (settings.areStatisticsEnabled) statistics.popTimer(typeOpsStack, start) } } } @@ -2449,13 +2449,13 @@ trait Types if (period != currentPeriod) { tpe.baseTypeSeqPeriod = currentPeriod if (!isValidForBaseClasses(period)) { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(typerefBaseTypeSeqCount) - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.pushTimer(typeOpsStack, baseTypeSeqNanos) else null + if (settings.areStatisticsEnabled) statistics.incCounter(typerefBaseTypeSeqCount) + val start = if (settings.areStatisticsEnabled) statistics.pushTimer(typeOpsStack, baseTypeSeqNanos) else null try { tpe.baseTypeSeqCache = undetBaseTypeSeq tpe.baseTypeSeqCache = tpe.baseTypeSeqImpl } finally { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.popTimer(typeOpsStack, start) + if (settings.areStatisticsEnabled) statistics.popTimer(typeOpsStack, start) } } } diff --git a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala index 1f93b6693d5a..eab662c751b9 100644 --- a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala +++ b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala @@ -86,8 +86,9 @@ object MutableSettings { @inline implicit def reflectSettingToBoolean(s: MutableSettings#BooleanSetting): Boolean = s.value implicit class SettingsOps(private val settings: MutableSettings) extends AnyVal { - @inline final def areStatisticsEnabled = StatisticsStatics.areSomeColdStatsEnabled && settings.YstatisticsEnabled - @inline final def isDebug: Boolean = StatisticsStatics.isDebug && settings.debug - @inline final def isDeveloper: Boolean = StatisticsStatics.isDeveloper && settings.developer + @inline final def areStatisticsEnabled = (StatisticsStatics.COLD_STATS_GETTER.invokeExact(): Boolean) && settings.YstatisticsEnabled + @inline final def areHotStatisticsEnabled = (StatisticsStatics.HOT_STATS_GETTER.invokeExact(): Boolean) && settings.YhotStatisticsEnabled + @inline final def isDebug: Boolean = (StatisticsStatics.DEBUG_GETTER.invokeExact(): Boolean) && settings.debug + @inline final def isDeveloper: Boolean = (StatisticsStatics.DEVELOPER_GETTER.invokeExact(): Boolean) && settings.developer } } diff --git a/src/reflect/scala/reflect/internal/tpe/FindMembers.scala b/src/reflect/scala/reflect/internal/tpe/FindMembers.scala index ba90a0ba3624..1a58aed24bc0 100644 --- a/src/reflect/scala/reflect/internal/tpe/FindMembers.scala +++ b/src/reflect/scala/reflect/internal/tpe/FindMembers.scala @@ -13,7 +13,7 @@ package scala.reflect.internal package tpe -import util.{ReusableInstance, StatisticsStatics} +import util.ReusableInstance import Flags._ trait FindMembers { @@ -50,10 +50,10 @@ trait FindMembers { // Main entry point def apply(): T = { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(findMemberCount) - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.pushTimer(typeOpsStack, findMemberNanos) else null + if (settings.areStatisticsEnabled) statistics.incCounter(findMemberCount) + val start = if (settings.areStatisticsEnabled) statistics.pushTimer(typeOpsStack, findMemberNanos) else null try searchConcreteThenDeferred - finally if (StatisticsStatics.areSomeColdStatsEnabled) statistics.popTimer(typeOpsStack, start) + finally if (settings.areStatisticsEnabled) statistics.popTimer(typeOpsStack, start) } protected def result: T @@ -315,11 +315,11 @@ trait 
FindMembers { // Assemble the result from the hand-rolled ListBuffer protected def result: Symbol = if (members eq null) { if (member0 == NoSymbol) { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(noMemberCount) + if (settings.areStatisticsEnabled) statistics.incCounter(noMemberCount) NoSymbol } else member0 } else { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(multMemberCount) + if (settings.areStatisticsEnabled) statistics.incCounter(multMemberCount) lastM.tl = Nil initBaseClasses.head.newOverloaded(tpe, members) } diff --git a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala index 9a619f6c5f61..8ae68044ef76 100644 --- a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala +++ b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala @@ -17,7 +17,6 @@ package tpe import scala.collection.mutable import scala.annotation.tailrec -import scala.reflect.internal.util.StatisticsStatics import Variance._ private[internal] trait GlbLubs { @@ -268,8 +267,8 @@ private[internal] trait GlbLubs { case Nil => NothingTpe case t :: Nil => t case _ => - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(lubCount) - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.pushTimer(typeOpsStack, lubNanos) else null + if (settings.areStatisticsEnabled) statistics.incCounter(lubCount) + val start = if (settings.areStatisticsEnabled) statistics.pushTimer(typeOpsStack, lubNanos) else null try { val res = lub(ts, lubDepth(ts)) // If the number of unapplied type parameters in all incoming @@ -287,7 +286,7 @@ private[internal] trait GlbLubs { finally { lubResults.clear() glbResults.clear() - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.popTimer(typeOpsStack, start) + if (settings.areStatisticsEnabled) statistics.popTimer(typeOpsStack, start) } } @@ -410,7 +409,7 @@ private[internal] trait GlbLubs { indent = indent + " " assert(indent.length <= 100) } - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(nestedLubCount) + if (settings.areStatisticsEnabled) statistics.incCounter(nestedLubCount) val res = lub0(ts) if (printLubs) { indent = indent stripSuffix " " @@ -435,14 +434,14 @@ private[internal] trait GlbLubs { case List() => AnyTpe case List(t) => t case ts0 => - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(lubCount) - val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.pushTimer(typeOpsStack, lubNanos) else null + if (settings.areStatisticsEnabled) statistics.incCounter(lubCount) + val start = if (settings.areStatisticsEnabled) statistics.pushTimer(typeOpsStack, lubNanos) else null try { glbNorm(ts0, lubDepth(ts0)) } finally { lubResults.clear() glbResults.clear() - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.popTimer(typeOpsStack, start) + if (settings.areStatisticsEnabled) statistics.popTimer(typeOpsStack, start) } } @@ -567,7 +566,7 @@ private[internal] trait GlbLubs { } } // if (settings.debug.value) { println(indent + "glb of " + ts + " at depth "+depth); indent = indent + " " } //DEBUG - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(nestedLubCount) + if (settings.areStatisticsEnabled) statistics.incCounter(nestedLubCount) glb0(ts) // if (settings.debug.value) { indent = indent.substring(0, indent.length() - 2); log(indent + "glb of " + ts + " is " + res) }//DEBUG } diff --git a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala 
b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala index b8025fe16d99..abc8ebe9ad9d 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala @@ -18,7 +18,6 @@ package tpe import scala.collection.{ mutable } import util.TriState import scala.annotation.tailrec -import scala.reflect.internal.util.StatisticsStatics trait TypeComparers { self: SymbolTable => @@ -104,7 +103,7 @@ trait TypeComparers { /** Do `tp1` and `tp2` denote equivalent types? */ def isSameType(tp1: Type, tp2: Type): Boolean = try { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(sametypeCount) + if (settings.areStatisticsEnabled) statistics.incCounter(sametypeCount) subsametypeRecursions += 1 //OPT cutdown on Function0 allocation //was: diff --git a/src/reflect/scala/reflect/internal/util/StatisticsStatics.java b/src/reflect/scala/reflect/internal/util/StatisticsStatics.java index 1143a64268a7..76c1644e18bf 100644 --- a/src/reflect/scala/reflect/internal/util/StatisticsStatics.java +++ b/src/reflect/scala/reflect/internal/util/StatisticsStatics.java @@ -26,15 +26,10 @@ public final class StatisticsStatics { private static final AlmostFinalValue DEBUG = new AlmostFinalValue(); private static final AlmostFinalValue DEVELOPER = new AlmostFinalValue(); - private static final MethodHandle COLD_STATS_GETTER = COLD_STATS.invoker; - private static final MethodHandle HOT_STATS_GETTER = HOT_STATS.invoker; - private static final MethodHandle DEBUG_GETTER = DEBUG.invoker; - private static final MethodHandle DEVELOPER_GETTER = DEVELOPER.invoker; - - public static boolean areSomeColdStatsEnabled() throws Throwable { return (boolean) COLD_STATS_GETTER.invokeExact(); } - public static boolean areSomeHotStatsEnabled() throws Throwable { return (boolean) HOT_STATS_GETTER.invokeExact(); } - public static boolean isDebug() throws Throwable { return (boolean) DEBUG_GETTER.invokeExact(); } - public static boolean isDeveloper() throws Throwable { return (boolean) DEVELOPER_GETTER.invokeExact(); } + public static final MethodHandle COLD_STATS_GETTER = COLD_STATS.invoker; + public static final MethodHandle HOT_STATS_GETTER = HOT_STATS.invoker; + public static final MethodHandle DEBUG_GETTER = DEBUG.invoker; + public static final MethodHandle DEVELOPER_GETTER = DEVELOPER.invoker; public static void enableColdStatsAndDeoptimize() { COLD_STATS.toggleOnAndDeoptimize(); } public static void enableHotStatsAndDeoptimize() { HOT_STATS.toggleOnAndDeoptimize(); } diff --git a/src/reflect/scala/reflect/io/AbstractFile.scala b/src/reflect/scala/reflect/io/AbstractFile.scala index 243b3c4e7daf..3abeaa1159df 100644 --- a/src/reflect/scala/reflect/io/AbstractFile.scala +++ b/src/reflect/scala/reflect/io/AbstractFile.scala @@ -121,7 +121,7 @@ abstract class AbstractFile extends Iterable[AbstractFile] { /** Does this abstract file denote an existing file? 
*/ def exists: Boolean = { - //if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(IOStats.fileExistsCount) + //if (settings.areStatisticsEnabled) statistics.incCounter(IOStats.fileExistsCount) (file eq null) || file.exists } diff --git a/src/reflect/scala/reflect/io/Path.scala b/src/reflect/scala/reflect/io/Path.scala index ea4f4d4a8531..c8558c789413 100644 --- a/src/reflect/scala/reflect/io/Path.scala +++ b/src/reflect/scala/reflect/io/Path.scala @@ -60,12 +60,12 @@ object Path { def apply(path: String): Path = apply(new JFile(path)) def apply(jfile: JFile): Path = try { def isFile = { - //if (StatisticsStatics.areSomeColdStatsEnabled) Statistics.incCounter(IOStats.fileIsFileCount) + //if (settings.areStatisticsEnabled) Statistics.incCounter(IOStats.fileIsFileCount) jfile.isFile } def isDirectory = { - //if (StatisticsStatics.areSomeColdStatsEnabled) Statistics.incCounter(IOStats.fileIsDirectoryCount) + //if (settings.areStatisticsEnabled) Statistics.incCounter(IOStats.fileIsDirectoryCount) jfile.isDirectory } @@ -201,16 +201,16 @@ class Path private[io] (val jfile: JFile) { def canRead = jfile.canRead() def canWrite = jfile.canWrite() def exists = { - //if (StatisticsStatics.areSomeColdStatsEnabled) Statistics.incCounter(IOStats.fileExistsCount) + //if (settings.areStatisticsEnabled) Statistics.incCounter(IOStats.fileExistsCount) try jfile.exists() catch { case ex: SecurityException => false } } def isFile = { - //if (StatisticsStatics.areSomeColdStatsEnabled) Statistics.incCounter(IOStats.fileIsFileCount) + //if (settings.areStatisticsEnabled) Statistics.incCounter(IOStats.fileIsFileCount) try jfile.isFile() catch { case ex: SecurityException => false } } def isDirectory = { - //if (StatisticsStatics.areSomeColdStatsEnabled) Statistics.incCounter(IOStats.fileIsDirectoryCount) + //if (settings.areStatisticsEnabled) Statistics.incCounter(IOStats.fileIsDirectoryCount) try jfile.isDirectory() catch { case ex: SecurityException => jfile.getPath == "." 
} } def isAbsolute = jfile.isAbsolute() From 1185f84584f64f3b2b0482cbd7dd266079cebfd2 Mon Sep 17 00:00:00 2001 From: Kai Date: Thu, 6 May 2021 12:36:54 +0100 Subject: [PATCH 153/769] Differentiate Scala 2 and Scala 3 wildcard identifier names This change names wildcards written with Scala 3 `?` syntax with `?$N` pattern instead of `_$N` used for Scala 2 wildcards There are two reasons for it: - To allow `kind-projector` to implement Scala 3 underscore syntax for type lambdas by transforming old-style underscores, but leaving Scala 3 underscores intact - To show a mildly more relevant error message, since a wildcard introduced by `?` will now also have a name with `?` in the error message --- .../scala/tools/nsc/ast/parser/Parsers.scala | 18 ++++++++++-------- test/files/neg/wildcards-future.check | 11 +++++++++++ test/files/neg/wildcards-future.scala | 11 +++++++++++ 3 files changed, 32 insertions(+), 8 deletions(-) create mode 100644 test/files/neg/wildcards-future.check create mode 100644 test/files/neg/wildcards-future.scala diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 42767df41f78..d410d7989af4 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -723,9 +723,8 @@ self => def isRawBar = isRawIdent && in.name == raw.BAR def isRawIdent = in.token == IDENTIFIER - def isWildcardType = - in.token == USCORE || - settings.isScala3 && isRawIdent && in.name == raw.QMARK + def isWildcardType = in.token == USCORE || isScala3WildcardType + def isScala3WildcardType = settings.isScala3 && isRawIdent && in.name == raw.QMARK def isIdent = in.token == IDENTIFIER || in.token == BACKQUOTED_IDENT def isMacro = in.token == IDENTIFIER && in.name == nme.MACROkw @@ -1140,8 +1139,10 @@ self => else atPos(start)(makeSafeTupleType(inParens(types()))) case _ => - if (isWildcardType) - wildcardType(in.skipToken()) + if (isWildcardType) { + val scala3Wildcard = isScala3WildcardType + wildcardType(in.skipToken(), scala3Wildcard) + } else path(thisOK = false, typeOK = true) match { case r @ SingletonTypeTree(_) => r @@ -1540,8 +1541,8 @@ self => * WildcardType ::= `_` TypeBounds * }}} */ - def wildcardType(start: Offset) = { - val pname = freshTypeName("_$") + def wildcardType(start: Offset, qmark: Boolean) = { + val pname = if (qmark) freshTypeName("?$") else freshTypeName("_$") val t = atPos(start)(Ident(pname)) val bounds = typeBounds() val param = atPos(t.pos union bounds.pos) { makeSyntheticTypeParam(pname, bounds) } @@ -2056,8 +2057,9 @@ self => final def argType(): Tree = { val start = in.offset if (isWildcardType) { + val scala3Wildcard = isScala3WildcardType in.nextToken() - if (in.token == SUBTYPE || in.token == SUPERTYPE) wildcardType(start) + if (in.token == SUBTYPE || in.token == SUPERTYPE) wildcardType(start, scala3Wildcard) else atPos(start) { Bind(tpnme.WILDCARD, EmptyTree) } } else typ() match { diff --git a/test/files/neg/wildcards-future.check b/test/files/neg/wildcards-future.check new file mode 100644 index 000000000000..0aedb6dd8b01 --- /dev/null +++ b/test/files/neg/wildcards-future.check @@ -0,0 +1,11 @@ +wildcards-future.scala:7: error: type mismatch; + found : Map[_$1,_$2] where type _$2 >: Null, type _$1 <: AnyRef + required: Map[String,String] + underscores : Map[String, String] // error wildcard variables starting with `_` + ^ +wildcards-future.scala:9: error: type mismatch; + found : Map[?$1,?$2] where type ?$2 >: Null, type ?$1 
<: AnyRef + required: Map[String,String] + qmarks : Map[String, String] // error – wildcard variables should start with `?` to differentiate from the old syntax + ^ +2 errors diff --git a/test/files/neg/wildcards-future.scala b/test/files/neg/wildcards-future.scala new file mode 100644 index 000000000000..54b7675813e7 --- /dev/null +++ b/test/files/neg/wildcards-future.scala @@ -0,0 +1,11 @@ +// scalac: -Xsource:3 +// +object Test { + val underscores: Map[_ <: AnyRef, _ >: Null] = Map() + val qmarks: Map[? <: AnyRef, ? >: Null] = Map() + + underscores : Map[String, String] // error wildcard variables starting with `_` + + qmarks : Map[String, String] // error – wildcard variables should start with `?` to differentiate from the old syntax + // (and have a mildly more readable error...) +} From e4a11fa1b83e8e36544159ca442af5860e0c9cf7 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Thu, 6 May 2021 15:37:02 +0200 Subject: [PATCH 154/769] No access boundary check between two protected Java members PR 9525 added access boundary checks when overriding a protected Java member. This check should only be done if the overriding member is defined in Scala, not if the (Scala) class inherits two members both defined in Java. --- src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 2 +- test/files/pos/t12349b/A.java | 7 +++++++ test/files/pos/t12349b/B.java | 7 +++++++ test/files/pos/t12349b/Test.scala | 1 + 4 files changed, 16 insertions(+), 1 deletion(-) create mode 100644 test/files/pos/t12349b/A.java create mode 100644 test/files/pos/t12349b/B.java create mode 100644 test/files/pos/t12349b/Test.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index 05ca87bb663c..08d24671876c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -383,7 +383,7 @@ abstract class RefChecks extends Transform { def isOverrideAccessOK = member.isPublic || { // member is public, definitely same or relaxed access (!other.isProtected || member.isProtected) && // if o is protected, so is m ((!isRootOrNone(ob) && ob.hasTransOwner(mb)) || // m relaxes o's access boundary - (other.isJavaDefined && other.isProtected)) // overriding a protected java member, see #3946 #12349 + (other.isJavaDefined && (member.isJavaDefined || other.isProtected))) // overriding a protected java member, see #3946 #12349 } if (!isOverrideAccessOK) { overrideAccessError() diff --git a/test/files/pos/t12349b/A.java b/test/files/pos/t12349b/A.java new file mode 100644 index 000000000000..aab1185d87ac --- /dev/null +++ b/test/files/pos/t12349b/A.java @@ -0,0 +1,7 @@ +package p; + +public class A { + public static class R { } + + /* package-protected */ R foo() { return null; } +} diff --git a/test/files/pos/t12349b/B.java b/test/files/pos/t12349b/B.java new file mode 100644 index 000000000000..735c91372a03 --- /dev/null +++ b/test/files/pos/t12349b/B.java @@ -0,0 +1,7 @@ +package q; + +public class B extends p.A { + public static class RR extends p.A.R { } + + /* package-protected */ RR foo() { return null; } +} diff --git a/test/files/pos/t12349b/Test.scala b/test/files/pos/t12349b/Test.scala new file mode 100644 index 000000000000..3f22fa033e08 --- /dev/null +++ b/test/files/pos/t12349b/Test.scala @@ -0,0 +1 @@ +class Test extends q.B From be57ce90a6630e0c49e43fda9a649cd132123e2e Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Thu, 6 May 2021 14:06:07 +0200 Subject: [PATCH 155/769] 
Perform override checks again for two Java-defined methods The shortcut that's removed here was added in PR 8643 but turned out to be incorrect. The bug fixed by that PR remains fixed, because the shortcut was only added as a "second layer", the underlying bug was fixed as well. Note that RefChecks only runs on Scala classes, so we only perform override checks of two Java-defined members if a Scala class inherits them. These checks are not optional, as shown by the test cases added here. --- .../tools/nsc/transform/OverridingPairs.scala | 3 --- test/files/neg/t12380.check | 8 ++++++++ test/files/neg/t12380/J.java | 14 ++++++++++++++ test/files/neg/t12380/Test.scala | 5 +++++ 4 files changed, 27 insertions(+), 3 deletions(-) create mode 100644 test/files/neg/t12380.check create mode 100644 test/files/neg/t12380/J.java create mode 100644 test/files/neg/t12380/Test.scala diff --git a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala index 6387ddde49d7..181390ec3858 100644 --- a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala +++ b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala @@ -53,9 +53,6 @@ abstract class OverridingPairs extends SymbolPairs { && !exclude(low) // this admits private, as one can't have a private member that matches a less-private member. && (lowMemberType matches (self memberType high)) ) // TODO we don't call exclude(high), should we? - - override def skipOwnerPair(lowClass: Symbol, highClass: Symbol): Boolean = - lowClass.isJavaDefined && highClass.isJavaDefined // javac is already checking this better than we could } private def bothJavaOwnedAndEitherIsField(low: Symbol, high: Symbol): Boolean = { diff --git a/test/files/neg/t12380.check b/test/files/neg/t12380.check new file mode 100644 index 000000000000..4b9f7ae63a68 --- /dev/null +++ b/test/files/neg/t12380.check @@ -0,0 +1,8 @@ +Test.scala:1: error: incompatible type in overriding +def m(): String (defined in trait I) + with def m(): Object (defined in class C); + found : (): Object + required: (): String +object Test extends p.J.C with p.J.I { + ^ +1 error diff --git a/test/files/neg/t12380/J.java b/test/files/neg/t12380/J.java new file mode 100644 index 000000000000..280cea1286b1 --- /dev/null +++ b/test/files/neg/t12380/J.java @@ -0,0 +1,14 @@ +package p; + +public class J { + public static class C { + public Object m() { return new Object(); } + } + public interface I { + public String m(); + } + + public static class Test extends C implements I { + @Override public String m() { return ""; } + } +} diff --git a/test/files/neg/t12380/Test.scala b/test/files/neg/t12380/Test.scala new file mode 100644 index 000000000000..976b42ffdb93 --- /dev/null +++ b/test/files/neg/t12380/Test.scala @@ -0,0 +1,5 @@ +object Test extends p.J.C with p.J.I { + def main(args: Array[String]): Unit = { + println((this: p.J.I).m.trim) + } +} From 5ad3b0577253fb82e133cadb10c6ff5578f20451 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Thu, 6 May 2021 14:09:07 +0200 Subject: [PATCH 156/769] Generate attributed trees when creating bridge methods The Select and This trees generated for bridge methods before only had a Symbol assigned, but no Type. This lead to an NPE in the attached test case. 
--- .../scala/tools/nsc/transform/Erasure.scala | 2 +- test/files/run/t12380/A.java | 28 +++++++++++++++++++ test/files/run/t12380/Test.scala | 7 +++++ 3 files changed, 36 insertions(+), 1 deletion(-) create mode 100644 test/files/run/t12380/A.java create mode 100644 test/files/run/t12380/Test.scala diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index c950d89fd258..751134fd6b07 100644 --- a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -652,7 +652,7 @@ abstract class Erasure extends InfoTransform val rhs = member.tpe match { case MethodType(Nil, FoldableConstantType(c)) => Literal(c) case _ => - val sel: Tree = Select(This(root), member) + val sel: Tree = gen.mkAttributedSelect(gen.mkAttributedThis(root), member) val bridgingCall = bridge.paramss.foldLeft(sel)((fun, vparams) => Apply(fun, vparams map Ident)) maybeWrap(bridgingCall) diff --git a/test/files/run/t12380/A.java b/test/files/run/t12380/A.java new file mode 100644 index 000000000000..1cdbd7e83bbf --- /dev/null +++ b/test/files/run/t12380/A.java @@ -0,0 +1,28 @@ +// filter: unchecked + +package p; + +public class A { + public static interface I { + public I w(); + } + + public static interface J> extends I { + @Override public R w(); + } + + public static interface K extends I { + @Override public K w(); + + public default String mK() { return "K"; } + } + + /* package-private */ static class B> implements J { + @Override public R w() { return (R) this; } + } + + public static class C> extends B implements J { } + + // OK in Java, also OK in Scala + public static class Test extends C implements K { } +} diff --git a/test/files/run/t12380/Test.scala b/test/files/run/t12380/Test.scala new file mode 100644 index 000000000000..abab74cde7da --- /dev/null +++ b/test/files/run/t12380/Test.scala @@ -0,0 +1,7 @@ +class Test extends p.A.C[Test] with p.A.K +object Test { + def main(args: Array[String]): Unit = { + assert((new Test).w.mK == "K") + assert((new p.A.Test).w.mK == "K") + } +} From b7caa901955c623793e8e78015075b00a629840c Mon Sep 17 00:00:00 2001 From: superseeker13 Date: Thu, 6 May 2021 10:58:58 -0400 Subject: [PATCH 157/769] Update template.js Replace deprecated :gt(0) with .slice(1) --- .../scala/tools/nsc/doc/html/resource/lib/template.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js index d6935dd01ee5..e8b44e9b6744 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js +++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js @@ -61,7 +61,7 @@ $(document).ready(function() { return $(elem).attr("data-hidden") == 'true'; }; - $("#linearization li:gt(0)").filter(function(){ + $("#linearization li").slice(1).filter(function(){ return isHiddenClass($(this).attr("name")); }).removeClass("in").addClass("out"); @@ -440,7 +440,7 @@ function filter() { var orderingAlphabetic = $("#order > ol > li.alpha").hasClass("in"); var orderingInheritance = $("#order > ol > li.inherit").hasClass("in"); var orderingGroups = $("#order > ol > li.group").hasClass("in"); - var hiddenSuperclassElementsLinearization = orderingInheritance ? $("#linearization > li:gt(0)") : $("#linearization > li.out"); + var hiddenSuperclassElementsLinearization = orderingInheritance ? 
$("#linearization > li").slice(1) : $("#linearization > li.out"); var hiddenSuperclassesLinearization = hiddenSuperclassElementsLinearization.map(function() { return $(this).attr("name"); }).get(); From 946b129615752a2786ffeb628b3bc0b4ca13ec07 Mon Sep 17 00:00:00 2001 From: Kai Date: Fri, 7 May 2021 00:52:53 +0100 Subject: [PATCH 158/769] Fix a bug with +_/-_ not parsing in type constructor position --- .../scala/tools/nsc/ast/parser/Parsers.scala | 14 +++++++------- test/files/pos/variant-placeholders-future.scala | 6 ++++++ 2 files changed, 13 insertions(+), 7 deletions(-) diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index d4f3f20c0df2..4013b9dd2a2f 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -1127,12 +1127,6 @@ self => val start = in.offset in.nextToken() atPos(start)(SingletonTypeTree(literal(isNegated = true, start = start))) - } else if ((in.name == raw.PLUS || in.name == raw.MINUS) && lookingAhead(in.token == USCORE)) { - val start = in.offset - val identName = in.name.encode.append("_").toTypeName - in.nextToken() - in.nextToken() - atPos(start)(Ident(identName)) } else { val start = in.offset simpleTypeRest(in.token match { @@ -1146,7 +1140,13 @@ self => else atPos(start)(makeSafeTupleType(inParens(types()))) case _ => - if (isWildcardType) + if ((in.name == raw.PLUS || in.name == raw.MINUS) && lookingAhead(in.token == USCORE)) { + val start = in.offset + val identName = in.name.encode.append("_").toTypeName + in.nextToken() + in.nextToken() + atPos(start)(Ident(identName)) + } else if (isWildcardType) wildcardType(in.skipToken()) else path(thisOK = false, typeOK = true) match { diff --git a/test/files/pos/variant-placeholders-future.scala b/test/files/pos/variant-placeholders-future.scala index cb2cf4c4cb20..49a80bf9bcf9 100644 --- a/test/files/pos/variant-placeholders-future.scala +++ b/test/files/pos/variant-placeholders-future.scala @@ -24,4 +24,10 @@ object Test { val optErr: - _ = opt.get val opt2: Int = opt1 } + + locally { + type `-_`[A] = A + type `+_`[A] = Option[A] + val optOpt: Option[ + _ [+_[-_[Int]]]] = Some(Some(Some(1))) + } } From 80aa2e8f943cc00dd4a92b32554d1079cc55a2c8 Mon Sep 17 00:00:00 2001 From: Ikko Ashimine Date: Sat, 8 May 2021 11:05:20 +0900 Subject: [PATCH 159/769] Fix typo in Logic.scala instantiatable -> instantiable --- src/compiler/scala/tools/nsc/transform/patmat/Logic.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala index d88f1505b7b6..551a54f9cf02 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala @@ -395,7 +395,7 @@ trait Logic extends Debugging { // according to subtyping, e.g., V = ConstantType(1) and V = Int are valid assignments // we rewrite V = C to a fresh boolean symbol, and model what we know about the variable's domain // in a prelude (the equality axioms) - // 1. a variable with a closed domain (of a sealed type) must be assigned one of the instantiatable types in its domain + // 1. a variable with a closed domain (of a sealed type) must be assigned one of the instantiable types in its domain // 2. 
for each variable V in props, and each constant C it is compared to, // compute which assignments imply each other (as in the example above: V = 1 implies V = Int) // and which assignments are mutually exclusive (V = String implies -(V = Int)) From 196c65648578d27339d143fd26c9f1fac6fcf8d0 Mon Sep 17 00:00:00 2001 From: Kai Date: Sat, 8 May 2021 23:32:42 +0100 Subject: [PATCH 160/769] Gate `-_`/`+_` parsing behind `-Xsource:3` to guarantee minimal disruption to existing code --- src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 2 +- test/files/neg/variant-placeholders-future.check | 4 ++-- test/files/neg/variant-placeholders-future.scala | 2 ++ test/files/neg/variant-placeholders-nofuture.check | 7 +++++++ test/files/neg/variant-placeholders-nofuture.scala | 8 ++++++++ test/files/pos/variant-placeholders-future.scala | 2 ++ 6 files changed, 22 insertions(+), 3 deletions(-) create mode 100644 test/files/neg/variant-placeholders-nofuture.check create mode 100644 test/files/neg/variant-placeholders-nofuture.scala diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 4013b9dd2a2f..97a261c72798 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -1140,7 +1140,7 @@ self => else atPos(start)(makeSafeTupleType(inParens(types()))) case _ => - if ((in.name == raw.PLUS || in.name == raw.MINUS) && lookingAhead(in.token == USCORE)) { + if (settings.isScala3 && (in.name == raw.PLUS || in.name == raw.MINUS) && lookingAhead(in.token == USCORE)) { val start = in.offset val identName = in.name.encode.append("_").toTypeName in.nextToken() diff --git a/test/files/neg/variant-placeholders-future.check b/test/files/neg/variant-placeholders-future.check index 1ae4080af390..d166e8d577a9 100644 --- a/test/files/neg/variant-placeholders-future.check +++ b/test/files/neg/variant-placeholders-future.check @@ -1,7 +1,7 @@ -variant-placeholders-future.scala:2: error: `=`, `>:`, or `<:` expected +variant-placeholders-future.scala:4: error: `=`, `>:`, or `<:` expected type -_ = Int // error -_ not allowed as a type def name without backticks ^ -variant-placeholders-future.scala:3: error: `=`, `>:`, or `<:` expected +variant-placeholders-future.scala:5: error: `=`, `>:`, or `<:` expected type +_ = Int // error +_ not allowed as a type def name without backticks ^ 2 errors diff --git a/test/files/neg/variant-placeholders-future.scala b/test/files/neg/variant-placeholders-future.scala index 973fb1f3b74d..75296ff945b4 100644 --- a/test/files/neg/variant-placeholders-future.scala +++ b/test/files/neg/variant-placeholders-future.scala @@ -1,3 +1,5 @@ +// scalac: -Xsource:3 +// object Test { type -_ = Int // error -_ not allowed as a type def name without backticks type +_ = Int // error +_ not allowed as a type def name without backticks diff --git a/test/files/neg/variant-placeholders-nofuture.check b/test/files/neg/variant-placeholders-nofuture.check new file mode 100644 index 000000000000..8cf591d0a32f --- /dev/null +++ b/test/files/neg/variant-placeholders-nofuture.check @@ -0,0 +1,7 @@ +variant-placeholders-nofuture.scala:5: error: ';' expected but '_' found. + val fnMinusPlus1: -_ => +_ = (_: Int).toLong // error -_/+_ won't parse without -Xsource:3 + ^ +variant-placeholders-nofuture.scala:6: error: ')' expected but '_' found. 
+ val fnMinusPlus2: (-_) => +_ = fnMinusPlus1 // error -_/+_ won't parse without -Xsource:3 + ^ +2 errors diff --git a/test/files/neg/variant-placeholders-nofuture.scala b/test/files/neg/variant-placeholders-nofuture.scala new file mode 100644 index 000000000000..5f638f68a84a --- /dev/null +++ b/test/files/neg/variant-placeholders-nofuture.scala @@ -0,0 +1,8 @@ +object Test { + type `-_` = Int + type `+_` = Long + + val fnMinusPlus1: -_ => +_ = (_: Int).toLong // error -_/+_ won't parse without -Xsource:3 + val fnMinusPlus2: (-_) => +_ = fnMinusPlus1 // error -_/+_ won't parse without -Xsource:3 + val fnMinusPlus3: -_ => (+_) = fnMinusPlus2 // error -_/+_ won't parse without -Xsource:3 +} diff --git a/test/files/pos/variant-placeholders-future.scala b/test/files/pos/variant-placeholders-future.scala index 49a80bf9bcf9..383db8420f85 100644 --- a/test/files/pos/variant-placeholders-future.scala +++ b/test/files/pos/variant-placeholders-future.scala @@ -1,3 +1,5 @@ +// scalac: -Xsource:3 +// object Test { type `-_` = Int type `+_` = Long From c60318f8247aa60a22fe795eea9c0d81d305316d Mon Sep 17 00:00:00 2001 From: Kai Date: Mon, 3 May 2021 23:20:30 +0100 Subject: [PATCH 161/769] Parse `+_` and `-_` in types as identifiers to support Scala 3.2 placeholder syntax This change allows `kind-projector` plugin to rewrite `+_` and `-_` tokens to type lambdas, in line with proposed syntax for Scala 3.2 in http://dotty.epfl.ch/docs/reference/changed-features/wildcards.html When used in conjunction with `-Xsource:3` this will let the user use `?` for wildcards and `_` for placeholders, letting the user cross-compile the same sources with Scala 3 with `-source:3.2` flag. This change is not source breaking since currently `+_` and `-_` fail to parse entirely, this change also does not allow the user to declare types with these names without backticks, they can only be used as part of a type tree. 
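For illustration, a minimal sketch of the kind of type tree this makes parseable under `-Xsource:3`; the backticked `+_` alias is a hypothetical stand-in for the type lambda that a plugin such as kind-projector would synthesize from the bare `+_` below:

    type `+_`[A] = Option[A]                            // stand-in alias, declared with backticks as required
    val cell: Either[String, +_[Int]] = Right(Some(1))  // the bare `+_[Int]` now parses as an application of the type identifier `+_`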
Gate `-_`/`+_` parsing behind `-Xsource:3` to guarantee minimal disruption to existing code --- .../scala/tools/nsc/ast/parser/Parsers.scala | 9 ++++- .../neg/variant-placeholders-future.check | 7 ++++ .../neg/variant-placeholders-future.scala | 6 ++++ .../neg/variant-placeholders-nofuture.check | 7 ++++ .../neg/variant-placeholders-nofuture.scala | 8 +++++ .../pos/variant-placeholders-future.scala | 35 +++++++++++++++++++ 6 files changed, 71 insertions(+), 1 deletion(-) create mode 100644 test/files/neg/variant-placeholders-future.check create mode 100644 test/files/neg/variant-placeholders-future.scala create mode 100644 test/files/neg/variant-placeholders-nofuture.check create mode 100644 test/files/neg/variant-placeholders-nofuture.scala create mode 100644 test/files/pos/variant-placeholders-future.scala diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 7df4b3a5b0b0..a724652a1aa1 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -1077,7 +1077,14 @@ self => simpleTypeRest(in.token match { case LPAREN => atPos(start)(makeSafeTupleType(inParens(types()), start)) case _ => - if (isWildcardType) + if (currentRun.isScala3 && (in.name == raw.PLUS || in.name == raw.MINUS) && lookingAhead(in.token == USCORE)) { + val start = in.offset + val identName = in.name.encode.append("_").toTypeName + in.nextToken() + in.nextToken() + atPos(start)(Ident(identName)) + } + else if (isWildcardType) wildcardType(in.skipToken()) else path(thisOK = false, typeOK = true) match { diff --git a/test/files/neg/variant-placeholders-future.check b/test/files/neg/variant-placeholders-future.check new file mode 100644 index 000000000000..e3361c5560a7 --- /dev/null +++ b/test/files/neg/variant-placeholders-future.check @@ -0,0 +1,7 @@ +variant-placeholders-future.scala:4: error: `=', `>:', or `<:' expected + type -_ = Int // error -_ not allowed as a type def name without backticks + ^ +variant-placeholders-future.scala:5: error: `=', `>:', or `<:' expected + type +_ = Int // error +_ not allowed as a type def name without backticks + ^ +two errors found diff --git a/test/files/neg/variant-placeholders-future.scala b/test/files/neg/variant-placeholders-future.scala new file mode 100644 index 000000000000..75296ff945b4 --- /dev/null +++ b/test/files/neg/variant-placeholders-future.scala @@ -0,0 +1,6 @@ +// scalac: -Xsource:3 +// +object Test { + type -_ = Int // error -_ not allowed as a type def name without backticks + type +_ = Int // error +_ not allowed as a type def name without backticks +} diff --git a/test/files/neg/variant-placeholders-nofuture.check b/test/files/neg/variant-placeholders-nofuture.check new file mode 100644 index 000000000000..b4148154918a --- /dev/null +++ b/test/files/neg/variant-placeholders-nofuture.check @@ -0,0 +1,7 @@ +variant-placeholders-nofuture.scala:5: error: ';' expected but '_' found. + val fnMinusPlus1: -_ => +_ = (_: Int).toLong // error -_/+_ won't parse without -Xsource:3 + ^ +variant-placeholders-nofuture.scala:6: error: ')' expected but '_' found. 
+ val fnMinusPlus2: (-_) => +_ = fnMinusPlus1 // error -_/+_ won't parse without -Xsource:3 + ^ +two errors found diff --git a/test/files/neg/variant-placeholders-nofuture.scala b/test/files/neg/variant-placeholders-nofuture.scala new file mode 100644 index 000000000000..5f638f68a84a --- /dev/null +++ b/test/files/neg/variant-placeholders-nofuture.scala @@ -0,0 +1,8 @@ +object Test { + type `-_` = Int + type `+_` = Long + + val fnMinusPlus1: -_ => +_ = (_: Int).toLong // error -_/+_ won't parse without -Xsource:3 + val fnMinusPlus2: (-_) => +_ = fnMinusPlus1 // error -_/+_ won't parse without -Xsource:3 + val fnMinusPlus3: -_ => (+_) = fnMinusPlus2 // error -_/+_ won't parse without -Xsource:3 +} diff --git a/test/files/pos/variant-placeholders-future.scala b/test/files/pos/variant-placeholders-future.scala new file mode 100644 index 000000000000..383db8420f85 --- /dev/null +++ b/test/files/pos/variant-placeholders-future.scala @@ -0,0 +1,35 @@ +// scalac: -Xsource:3 +// +object Test { + type `-_` = Int + type `+_` = Long + + val fnMinusPlus1: -_ => +_ = (_: Int).toLong + val fnMinusPlus2: (-_) => +_ = fnMinusPlus1 + val fnMinusPlus3: -_ => (+_) = fnMinusPlus2 + + val fnTupMinusPlus2: (=> -_, -_) => +_ = (a, b) => ((a: Int) + (b: Int)).toLong + def defMinusPlus2(byname: => -_, vararg: -_*): +_ = ((vararg.sum: Int) + (byname: -_)).toLong + val infixMinusPlus2: -_ Either +_ = Right[-_, +_](1L) + + val optPlus: Option[+_] = Some[ + _ ](1L) // spaces allowed + optPlus match { + case opt: Option[ + _ ] => + val opt1: + _ = opt.get + val opt2: Long = opt1 + } + + val optMinus: Option[-_] = Some[ - _ ](1) // spaces allowed + optMinus match { + case opt: Option[ - _ ] => + val opt1: `-_` = opt.get + val optErr: - _ = opt.get + val opt2: Int = opt1 + } + + locally { + type `-_`[A] = A + type `+_`[A] = Option[A] + val optOpt: Option[ + _ [+_[-_[Int]]]] = Some(Some(Some(1))) + } +} From 27afdd318cc18df909ff8d70a0e1ed2b5ba97452 Mon Sep 17 00:00:00 2001 From: Eugene Yokota Date: Sun, 9 May 2021 01:10:03 -0400 Subject: [PATCH 162/769] Fixes HashMap error message Fixes scala bug 12391 Scala 2.13 throws NoSuchElementException without any messages. It's expected to throw "key not found: x" This fixes it. 
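As a usage sketch of the new behaviour (mirroring the test added below), the exception message now names the missing key:

    scala.collection.immutable.HashMap(1 -> 1)(2)
    // before: throws java.util.NoSuchElementException with no message
    // after:  throws java.util.NoSuchElementException: key not found: 2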
--- src/library/scala/collection/immutable/HashMap.scala | 4 ++-- .../scala/collection/immutable/HashMapTest.scala | 12 ++++++++++++ 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/src/library/scala/collection/immutable/HashMap.scala b/src/library/scala/collection/immutable/HashMap.scala index 4ba5208aad5a..b37e1a0646c2 100644 --- a/src/library/scala/collection/immutable/HashMap.scala +++ b/src/library/scala/collection/immutable/HashMap.scala @@ -628,11 +628,11 @@ private final class BitmapIndexedMapNode[K, +V]( if ((dataMap & bitpos) != 0) { val index = indexFrom(dataMap, mask, bitpos) - if (key == getKey(index)) getValue(index) else throw new NoSuchElementException + if (key == getKey(index)) getValue(index) else throw new NoSuchElementException("key not found: " + key) } else if ((nodeMap & bitpos) != 0) { getNode(indexFrom(nodeMap, mask, bitpos)).apply(key, originalHash, keyHash, shift + BitPartitionSize) } else { - throw new NoSuchElementException + throw new NoSuchElementException("key not found: " + key) } } diff --git a/test/junit/scala/collection/immutable/HashMapTest.scala b/test/junit/scala/collection/immutable/HashMapTest.scala index d9f1bf53d27a..703a9f1f1f05 100644 --- a/test/junit/scala/collection/immutable/HashMapTest.scala +++ b/test/junit/scala/collection/immutable/HashMapTest.scala @@ -339,4 +339,16 @@ class HashMapTest extends AllocationTest{ check(cs => TreeMap(cs: _*)) // exercise special case for HashMap/HasForEachEntry check(cs => HashMap(cs: _*).withDefault(_ => ???)) // default cases } + + @Test + def noSuchElement(): Unit = { + val m = HashMap[Int, Int](1 -> 1) + try { + m(2) + } catch { + case e: NoSuchElementException => + assertEquals("key not found: 2", e.getMessage()) + case e: Throwable => throw e + } + } } From 0a1e7d95155754be1f0ccf19f9bb2fc1028ab942 Mon Sep 17 00:00:00 2001 From: Eugene Yokota Date: Sun, 9 May 2021 03:35:21 -0400 Subject: [PATCH 163/769] address reviews --- src/library/scala/collection/immutable/HashMap.scala | 4 ++-- .../scala/collection/immutable/HashMapTest.scala | 11 +++-------- 2 files changed, 5 insertions(+), 10 deletions(-) diff --git a/src/library/scala/collection/immutable/HashMap.scala b/src/library/scala/collection/immutable/HashMap.scala index b37e1a0646c2..c6fb4abe6e03 100644 --- a/src/library/scala/collection/immutable/HashMap.scala +++ b/src/library/scala/collection/immutable/HashMap.scala @@ -628,11 +628,11 @@ private final class BitmapIndexedMapNode[K, +V]( if ((dataMap & bitpos) != 0) { val index = indexFrom(dataMap, mask, bitpos) - if (key == getKey(index)) getValue(index) else throw new NoSuchElementException("key not found: " + key) + if (key == getKey(index)) getValue(index) else throw new NoSuchElementException(s"key not found: $key") } else if ((nodeMap & bitpos) != 0) { getNode(indexFrom(nodeMap, mask, bitpos)).apply(key, originalHash, keyHash, shift + BitPartitionSize) } else { - throw new NoSuchElementException("key not found: " + key) + throw new NoSuchElementException(s"key not found: $key") } } diff --git a/test/junit/scala/collection/immutable/HashMapTest.scala b/test/junit/scala/collection/immutable/HashMapTest.scala index 703a9f1f1f05..a73c02d000b3 100644 --- a/test/junit/scala/collection/immutable/HashMapTest.scala +++ b/test/junit/scala/collection/immutable/HashMapTest.scala @@ -8,6 +8,7 @@ import org.junit.runner.RunWith import org.junit.runners.JUnit4 import scala.tools.testkit.AllocationTest +import scala.tools.testkit.AssertUtil.assertThrows @RunWith(classOf[JUnit4]) class 
HashMapTest extends AllocationTest{ @@ -342,13 +343,7 @@ class HashMapTest extends AllocationTest{ @Test def noSuchElement(): Unit = { - val m = HashMap[Int, Int](1 -> 1) - try { - m(2) - } catch { - case e: NoSuchElementException => - assertEquals("key not found: 2", e.getMessage()) - case e: Throwable => throw e - } + assertThrows[NoSuchElementException](HashMap(1->1)(2), _ == "key not found: 2") + assertThrows[NoSuchElementException](HashMap.empty(3), _ == "key not found: 3") } } From ac638c278fd088c89e97d39eecb665bacdefa0bf Mon Sep 17 00:00:00 2001 From: Kai Date: Thu, 6 May 2021 12:36:54 +0100 Subject: [PATCH 164/769] Differentiate Scala 2 and Scala 3 wildcard identifier names This change names wildcards written with Scala 3 `?` syntax with `?$N` pattern instead of `_$N` used for Scala 2 wildcards There are two reasons for it: - To allow `kind-projector` to implement Scala 3 underscore syntax for type lambdas by transforming old-style underscores, but leaving Scala 3 underscores intact - To show a mildly more relevant error message, since a wildcard introduced by `?` will now also have a name with `?` in the error message --- .../scala/tools/nsc/ast/parser/Parsers.scala | 22 +++++++++---------- test/files/neg/wildcards-future.check | 11 ++++++++++ test/files/neg/wildcards-future.scala | 11 ++++++++++ 3 files changed, 33 insertions(+), 11 deletions(-) create mode 100644 test/files/neg/wildcards-future.check create mode 100644 test/files/neg/wildcards-future.scala diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index a724652a1aa1..5532d9328354 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -722,9 +722,8 @@ self => def isRawBar = isRawIdent && in.name == raw.BAR def isRawIdent = in.token == IDENTIFIER - def isWildcardType = - in.token == USCORE || - currentRun.isScala3 && isRawIdent && in.name == raw.QMARK + def isWildcardType = in.token == USCORE || isScala3WildcardType + def isScala3WildcardType = currentRun.isScala3 && isRawIdent && in.name == raw.QMARK def isIdent = in.token == IDENTIFIER || in.token == BACKQUOTED_IDENT def isMacro = in.token == IDENTIFIER && in.name == nme.MACROkw @@ -1083,10 +1082,10 @@ self => in.nextToken() in.nextToken() atPos(start)(Ident(identName)) - } - else if (isWildcardType) - wildcardType(in.skipToken()) - else + } else if (isWildcardType) { + val scala3Wildcard = isScala3WildcardType + wildcardType(in.skipToken(), scala3Wildcard) + } else path(thisOK = false, typeOK = true) match { case r @ SingletonTypeTree(_) => r case r => convertToTypeId(r) @@ -1483,8 +1482,8 @@ self => * WildcardType ::= `_' TypeBounds * }}} */ - def wildcardType(start: Offset) = { - val pname = freshTypeName("_$") + def wildcardType(start: Offset, qmark: Boolean) = { + val pname = if (qmark) freshTypeName("?$") else freshTypeName("_$") val t = atPos(start)(Ident(pname)) val bounds = typeBounds() val param = atPos(t.pos union bounds.pos) { makeSyntheticTypeParam(pname, bounds) } @@ -1996,8 +1995,9 @@ self => def argType(): Tree = { val start = in.offset if (isWildcardType) { + val scala3Wildcard = isScala3WildcardType in.nextToken() - if (in.token == SUBTYPE || in.token == SUPERTYPE) wildcardType(start) + if (in.token == SUBTYPE || in.token == SUPERTYPE) wildcardType(start, scala3Wildcard) else atPos(start) { Bind(tpnme.WILDCARD, EmptyTree) } } else typ() match { @@ -2364,7 +2364,7 @@ self => val vds = new 
ListBuffer[List[ValDef]] val start = in.offset def paramClause(): List[ValDef] = if (in.token == RPAREN) Nil else { - val implicitmod = + val implicitmod = if (in.token == IMPLICIT) { if (implicitOffset == -1) { implicitOffset = in.offset ; implicitSection = vds.length } else if (warnAt == -1) warnAt = in.offset diff --git a/test/files/neg/wildcards-future.check b/test/files/neg/wildcards-future.check new file mode 100644 index 000000000000..a5b4b23520f3 --- /dev/null +++ b/test/files/neg/wildcards-future.check @@ -0,0 +1,11 @@ +wildcards-future.scala:7: error: type mismatch; + found : Map[_$1,_$2] where type _$2 >: Null, type _$1 <: AnyRef + required: Map[String,String] + underscores : Map[String, String] // error wildcard variables starting with `_` + ^ +wildcards-future.scala:9: error: type mismatch; + found : Map[?$1,?$2] where type ?$2 >: Null, type ?$1 <: AnyRef + required: Map[String,String] + qmarks : Map[String, String] // error – wildcard variables should start with `?` to differentiate from the old syntax + ^ +two errors found diff --git a/test/files/neg/wildcards-future.scala b/test/files/neg/wildcards-future.scala new file mode 100644 index 000000000000..54b7675813e7 --- /dev/null +++ b/test/files/neg/wildcards-future.scala @@ -0,0 +1,11 @@ +// scalac: -Xsource:3 +// +object Test { + val underscores: Map[_ <: AnyRef, _ >: Null] = Map() + val qmarks: Map[? <: AnyRef, ? >: Null] = Map() + + underscores : Map[String, String] // error wildcard variables starting with `_` + + qmarks : Map[String, String] // error – wildcard variables should start with `?` to differentiate from the old syntax + // (and have a mildly more readable error...) +} From a5cd735d8b4b015550b7a229f8966ebfb9dfe596 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Fri, 26 Mar 2021 14:41:56 +0100 Subject: [PATCH 165/769] Update to Dotty 3.0.0-RC3, fix tests. - load Scala 3 compiler in sandboxed classloader. This change was added due to a conflict in reading the file compiler.properties. 
- Add new erasure mode for Scala 3 intersection types - Test erasure for constructors and SAM types - Implement erasure for Arrays and test it --- build.sbt | 27 +- project/DottySupport.scala | 8 +- .../scala/tools/nsc/tasty/TreeUnpickler.scala | 6 +- .../tools/nsc/tasty/bridge/ContextOps.scala | 3 + .../tools/nsc/tasty/bridge/FlagOps.scala | 8 +- .../tools/nsc/tasty/bridge/SymbolOps.scala | 46 +-- .../tools/nsc/tasty/bridge/TypeOps.scala | 11 +- .../scala/tools/nsc/transform/Erasure.scala | 6 +- .../scala/tools/nsc/typechecker/Typers.scala | 2 +- .../scala/tools/tasty/TastyFlags.scala | 19 +- .../scala/tools/tasty/TastyFormat.scala | 9 +- .../reflect/internal/StdAttachments.scala | 2 + .../scala/reflect/internal/SymbolPairs.scala | 4 +- .../scala/reflect/internal/Symbols.scala | 4 +- .../reflect/internal/transform/Erasure.scala | 288 ++++++++++++++++-- .../reflect/runtime/JavaUniverseForce.scala | 3 + .../dotty/tools/vulpix/ParallelTesting.scala | 11 + .../scala/tools/tastytest/ClasspathOps.scala | 10 + .../scala/tools/tastytest/Classpaths.scala | 17 ++ .../scala/tools/tastytest/Dotc.scala | 71 ++++- .../tools/tastytest/DotcDecompiler.scala | 15 +- .../scala/tools/tastytest/TastyTest.scala | 21 +- .../scala/tools/tastytest/package.scala | 5 + .../src-2/TestMacroCompat.check | 2 +- .../neg/src-2/TestCompiletimeQuoteType.check | 2 +- test/tasty/neg/src-3/ErasedTypes.scala | 2 + test/tasty/run/pre/tastytest/package.scala | 23 ++ .../pre/tastytest/reflectshims/Context.scala | 9 + .../pre/tastytest/reflectshims/Universe.scala | 8 + .../tastytest/reflectshims/impl/Context.scala | 17 ++ .../run/pre/tastytest/scala2Erasure/api.scala | 250 +++++++++++++++ .../run/src-2/tastytest/TestErasure.scala | 172 +++++++++++ .../tastytest/TestIntersectionErasure.scala | 12 + .../run/src-2/tastytest/TestReflection.scala | 18 ++ .../run/src-2/tastytest/TestSAMErasure.scala | 23 ++ .../src-3/tastytest/IntersectionErasure.scala | 28 ++ .../run/src-3/tastytest/Reflection.scala | 23 ++ .../run/src-3/tastytest/SAMErasure.scala | 18 ++ .../src-3/tastytest/dottyErasure/api.scala | 259 ++++++++++++++++ .../tools/tastytest/TastyTestJUnit.scala | 22 +- 40 files changed, 1358 insertions(+), 126 deletions(-) create mode 100644 src/tastytest/dotty/tools/vulpix/ParallelTesting.scala create mode 100644 src/tastytest/scala/tools/tastytest/ClasspathOps.scala create mode 100644 src/tastytest/scala/tools/tastytest/Classpaths.scala create mode 100644 test/tasty/run/pre/tastytest/reflectshims/Context.scala create mode 100644 test/tasty/run/pre/tastytest/reflectshims/Universe.scala create mode 100644 test/tasty/run/pre/tastytest/reflectshims/impl/Context.scala create mode 100644 test/tasty/run/pre/tastytest/scala2Erasure/api.scala create mode 100644 test/tasty/run/src-2/tastytest/TestErasure.scala create mode 100644 test/tasty/run/src-2/tastytest/TestIntersectionErasure.scala create mode 100644 test/tasty/run/src-2/tastytest/TestReflection.scala create mode 100644 test/tasty/run/src-2/tastytest/TestSAMErasure.scala create mode 100644 test/tasty/run/src-3/tastytest/IntersectionErasure.scala create mode 100644 test/tasty/run/src-3/tastytest/Reflection.scala create mode 100644 test/tasty/run/src-3/tastytest/SAMErasure.scala create mode 100644 test/tasty/run/src-3/tastytest/dottyErasure/api.scala diff --git a/build.sbt b/build.sbt index 4879f841d428..292a6cccb2dd 100644 --- a/build.sbt +++ b/build.sbt @@ -616,7 +616,9 @@ lazy val tastytest = configureAsSubproject(project) .settings( name := "scala-tastytest", description := "Scala 
TASTy Integration Testing Tool", - libraryDependencies ++= List(diffUtilsDep, TastySupport.scala3Compiler), + libraryDependencies ++= List( + diffUtilsDep, + ), Compile / scalacOptions ++= Seq("-feature", "-Xlint"), ) @@ -730,7 +732,7 @@ lazy val tasty = project.in(file("test") / "tasty") .settings(publish / skip := true) .settings( Test / fork := true, - libraryDependencies += junitInterfaceDep, + libraryDependencies ++= Seq(junitInterfaceDep, TastySupport.scala3Library), testOptions += Tests.Argument(TestFrameworks.JUnit, "-a", "-v"), Test / testOptions += Tests.Argument( s"-Dtastytest.src=${baseDirectory.value}", @@ -739,6 +741,27 @@ lazy val tasty = project.in(file("test") / "tasty") Compile / unmanagedSourceDirectories := Nil, Test / unmanagedSourceDirectories := List(baseDirectory.value/"test"), ) + .configs(TastySupport.CompilerClasspath, TastySupport.LibraryClasspath) + .settings( + inConfig(TastySupport.CompilerClasspath)(Defaults.configSettings), + inConfig(TastySupport.LibraryClasspath)(Defaults.configSettings), + libraryDependencies ++= Seq( + TastySupport.scala3Compiler % TastySupport.CompilerClasspath, + TastySupport.scala3Library % TastySupport.LibraryClasspath, + ), + javaOptions ++= { + import java.io.File.pathSeparator + val lib = (library / Compile / classDirectory).value.getAbsoluteFile() + val ref = (reflect / Compile / classDirectory).value.getAbsoluteFile() + val classpath = (TastySupport.CompilerClasspath / managedClasspath).value.seq.map(_.data) :+ lib + val libraryClasspath = (TastySupport.LibraryClasspath / managedClasspath).value.seq.map(_.data) :+ lib + Seq( + s"-Dtastytest.classpaths.dottyCompiler=${classpath.mkString(pathSeparator)}", + s"-Dtastytest.classpaths.dottyLibrary=${libraryClasspath.mkString(pathSeparator)}", + s"-Dtastytest.classpaths.scalaReflect=${ref}", + ) + }, + ) lazy val scalacheck = project.in(file("test") / "scalacheck") .dependsOn(library, reflect, compiler, scaladoc) diff --git a/project/DottySupport.scala b/project/DottySupport.scala index 94c29eed0701..8f9f0b056f5a 100644 --- a/project/DottySupport.scala +++ b/project/DottySupport.scala @@ -12,8 +12,12 @@ import sbt.librarymanagement.{ * Settings to support validation of TastyUnpickler against the release of dotty with the matching TASTy version */ object TastySupport { - val supportedTASTyRelease = "3.0.0-RC1" // TASTy version 28.0.1 - val scala3Compiler = "org.scala-lang" % "scala3-compiler_3.0.0-RC1" % supportedTASTyRelease + val supportedTASTyRelease = "3.0.0-RC3" // TASTy version 28.0.3 + val scala3Compiler = "org.scala-lang" % "scala3-compiler_3.0.0-RC3" % supportedTASTyRelease + val scala3Library = "org.scala-lang" % "scala3-library_3.0.0-RC3" % supportedTASTyRelease + + val CompilerClasspath = Configuration.of("TastySupport.CompilerClasspath", "TastySupport.CompilerClasspath") + val LibraryClasspath = Configuration.of("TastySupport.LibraryClasspath", "TastySupport.LibraryClasspath") } /** Settings needed to compile with Dotty, diff --git a/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala b/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala index 8a10f400b61a..2e2b742b5490 100644 --- a/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala +++ b/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala @@ -428,7 +428,7 @@ class TreeUnpickler[Tasty <: TastyUniverse]( if (tag === VALDEF) { if (flags.is(Inline) || ctx.owner.is(Trait)) flags |= FieldAccessor if (flags.not(Mutable)) flags |= Stable - if (flags.is(SingletonEnumFlags)) flags |= Object // we will encode dotty 
enum constants as objects (this needs to be corrected in bytecode) + if (flags.is(SingletonEnumInitFlags)) flags |= Object | Stable // we will encode dotty enum constants as objects (this needs to be corrected in bytecode) } if (ctx.owner.isClass) { if (tag === TYPEPARAM) flags |= Param @@ -595,6 +595,7 @@ class TreeUnpickler[Tasty <: TastyUniverse]( case PARAMalias => addFlag(ParamAlias) case EXPORTED => addFlag(Exported) case OPEN => addFlag(Open) + case INVISIBLE => addFlag(Invisible) case PRIVATEqualified => readByte() privateWithin = readWithin(ctx) @@ -751,6 +752,7 @@ class TreeUnpickler[Tasty <: TastyUniverse]( } val valueParamss = normalizeIfConstructor(vparamss, isCtor) val resType = effectiveResultType(sym, typeParams, tpt.tpe) + ctx.markAsMethod(sym) ctx.setInfo(sym, defn.DefDefType(if (isCtor) Nil else typeParams, valueParamss, resType)) } @@ -1001,7 +1003,7 @@ class TreeUnpickler[Tasty <: TastyUniverse]( (tag: @switch) match { case SELECTin => val name = readTastyName() - val qual = readTerm() + val qual = readTerm() if (inParentCtor) { assert(name.isSignedConstructor, s"Parent of ${ctx.owner} is not a constructor.") skipTree() diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala index 77fe08b23e7c..f3485f0ea3bd 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala @@ -429,6 +429,9 @@ trait ContextOps { self: TastyUniverse => final def markAsEnumSingleton(sym: Symbol): Unit = sym.updateAttachment(new u.DottyEnumSingleton(sym.name.toString)) + final def markAsMethod(sym: Symbol): Unit = + sym.updateAttachment(u.DottyMethod) + final def markAsOpaqueType(sym: Symbol, alias: Type): Unit = sym.updateAttachment(new u.DottyOpaqueTypeAlias(alias)) diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/FlagOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/FlagOps.scala index b4f88b88c886..8bdd53a0c655 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/FlagOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/FlagOps.scala @@ -24,13 +24,14 @@ trait FlagOps { self: TastyUniverse => object FlagSets { val TastyOnlyFlags: TastyFlagSet = ( - Erased | Internal | Inline | InlineProxy | Opaque | Extension | Given | Exported | Transparent | Enum | Infix - | Open | ParamAlias + Erased | Internal | Inline | InlineProxy | Opaque | Extension | Given | Exported | Transparent + | Enum | Infix | Open | ParamAlias | Invisible ) val TermParamOrAccessor: TastyFlagSet = Param | ParamSetter val ObjectCreationFlags: TastyFlagSet = Object | Lazy | Final | Stable val ObjectClassCreationFlags: TastyFlagSet = Object | Final - val SingletonEnumFlags: TastyFlagSet = Case | Static | Enum | Stable + val SingletonEnumInitFlags: TastyFlagSet = Case | Static | Enum + val SingletonEnumFlags: TastyFlagSet = SingletonEnumInitFlags | Stable val FieldAccessorFlags: TastyFlagSet = FieldAccessor | Stable val LocalFieldFlags: TastyFlagSet = Private | Local } @@ -90,6 +91,7 @@ trait FlagOps { self: TastyUniverse => if (flags.is(Open)) sb += "open" if (flags.is(ParamAlias)) sb += "" if (flags.is(Infix)) sb += "infix" + if (flags.is(Invisible)) sb += "" sb.mkString(" | ") } } diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/SymbolOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/SymbolOps.scala index 2dccefa5a129..004a14cefa1f 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/SymbolOps.scala +++ 
b/src/compiler/scala/tools/nsc/tasty/bridge/SymbolOps.scala @@ -17,6 +17,7 @@ import scala.tools.nsc.tasty.SafeEq import scala.tools.nsc.tasty.{TastyUniverse, TastyModes}, TastyModes._ import scala.tools.tasty.{TastyName, Signature, TastyFlags}, TastyName.SignedName, Signature.MethodSignature, TastyFlags._ import scala.tools.tasty.ErasedTypeRef +import scala.util.chaining._ /**This layer deals with selecting a member symbol from a type using a `TastyName`, * also contains factories for making type references to symbols. @@ -143,13 +144,13 @@ trait SymbolOps { self: TastyUniverse => val kind = if (tname.isTypeName) "type" else "term" def typeToString(tpe: Type) = { def inner(sb: StringBuilder, tpe: Type): StringBuilder = tpe match { - case u.SingleType(pre, sym) => inner(sb, pre) append '.' append ( - if (sym.isPackageObjectOrClass) s"`${sym.name}`" - else String valueOf sym.name - ) - case u.TypeRef(pre, sym, _) if sym.isTerm => - if ((pre eq u.NoPrefix) || (pre eq u.NoType)) sb append sym.name - else inner(sb, pre) append '.' append sym.name + case u.ThisType(cls) => sb append cls.fullNameString + case u.SingleType(pre, sym) => + if ((pre eq u.NoPrefix) || (pre eq u.NoType)) sb append sym.nameString + else inner(sb, pre) append '.' append sym.nameString + case u.TypeRef(pre, sym, _) => + if ((pre eq u.NoPrefix) || (pre eq u.NoType)) sb append sym.nameString + else inner(sb, pre) append '.' append sym.nameString case tpe => sb append tpe } inner(new StringBuilder(), tpe).toString @@ -170,7 +171,7 @@ trait SymbolOps { self: TastyUniverse => ctx.log(s"""<<< looking for overload in symbolOf[$space] @@ $qual: ${showSig(sig)}""") val member = space.member(encodeTermName(qual)) if (!(isSymbol(member) && hasType(member))) errorMissing(space, qual) - val (tyParamCount, argTpeRefs) = { + val (tyParamCount, paramRefs) = { val (tyParamCounts, params) = sig.params.partitionMap(identity) if (tyParamCounts.length > 1) { unsupportedError(s"method with unmergeable type parameters: $qual") @@ -179,24 +180,27 @@ trait SymbolOps { self: TastyUniverse => } def compareSym(sym: Symbol): Boolean = sym match { case sym: u.MethodSymbol => - val method = sym.tpe.asSeenFrom(space, sym.owner) - ctx.log(s">>> trying $sym: $method") - val params = method.paramss.flatten - val isJava = sym.isJavaDefined - NameErasure.sigName(method.finalResultType, isJava) === sig.result && - params.length === argTpeRefs.length && - (qual === TastyName.Constructor && tyParamCount === member.owner.typeParams.length - || tyParamCount === sym.typeParams.length) && - params.zip(argTpeRefs).forall { case (param, tpe) => NameErasure.sigName(param.tpe, isJava) === tpe } && { - ctx.log(s">>> selected ${showSym(sym)}: ${sym.tpe}") - true - } + val meth0 = u.unwrapWrapperTypes(sym.tpe.asSeenFrom(space, sym.owner)) + val paramSyms = meth0.paramss.flatten + val resTpe = meth0.finalResultType + val sameParamSize = paramSyms.length === paramRefs.length + def sameTyParamSize = tyParamCount === ( + if (qual === TastyName.Constructor) member.owner.typeParams.length + else sym.typeParams.length + ) + def sameParams = paramSyms.lazyZip(paramRefs).forall({ + case (paramSym, paramRef) => sameErasure(sym)(paramSym.tpe, paramRef) + }) + sameParamSize && sameTyParamSize && sameParams && sameErasure(sym)(resTpe, sig.result) case _ => ctx.log(s"""! 
member[$space]("$qual") ${showSym(sym)} is not a method""") false } member.asTerm.alternatives.find(compareSym).getOrElse( - typeError(s"No matching overload of $space.$qual with signature ${showSig(sig)}")) + typeError(s"No matching overload of $space.$qual with signature ${showSig(sig)}") + ).tap(overload => + ctx.log(s">>> selected ${showSym(overload)}: ${overload.tpe}") + ) } } diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala index 94d9645b8ca3..e67636a66753 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala @@ -240,6 +240,9 @@ trait TypeOps { self: TastyUniverse => bounds } + private[bridge] def sameErasure(sym: Symbol)(tpe: Type, ref: ErasedTypeRef)(implicit ctx: Context) = + NameErasure.sigName(tpe, sym) === ref + /** This is a port from Dotty of transforming a Method type to an ErasedTypeRef */ private[bridge] object NameErasure { @@ -279,9 +282,11 @@ trait TypeOps { self: TastyUniverse => else self } - def sigName(tp: Type, isJava: Boolean)(implicit ctx: Context): ErasedTypeRef = { - val normTp = translateFromRepeated(tp)(toArray = isJava) - erasedSigName(normTp.erasure) + def sigName(tp: Type, sym: Symbol)(implicit ctx: Context): ErasedTypeRef = { + val normTp = translateFromRepeated(tp)(toArray = sym.isJavaDefined) + erasedSigName( + u.erasure.erasure(sym)(normTp) + ) } private def erasedSigName(erased: Type)(implicit ctx: Context): ErasedTypeRef = erased match { diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index c950d89fd258..8eec39c7de05 100644 --- a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -575,7 +575,7 @@ abstract class Erasure extends InfoTransform debuglog("generating bridge from %s (%s): %s%s to %s: %s%s".format( other, flagsToString(newFlags), otpe, other.locationString, member, - specialErasure(root)(member.tpe), member.locationString) + specialErasure(root)(member.tpe, root), member.locationString) ) // the parameter symbols need to have the new owner @@ -1120,7 +1120,7 @@ abstract class Erasure extends InfoTransform gen.mkMethodCall( qual1(), fun.symbol, - List(specialErasure(fun.symbol)(arg.tpe)), + List(specialErasure(fun.symbol)(arg.tpe, fun.symbol)), Nil ), isArrayTest(qual1()) @@ -1355,7 +1355,7 @@ abstract class Erasure extends InfoTransform fields.dropFieldAnnotationsFromGetter(tree.symbol) try super.transform(tree1).clearType() - finally tpt setType specialErasure(tree1.symbol)(tree1.symbol.tpe).resultType + finally tpt setType specialErasure(tree1.symbol)(tree1.symbol.tpe, tree1.symbol).resultType case ApplyDynamic(qual, Literal(Constant(bootstrapMethodRef: Symbol)) :: _) => tree case _: Apply if tree1 ne tree => diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index c253fdc7e368..851994cf47c1 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -232,7 +232,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // (it erases in TypeTrees, but not in, e.g., the type a Function node) def phasedAppliedType(sym: Symbol, args: List[Type]) = { val tp = appliedType(sym, args) - if (phase.erasedTypes) erasure.specialScalaErasure(tp) else tp + if (phase.erasedTypes) 
erasure.specialScalaErasureFor(sym)(tp) else tp } def typedDocDef(docDef: DocDef, mode: Mode, pt: Type): Tree = diff --git a/src/compiler/scala/tools/tasty/TastyFlags.scala b/src/compiler/scala/tools/tasty/TastyFlags.scala index 0041a3e3f632..62e71e614859 100644 --- a/src/compiler/scala/tools/tasty/TastyFlags.scala +++ b/src/compiler/scala/tools/tasty/TastyFlags.scala @@ -60,8 +60,7 @@ object TastyFlags { final val Open = Enum.next final val ParamAlias = Open.next final val Infix = ParamAlias.next - - private[TastyFlags] final val maxFlag: Long = ParamAlias.shift + final val Invisible = Infix.next def optFlag(cond: Boolean)(flag: TastyFlagSet): TastyFlagSet = if (cond) flag else EmptyTastyFlags @@ -138,24 +137,10 @@ object TastyFlags { if (is(Open)) sb += "Open" if (is(ParamAlias)) sb += "ParamAlias" if (is(Infix)) sb += "Infix" + if (is(Invisible)) sb += "Invisible" sb.mkString(" | ") } } } - case class SingletonSets(val toLong: Long) extends AnyVal { - def map[A](f: TastyFlagSet => A): Iterable[A] = { - val buf = Iterable.newBuilder[A] - val orig = TastyFlagSet(toLong) - var flag = EmptyTastyFlags - while (flag.shift <= maxFlag) { - flag = flag.next - if (orig.is(flag)) { - buf += f(flag) - } - } - buf.result() - } - } - } diff --git a/src/compiler/scala/tools/tasty/TastyFormat.scala b/src/compiler/scala/tools/tasty/TastyFormat.scala index 73415a13199f..cc5d320d1dce 100644 --- a/src/compiler/scala/tools/tasty/TastyFormat.scala +++ b/src/compiler/scala/tools/tasty/TastyFormat.scala @@ -51,7 +51,7 @@ object TastyFormat { * is able to read final TASTy documents if the file's * `MinorVersion` is strictly less than the current value. */ - final val ExperimentalVersion: Int = 1 + final val ExperimentalVersion: Int = 3 /**This method implements a binary relation (`<:<`) between two TASTy versions. * We label the lhs `file` and rhs `compiler`. @@ -223,8 +223,9 @@ object TastyFormat { final val PARAMalias = 41 final val TRANSPARENT = 42 final val INFIX = 43 - final val EMPTYCLAUSE = 44 - final val SPLITCLAUSE = 45 + final val INVISIBLE = 44 + final val EMPTYCLAUSE = 45 + final val SPLITCLAUSE = 46 // Cat. 
2: tag Nat @@ -387,6 +388,7 @@ object TastyFormat { | PARAMalias | EXPORTED | OPEN + | INVISIBLE | ANNOTATION | PRIVATEqualified | PROTECTEDqualified => true @@ -449,6 +451,7 @@ object TastyFormat { case PARAMsetter => "PARAMsetter" case EXPORTED => "EXPORTED" case OPEN => "OPEN" + case INVISIBLE => "INVISIBLE" case PARAMalias => "PARAMalias" case EMPTYCLAUSE => "EMPTYCLAUSE" case SPLITCLAUSE => "SPLITCLAUSE" diff --git a/src/reflect/scala/reflect/internal/StdAttachments.scala b/src/reflect/scala/reflect/internal/StdAttachments.scala index 8f820ae11d0c..d13fd027586a 100644 --- a/src/reflect/scala/reflect/internal/StdAttachments.scala +++ b/src/reflect/scala/reflect/internal/StdAttachments.scala @@ -122,6 +122,8 @@ trait StdAttachments { class DottyOpaqueTypeAlias(val tpe: Type) + case object DottyMethod extends PlainAttachment + class QualTypeSymAttachment(val sym: Symbol) case object ConstructorNeedsFence extends PlainAttachment diff --git a/src/reflect/scala/reflect/internal/SymbolPairs.scala b/src/reflect/scala/reflect/internal/SymbolPairs.scala index 83a3d8abca22..495b3c4e18a6 100644 --- a/src/reflect/scala/reflect/internal/SymbolPairs.scala +++ b/src/reflect/scala/reflect/internal/SymbolPairs.scala @@ -43,12 +43,12 @@ abstract class SymbolPairs { def rootType: Type = self def lowType: Type = self memberType low - def lowErased: Type = erasure.specialErasure(base)(low.tpe) + def lowErased: Type = erasure.specialErasure(base)(low.tpe, low) def lowClassBound: Type = classBoundAsSeen(low.tpe.typeSymbol) def highType: Type = self memberType high def highInfo: Type = self memberInfo high - def highErased: Type = erasure.specialErasure(base)(high.tpe) + def highErased: Type = erasure.specialErasure(base)(high.tpe, high) def highClassBound: Type = classBoundAsSeen(high.tpe.typeSymbol) def isErroneous = low.tpe.isErroneous || high.tpe.isErroneous diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index be808ffdf20c..519f46ba4baa 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -261,7 +261,9 @@ trait Symbols extends api.Symbols { self: SymbolTable => rawatt = initPos final val id = nextId() // identity displayed when -uniqid - //assert(id != 3390, initName) + // assert(id != 11924, initName) + + def debugTasty = s"Symbol($this, #$id, ${flagString})" private[this] var _validTo: Period = NoPeriod diff --git a/src/reflect/scala/reflect/internal/transform/Erasure.scala b/src/reflect/scala/reflect/internal/transform/Erasure.scala index 981a0e3ce140..055234ada4f6 100644 --- a/src/reflect/scala/reflect/internal/transform/Erasure.scala +++ b/src/reflect/scala/reflect/internal/transform/Erasure.scala @@ -118,6 +118,7 @@ trait Erasure { abstract class ErasureMap extends TypeMap { def mergeParents(parents: List[Type]): Type + def eraseArray(arrayRef: Type, pre: Type, args: List[Type]): Type def eraseNormalClassRef(tref: TypeRef): Type = { val TypeRef(pre, clazz, args) = tref @@ -141,10 +142,7 @@ trait Erasure { case tref @ TypeRef(pre, sym, args) => def isDottyEnumSingleton(sym: Symbol): Boolean = sym.isModuleClass && sym.sourceModule.hasAttachment[DottyEnumSingleton] - if (sym eq ArrayClass) - if (unboundedGenericArrayLevel(tp) == 1) ObjectTpe - else if (args.head.typeSymbol.isBottomClass) arrayType(ObjectTpe) - else typeRef(apply(pre), sym, args map applyInArray) + if (sym eq ArrayClass) eraseArray(tp, pre, args) else if ((sym eq AnyClass) || (sym eq AnyValClass) || (sym 
eq SingletonClass)) ObjectTpe else if (sym eq UnitClass) BoxedUnitTpe else if (sym.isRefinementClass) apply(mergeParents(tp.parents)) @@ -152,7 +150,7 @@ trait Erasure { else if (isDottyEnumSingleton(sym)) apply(intersectionType(tp.parents)) // TODO [tasty]: dotty enum singletons are not modules. else if (sym.isClass) eraseNormalClassRef(tref) else sym.attachments.get[DottyOpaqueTypeAlias] match { - case Some(alias: DottyOpaqueTypeAlias) => apply(alias.tpe) // TODO [tasty]: refactor if we build-in opaque types + case Some(alias: DottyOpaqueTypeAlias) => apply(alias.tpe.asSeenFrom(pre, sym.owner)) // TODO [tasty]: refactor if we build-in opaque types case _ => apply(sym.info.asSeenFrom(pre, sym.owner)) // alias type or abstract type } case PolyType(tparams, restpe) => @@ -247,38 +245,47 @@ trait Erasure { * - for all other types, the type itself (with any sub-components erased) */ def erasure(sym: Symbol): ErasureMap = - if (sym == NoSymbol || !sym.enclClass.isJavaDefined) scalaErasure - else if (verifyJavaErasure && sym.isMethod) verifiedJavaErasure - else javaErasure + if (sym == NoSymbol) scalaErasure + else if (sym.enclClass.isJavaDefined) { + if (verifyJavaErasure && sym.isMethod) verifiedJavaErasure + else javaErasure + } + else if (sym.hasAttachment[DottyMethod.type]) scala3Erasure + else scalaErasure /** This is used as the Scala erasure during the erasure phase itself * It differs from normal erasure in that value classes are erased to ErasedValueTypes which * are then later converted to the underlying parameter type in phase posterasure. + * + * @param symOfTp used to determine the erasure mode for the type, + * e.g. in `SymbolPair#highErased`, `sym` may be an anonymous class for a SAM type, + * but `symOfTp` may be the a bridge method for the SAM method being erased. */ - def specialErasure(sym: Symbol)(tp: Type): Type = + def specialErasure(sym: Symbol)(tp: Type, symOfTp: Symbol): Type = if (sym != NoSymbol && sym.enclClass.isJavaDefined) erasure(sym)(tp) else if (sym.isClassConstructor) - specialConstructorErasure(sym.owner, tp) - else - specialScalaErasure(tp) + specialConstructorErasure(sym.owner, symOfTp, tp) + else { + specialScalaErasureFor(symOfTp)(tp) + } - def specialConstructorErasure(clazz: Symbol, tpe: Type): Type = { + def specialConstructorErasure(clazz: Symbol, ctor: Symbol, tpe: Type): Type = { tpe match { case PolyType(tparams, restpe) => - specialConstructorErasure(clazz, restpe) + specialConstructorErasure(clazz, ctor, restpe) case ExistentialType(tparams, restpe) => - specialConstructorErasure(clazz, restpe) + specialConstructorErasure(clazz, ctor, restpe) case mt @ MethodType(params, restpe) => MethodType( - cloneSymbolsAndModify(params, specialScalaErasure), - specialConstructorErasure(clazz, restpe)) + cloneSymbolsAndModify(params, specialScalaErasureFor(ctor)), + specialConstructorErasure(clazz, ctor, restpe)) case TypeRef(pre, `clazz`, args) => typeRef(pre, clazz, List()) case tp => if (!(clazz == ArrayClass || tp.isError)) assert(clazz == ArrayClass || tp.isError, s"!!! unexpected constructor erasure $tp for $clazz") - specialScalaErasure(tp) + specialScalaErasureFor(ctor)(tp) } } @@ -294,7 +301,8 @@ trait Erasure { * For this reason and others (such as distinguishing constructors from other methods) * erasure is now (Symbol, Type) => Type rather than Type => Type. */ - class ScalaErasureMap extends ErasureMap { + abstract class ScalaErasureMap extends ErasureMap with Scala2JavaArrayErasure { + /** In scala, calculate a useful parent. 
* An intersection such as `Object with Trait` erases to Trait. */ @@ -302,7 +310,42 @@ trait Erasure { intersectionDominator(parents) } - class JavaErasureMap extends ErasureMap { + trait Scala2JavaArrayErasure { self: ErasureMap => + + def eraseArray(arrayRef: Type, pre: Type, args: List[Type]): Type = + if (unboundedGenericArrayLevel(arrayRef) == 1) ObjectTpe + else if (args.head.typeSymbol.isBottomClass) arrayType(ObjectTpe) + else typeRef(self(pre), ArrayClass, args map applyInArray) + + } + + class Scala3ErasureMap extends ErasureMap { self => + + def mergeParents(parents: List[Type]): Type = { + erasedGlb(parents.map(self(_))) + } + + def mergeParentsInArray(parents: List[Type]): Type = { + erasedGlb(parents.map(super.applyInArray(_))) + } + + override def applyInArray(tp: Type): Type = { + tp match { + case RefinedType(parents, _) => + super.applyInArray(mergeParentsInArray(parents)) + case _ => + super.applyInArray(tp) + } + } + + def eraseArray(arrayRef: Type, pre: Type, args: List[Type]): Type = { + if (isGenericArrayElement(args.head)) ObjectTpe + else typeRef(self(pre), ArrayClass, args map applyInArray) + } + + } + + class JavaErasureMap extends ErasureMap with Scala2JavaArrayErasure { /** In java, always take the first parent. * An intersection such as `Object with Trait` erases to Object. */ @@ -314,14 +357,27 @@ trait Erasure { } object scalaErasure extends ScalaErasureMap + object scala3Erasure extends Scala3ErasureMap + + trait SpecialScalaErasure extends ErasureMap { + override def eraseDerivedValueClassRef(tref: TypeRef): Type = + ErasedValueType(tref.sym, erasedValueClassArg(tref)) + } /** This is used as the Scala erasure during the erasure phase itself * It differs from normal erasure in that value classes are erased to ErasedValueTypes which * are then later unwrapped to the underlying parameter type in phase posterasure. */ - object specialScalaErasure extends ScalaErasureMap { - override def eraseDerivedValueClassRef(tref: TypeRef): Type = - ErasedValueType(tref.sym, erasedValueClassArg(tref)) + object specialScalaErasure extends ScalaErasureMap with SpecialScalaErasure + + /** This is used as the Scala erasure for Scala 3 methods during the erasure phase itself. + * @see specialScalaErasure + */ + object specialScala3Erasure extends Scala3ErasureMap with SpecialScalaErasure + + def specialScalaErasureFor(sym: Symbol): ErasureMap = { + if (sym.hasAttachment[DottyMethod.type]) specialScala3Erasure + else specialScalaErasure } object javaErasure extends JavaErasureMap @@ -388,6 +444,180 @@ trait Erasure { } } + /** Scala 3 implementation of erasure for intersection types. + * @param components the erased component types of the intersection. + */ + def erasedGlb(components: List[Type]): Type = { + + /** A comparison function that induces a total order on erased types, + * where `A <= B` implies that the erasure of `A & B` should be A. + * + * This order respects the following properties: + * - ErasedValueTypes <= non-ErasedValueTypes + * - arrays <= non-arrays + * - primitives <= non-primitives + * - real classes <= traits + * - subtypes <= supertypes + * + * Since this isn't enough to order to unrelated classes, we use + * lexicographic ordering of the class symbol full name as a tie-breaker. + * This ensure that `A <= B && B <= A` iff `A =:= B`. + */ + def compareErasedGlb(tp1: Type, tp2: Type): Int = { + // this check is purely an optimization. 
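+ // (reference-identical types erase identically, so they compare as equal here without recursing into baseClasses or type arguments)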
+ if (tp1 eq tp2) return 0 + + val isEVT1 = tp1.isInstanceOf[ErasedValueType] + val isEVT2 = tp2.isInstanceOf[ErasedValueType] + if (isEVT1 && isEVT2) { + return compareErasedGlb( + tp1.asInstanceOf[ErasedValueType].valueClazz.tpe_*, + tp2.asInstanceOf[ErasedValueType].valueClazz.tpe_*) + } + else if (isEVT1) + return -1 + else if (isEVT2) + return 1 + + val sym1 = tp1.baseClasses.head + val sym2 = tp2.baseClasses.head + + def compareClasses: Int = { + if (sym1.isSubClass(sym2)) + -1 + else if (sym2.isSubClass(sym1)) + 1 + else + sym1.fullName.compareTo(sym2.fullName) + } + + val isArray1 = tp1.typeArgs.nonEmpty && sym1.isSubClass(definitions.ArrayClass) + val isArray2 = tp2.typeArgs.nonEmpty && sym2.isSubClass(definitions.ArrayClass) + if (isArray1 && isArray2) + return compareErasedGlb(tp1.typeArgs.head, tp2.typeArgs.head) + else if (isArray1) + return -1 + else if (isArray2) + return 1 + + val isPrimitive1 = sym1.isPrimitiveValueClass + val isPrimitive2 = sym2.isPrimitiveValueClass + if (isPrimitive1 && isPrimitive2) + return compareClasses + else if (isPrimitive1) + return -1 + else if (isPrimitive2) + return 1 + + val isRealClass1 = sym1.isClass && !sym1.isTrait + val isRealClass2 = sym2.isClass && !sym2.isTrait + if (isRealClass1 && isRealClass2) + return compareClasses + else if (isRealClass1) + return -1 + else if (isRealClass2) + return 1 + + compareClasses + } + + components.min((t, u) => compareErasedGlb(t, u)) + } + + /** Dotty implementation of Array Erasure: + * + * Is `Array[tp]` a generic Array that needs to be erased to `Object`? + * This is true if among the subtypes of `Array[tp]` there is either: + * - both a reference array type and a primitive array type + * (e.g. `Array[_ <: Int | String]`, `Array[_ <: Any]`) + * - or two different primitive array types (e.g. `Array[_ <: Int | Double]`) + * In both cases the erased lub of those array types on the JVM is `Object`. + */ + def isGenericArrayElement(tp: Type): Boolean = { + + object DottyTypeProxy { + + def unapply(tp: Type): Option[Type] = { + val superTpe = translucentSuperType(tp) + if (superTpe ne NoType) Some(superTpe) else None + } + + def translucentSuperType(tp: Type): Type = tp match { + case tp: TypeRef => + tp.sym.attachments.get[DottyOpaqueTypeAlias] match { + case Some(alias) => alias.tpe.asSeenFrom(tp.pre, tp.sym.owner) + case None => tp.sym.info.asSeenFrom(tp.pre, tp.sym.owner) + } + case tp: SingleType => tp.underlying + case tp: ThisType => tp.sym.typeOfThis + case tp: ConstantType => tp.value.tpe + case tp: RefinedType if tp.decls.nonEmpty => intersectionType(tp.parents) + case tp: PolyType => tp.resultType + case tp: ExistentialType => tp.underlying + case tp: TypeBounds => tp.hi + case tp: AnnotatedType => tp.underlying + case tp: SuperType => tp.thistpe.baseType(tp.supertpe.typeSymbol) + case tp => NoType + } + + } + + object DottyAndType { + def unapply(tp: RefinedType): Boolean = tp.decls.isEmpty + } + + /** A symbol that represents the sort of JVM array that values of type `t` can be stored in: + * - If we can always store such values in a reference array, return Object + * - If we can always store them in a specific primitive array, return the + * corresponding primitive class + * - Otherwise, return `NoSymbol`. + */ + def arrayUpperBound(tp: Type): Symbol = tp.dealias match { + case tp: TypeRef if tp.sym.isClass => + val cls = tp.sym + // Only a few classes have both primitives and references as subclasses. 
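+ // (namely Any, AnyVal and Singleton: an element of such a type may be either a primitive or a reference, so no single JVM array type can hold all values and we answer NoSymbol)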
+ if ((cls eq AnyClass) || (cls eq AnyValClass) || (cls eq SingletonClass)) + NoSymbol + // We only need to check for primitives because derived value classes in arrays are always boxed. + else if (cls.isPrimitiveValueClass) + cls + else + ObjectClass + case DottyTypeProxy(unwrapped) => + arrayUpperBound(unwrapped) + case tp @ DottyAndType() => + // Find first `p` in `parents` where `arrayUpperBound(p) ne NoSymbol` + @tailrec def loop(tps: List[Type]): Symbol = tps match { + case tp :: tps1 => + val ub = arrayUpperBound(tp) + if (ub ne NoSymbol) ub + else loop(tps1) + case nil => NoSymbol + } + loop(tp.parents) + case _ => + NoSymbol + } + + /** Can one of the JVM Array type store all possible values of type `t`? */ + def fitsInJVMArray(tp: Type): Boolean = arrayUpperBound(tp) ne NoSymbol + + def isOpaque(sym: Symbol) = !sym.isClass && sym.hasAttachment[DottyOpaqueTypeAlias] + + tp.dealias match { + case tp: TypeRef if !isOpaque(tp.sym) => + !tp.sym.isClass && + !tp.sym.isJavaDefined && // In Java code, Array[T] can never erase to Object + !fitsInJVMArray(tp) + case DottyTypeProxy(unwrapped) => + isGenericArrayElement(unwrapped) + case tp @ DottyAndType() => + tp.parents.forall(isGenericArrayElement) + case tp => + false + } + } + /** The symbol's erased info. This is the type's erasure, except for the following primitive symbols: * * - $asInstanceOf --> [T]T @@ -407,15 +637,15 @@ trait Erasure { if (sym == Object_asInstanceOf || synchronizedPrimitive(sym)) sym.info else if (sym == Object_isInstanceOf || sym == ArrayClass) - PolyType(sym.info.typeParams, specialErasure(sym)(sym.info.resultType)) + PolyType(sym.info.typeParams, specialErasure(sym)(sym.info.resultType, sym)) else if (sym.isAbstractType) TypeBounds(WildcardType, WildcardType) // TODO why not use the erasure of the type's bounds, as stated in the doc? else if (sym.isTerm && sym.owner == ArrayClass) { if (sym.isClassConstructor) // TODO: switch on name for all branches -- this one is sym.name == nme.CONSTRUCTOR tp match { case MethodType(params, TypeRef(pre, sym1, args)) => - MethodType(cloneSymbolsAndModify(params, specialErasure(sym)), - typeRef(specialErasure(sym)(pre), sym1, args)) + MethodType(cloneSymbolsAndModify(params, tp => specialErasure(sym)(tp, sym)), + typeRef(specialErasure(sym)(pre, sym), sym1, args)) case x => throw new MatchError(x) } else if (sym.name == nme.apply) @@ -423,9 +653,9 @@ trait Erasure { else if (sym.name == nme.update) (tp: @unchecked) match { case MethodType(List(index, tvar), restpe) => - MethodType(List(index.cloneSymbol.setInfo(specialErasure(sym)(index.tpe)), tvar), UnitTpe) + MethodType(List(index.cloneSymbol.setInfo(specialErasure(sym)(index.tpe, sym)), tvar), UnitTpe) } - else specialErasure(sym)(tp) + else specialErasure(sym)(tp, sym) } else if ( sym.owner != NoSymbol && sym.owner.owner == ArrayClass && @@ -437,7 +667,7 @@ trait Erasure { } else { // TODO OPT: altogether, there are 9 symbols that we special-case. // Could we get to the common case more quickly by looking them up in a set? 
- specialErasure(sym)(tp) + specialErasure(sym)(tp, sym) } } } diff --git a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala index be33ed5a6651..5ca00953eff4 100644 --- a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala +++ b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala @@ -67,6 +67,7 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => this.UseInvokeSpecial this.TypeParamVarargsAttachment this.KnownDirectSubclassesCalled + this.DottyMethod this.ConstructorNeedsFence this.MultiargInfixAttachment this.NullaryOverrideAdapted @@ -520,7 +521,9 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => uncurry.DesugaredParameterType erasure.GenericArray erasure.scalaErasure + erasure.scala3Erasure erasure.specialScalaErasure + erasure.specialScala3Erasure erasure.javaErasure erasure.verifiedJavaErasure erasure.boxingErasure diff --git a/src/tastytest/dotty/tools/vulpix/ParallelTesting.scala b/src/tastytest/dotty/tools/vulpix/ParallelTesting.scala new file mode 100644 index 000000000000..fc1245e47de7 --- /dev/null +++ b/src/tastytest/dotty/tools/vulpix/ParallelTesting.scala @@ -0,0 +1,11 @@ +package dotty.tools.vulpix + +/** As of Scala 3.0.0-RC2, dotty compiler will enable the + * usage of experimental features if the compiler is invoked + * within a method on the class `dotty.tools.vulpix.ParallelTesting` + * + * We use this to test experimental features on non-nightly releases. + */ +class ParallelTesting { + def unlockExperimentalFeatures[T](op: => T): T = op +} diff --git a/src/tastytest/scala/tools/tastytest/ClasspathOps.scala b/src/tastytest/scala/tools/tastytest/ClasspathOps.scala new file mode 100644 index 000000000000..257eacf1d781 --- /dev/null +++ b/src/tastytest/scala/tools/tastytest/ClasspathOps.scala @@ -0,0 +1,10 @@ +package scala.tools.tastytest + +import java.net.URL +import java.nio.file.Paths + +object ClasspathOps { + implicit class ClassPathSyntax(private val ls: List[String]) extends AnyVal { + def asURLs: List[URL] = ls.map(Paths.get(_).toUri().toURL()) + } +} diff --git a/src/tastytest/scala/tools/tastytest/Classpaths.scala b/src/tastytest/scala/tools/tastytest/Classpaths.scala new file mode 100644 index 000000000000..5458966fe74d --- /dev/null +++ b/src/tastytest/scala/tools/tastytest/Classpaths.scala @@ -0,0 +1,17 @@ +package scala.tools.tastytest + +import scala.util.Properties +import java.io.File.pathSeparatorChar + +object Classpaths { + + private def classpathProp(name: String) = + Properties.propOrNone(name).map(_.split(pathSeparatorChar).filter(_.nonEmpty).toList).getOrElse(Nil) + + def dottyCompiler: List[String] = classpathProp("tastytest.classpaths.dottyCompiler") + + def scalaReflect: List[String] = classpathProp("tastytest.classpaths.scalaReflect") + + def dottyLibrary: List[String] = classpathProp("tastytest.classpaths.dottyLibrary") + +} diff --git a/src/tastytest/scala/tools/tastytest/Dotc.scala b/src/tastytest/scala/tools/tastytest/Dotc.scala index 0c0a7ebf3c8b..2e9d3b68a2cb 100644 --- a/src/tastytest/scala/tools/tastytest/Dotc.scala +++ b/src/tastytest/scala/tools/tastytest/Dotc.scala @@ -1,40 +1,73 @@ package scala.tools.tastytest -import scala.util.{ Try, Success } +import scala.util.{Try, Success, Failure} +import scala.util.control.NonFatal -import java.lang.reflect.Modifier +import scala.reflect.internal.util.ScalaClassLoader +import scala.reflect.runtime.ReflectionUtils +import java.lang.reflect.{Modifier, Method} + +import ClasspathOps._ object Dotc 
extends Script.Command { - private[this] lazy val dotcProcess = processMethod("dotty.tools.dotc.Main") + final case class ClassLoader private (val parent: ScalaClassLoader) + + def initClassloader(): Try[Dotc.ClassLoader] = + Try(Dotc.ClassLoader(ScalaClassLoader.fromURLs(Classpaths.dottyCompiler.asURLs))) - def processMethod(mainClassName: String): Array[String] => Try[Boolean] = { - // TODO call it directly when we are bootstrapped - val mainClass = Class.forName(mainClassName) - val reporterClass = Class.forName("dotty.tools.dotc.reporting.Reporter") + def loadClass(name: String)(implicit cl: Dotc.ClassLoader) = + Class.forName(name, true, cl.parent) + + def invokeStatic(method: Method, args: Seq[Any])(implicit cl: Dotc.ClassLoader) = { + assert(Modifier.isStatic(method.getModifiers), s"$method is not static!") + invoke(method, null, args) + } + + def invoke(method: Method, obj: AnyRef, args: Seq[Any])(implicit cl: Dotc.ClassLoader) = { + try cl.parent.asContext[AnyRef] { + method.invoke(obj, args.toArray:_*) + } + catch { + case NonFatal(ex) => throw ReflectionUtils.unwrapThrowable(ex) + } + } + + private def dotcProcess(args: Seq[String])(implicit cl: Dotc.ClassLoader) = processMethod("dotty.tools.dotc.Main")(args) + + def processMethod(mainClassName: String)(args: Seq[String])(implicit cl: Dotc.ClassLoader): Try[Boolean] = { + val mainClass = loadClass(mainClassName) + val reporterClass = loadClass("dotty.tools.dotc.reporting.Reporter") val Main_process = mainClass.getMethod("process", classOf[Array[String]]) - assert(Modifier.isStatic(Main_process.getModifiers), s"$mainClassName.process is not static!") val Reporter_hasErrors = reporterClass.getMethod("hasErrors") - args => Try { - val reporter = Main_process.invoke(null, args) - val hasErrors = Reporter_hasErrors.invoke(reporter).asInstanceOf[Boolean] + Try { + val reporter = unlockExperimentalFeatures(invokeStatic(Main_process, Seq(args.toArray))) + val hasErrors = invoke(Reporter_hasErrors, reporter, Seq.empty).asInstanceOf[Boolean] !hasErrors } } - def dotc(out: String, classpath: String, additionalSettings: Seq[String], sources: String*): Try[Boolean] = { + def dotcVersion(implicit cl: Dotc.ClassLoader): String = { + val compilerPropertiesClass = loadClass("dotty.tools.dotc.config.Properties") + val Properties_simpleVersionString = compilerPropertiesClass.getMethod("simpleVersionString") + invokeStatic(Properties_simpleVersionString, Seq.empty).asInstanceOf[String] + } + + def dotc(out: String, classpath: String, additionalSettings: Seq[String], sources: String*)(implicit cl: Dotc.ClassLoader): Try[Boolean] = { if (sources.isEmpty) { Success(true) } else { - val args = Array( + val libraryDeps = Classpaths.dottyLibrary ++ Classpaths.scalaReflect + val args = Seq( "-d", out, - "-classpath", classpath, + "-classpath", libraryDeps.mkString(classpath + Files.classpathSep, Files.classpathSep, ""), "-deprecation", - "-Yerased-terms", "-Xfatal-warnings", - "-usejavacp" ) ++ additionalSettings ++ sources + if (TastyTest.verbose) { + println(yellow(s"Invoking dotc (version $dotcVersion) with args: $args")) + } dotcProcess(args) } } @@ -48,6 +81,12 @@ object Dotc extends Script.Command { return 1 } val Seq(out, src) = args: @unchecked + implicit val scala3classloader: Dotc.ClassLoader = initClassloader() match { + case Success(cl) => cl + case Failure(err) => + println(red(s"could not initialise Scala 3 classpath: $err")) + return 1 + } val success = dotc(out, out, Nil, src).get if (success) 0 else 1 } diff --git 
a/src/tastytest/scala/tools/tastytest/DotcDecompiler.scala b/src/tastytest/scala/tools/tastytest/DotcDecompiler.scala index ff53ccd782b4..c10582a42bd5 100644 --- a/src/tastytest/scala/tools/tastytest/DotcDecompiler.scala +++ b/src/tastytest/scala/tools/tastytest/DotcDecompiler.scala @@ -1,13 +1,14 @@ package scala.tools.tastytest -import scala.util.Try +import scala.util.{Try, Success, Failure} object DotcDecompiler extends Script.Command { - private[this] lazy val dotcProcess = Dotc.processMethod("dotty.tools.dotc.decompiler.Main") + private def dotcProcess(args: Seq[String])(implicit cl: Dotc.ClassLoader) = + Dotc.processMethod("dotty.tools.dotc.decompiler.Main")(args) - def decompile(source: String, additionalSettings: Seq[String]): Try[Boolean] = - dotcProcess(("-usejavacp" +: additionalSettings :+ source).toArray) + def decompile(source: String, additionalSettings: Seq[String])(implicit cl: Dotc.ClassLoader): Try[Boolean] = + dotcProcess(("-usejavacp" +: additionalSettings :+ source)) val commandName: String = "dotcd" val describe: String = s"$commandName " @@ -18,6 +19,12 @@ object DotcDecompiler extends Script.Command { return 1 } val Seq(tasty, additionalSettings @ _*) = args: @unchecked + implicit val scala3classloader: Dotc.ClassLoader = Dotc.initClassloader() match { + case Success(cl) => cl + case Failure(err) => + println(red(s"could not initialise Scala 3 classpath: $err")) + return 1 + } val success = decompile(tasty, additionalSettings).get if (success) 0 else 1 } diff --git a/src/tastytest/scala/tools/tastytest/TastyTest.scala b/src/tastytest/scala/tools/tastytest/TastyTest.scala index be64ff8ca2f3..d3e9122adbdf 100644 --- a/src/tastytest/scala/tools/tastytest/TastyTest.scala +++ b/src/tastytest/scala/tools/tastytest/TastyTest.scala @@ -14,12 +14,12 @@ import Files._ object TastyTest { - private val verbose = false + private[tastytest] val verbose = false private def log(s: => String): Unit = if (verbose) println(s) - def runSuite(src: String, srcRoot: String, pkgName: String, outDir: Option[String], additionalSettings: Seq[String], additionalDottySettings: Seq[String]): Try[Unit] = for { + def runSuite(src: String, srcRoot: String, pkgName: String, outDir: Option[String], additionalSettings: Seq[String], additionalDottySettings: Seq[String])(implicit cl: Dotc.ClassLoader): Try[Unit] = for { (pre, src2, src3) <- getRunSources(srcRoot/src) out <- outDir.fold(tempDir(pkgName))(dir) _ <- scalacPos(out, sourceRoot=srcRoot/src/"pre", additionalSettings, pre:_*) @@ -29,7 +29,7 @@ object TastyTest { _ <- runMainOn(out, testNames:_*) } yield () - def posSuite(src: String, srcRoot: String, pkgName: String, outDir: Option[String], additionalSettings: Seq[String], additionalDottySettings: Seq[String]): Try[Unit] = for { + def posSuite(src: String, srcRoot: String, pkgName: String, outDir: Option[String], additionalSettings: Seq[String], additionalDottySettings: Seq[String])(implicit cl: Dotc.ClassLoader): Try[Unit] = for { (pre, src2, src3) <- getRunSources(srcRoot/src, preFilters = Set(Scala, Java)) _ = log(s"Sources to compile under test: ${src2.map(cyan).mkString(", ")}") out <- outDir.fold(tempDir(pkgName))(dir) @@ -39,14 +39,14 @@ object TastyTest { _ <- scalacPos(out, sourceRoot=srcRoot/src/"src-2", additionalSettings, src2:_*) } yield () - def negSuite(src: String, srcRoot: String, pkgName: String, outDir: Option[String], additionalSettings: Seq[String], additionalDottySettings: Seq[String]): Try[Unit] = for { + def negSuite(src: String, srcRoot: String, pkgName: String, 
outDir: Option[String], additionalSettings: Seq[String], additionalDottySettings: Seq[String])(implicit cl: Dotc.ClassLoader): Try[Unit] = for { (src2, src3) <- get2And3Sources(srcRoot/src, src2Filters = Set(Scala, Check, SkipCheck)) out <- outDir.fold(tempDir(pkgName))(dir) _ <- dotcPos(out, sourceRoot=srcRoot/src/"src-3", additionalDottySettings, src3:_*) _ <- scalacNeg(out, additionalSettings, src2:_*) } yield () - def negChangePreSuite(src: String, srcRoot: String, pkgName: String, outDirs: Option[(String, String)], additionalSettings: Seq[String], additionalDottySettings: Seq[String]): Try[Unit] = for { + def negChangePreSuite(src: String, srcRoot: String, pkgName: String, outDirs: Option[(String, String)], additionalSettings: Seq[String], additionalDottySettings: Seq[String])(implicit cl: Dotc.ClassLoader): Try[Unit] = for { (preA, preB, src2, src3) <- getMovePreChangeSources(srcRoot/src, src2Filters = Set(Scala, Check, SkipCheck)) (out1, out2) <- outDirs.fold(tempDir(pkgName) *> tempDir(pkgName))(p => dir(p._1) *> dir(p._2)) _ <- scalacPos(out1, sourceRoot=srcRoot/src/"pre-A", additionalSettings, preA:_*) @@ -55,7 +55,7 @@ object TastyTest { _ <- scalacNeg(out2, additionalSettings, src2:_*) } yield () - def negSuiteIsolated(src: String, srcRoot: String, pkgName: String, outDirs: Option[(String, String)], additionalSettings: Seq[String], additionalDottySettings: Seq[String]): Try[Unit] = for { + def negSuiteIsolated(src: String, srcRoot: String, pkgName: String, outDirs: Option[(String, String)], additionalSettings: Seq[String], additionalDottySettings: Seq[String])(implicit cl: Dotc.ClassLoader): Try[Unit] = for { (src2, src3A, src3B) <- getNegIsolatedSources(srcRoot/src, src2Filters = Set(Scala, Check, SkipCheck)) (out1, out2) <- outDirs.fold(tempDir(pkgName) *> tempDir(pkgName))(p => dir(p._1) *> dir(p._2)) _ <- dotcPos(out1, sourceRoot=srcRoot/src/"src-3-A", additionalDottySettings, src3A:_*) @@ -154,11 +154,12 @@ object TastyTest { } } - def dotcPos(out: String, sourceRoot: String, additionalSettings: Seq[String], sources: String*): Try[Unit] = dotcPos(out, out, sourceRoot, additionalSettings, sources:_*) + def dotcPos(out: String, sourceRoot: String, additionalSettings: Seq[String], sources: String*)(implicit cl: Dotc.ClassLoader): Try[Unit] = dotcPos(out, out, sourceRoot, additionalSettings, sources:_*) - def dotcPos(out: String, classpath: String, sourceRoot: String, additionalSettings: Seq[String], sources: String*): Try[Unit] = { + def dotcPos(out: String, classpath: String, sourceRoot: String, additionalSettings: Seq[String], sources: String*)(implicit cl: Dotc.ClassLoader): Try[Unit] = { log(s"compiling sources in ${yellow(sourceRoot)} with dotc.") - successWhen(Dotc.dotc(out, classpath, additionalSettings, sources:_*))("dotc failed to compile sources.") + val process = Dotc.dotc(out, classpath, additionalSettings, sources:_*) + successWhen(process)("dotc failed to compile sources.") } private def getSourceAsName(path: String): String = @@ -273,7 +274,7 @@ object TastyTest { } case Failure(err) => errors += test - printerrln(s"ERROR: $test failed: ${err.getClass.getSimpleName} ${err.getMessage}") + printerrln(s"ERROR: $test failed: ${err.getClass.getSimpleName} ${err.getMessage} in ${err.getStackTrace().mkString("\n ", "\n ", "")}") } } } diff --git a/src/tastytest/scala/tools/tastytest/package.scala b/src/tastytest/scala/tools/tastytest/package.scala index 95167f2e030e..1d5d745cd066 100644 --- a/src/tastytest/scala/tools/tastytest/package.scala +++ 
b/src/tastytest/scala/tools/tastytest/package.scala @@ -1,11 +1,16 @@ package scala.tools +import dotty.tools.vulpix.ParallelTesting + package object tastytest { import scala.util.Try import Files.{pathSep, classpathSep} + def unlockExperimentalFeatures[T](op: => T): T = + new ParallelTesting().unlockExperimentalFeatures(op) + def printerrln(str: String): Unit = System.err.println(red(str)) def printwarnln(str: String): Unit = System.err.println(yellow(str)) def printsuccessln(str: String): Unit = System.err.println(green(str)) diff --git a/test/tasty/neg-move-macros/src-2/TestMacroCompat.check b/test/tasty/neg-move-macros/src-2/TestMacroCompat.check index f69ad0abe7b5..07deaf926e69 100644 --- a/test/tasty/neg-move-macros/src-2/TestMacroCompat.check +++ b/test/tasty/neg-move-macros/src-2/TestMacroCompat.check @@ -1,4 +1,4 @@ -TestMacroCompat_fail.scala:7: error: can't find term required by object tastytest.MacroCompat: tastytest.`package`.Macros.posImpl; perhaps it is missing from the classpath. +TestMacroCompat_fail.scala:7: error: can't find term required by object tastytest.MacroCompat: tastytest.package.Macros.posImpl; perhaps it is missing from the classpath. val result = MacroCompat.testCase("foo")(pos) ^ 1 error diff --git a/test/tasty/neg/src-2/TestCompiletimeQuoteType.check b/test/tasty/neg/src-2/TestCompiletimeQuoteType.check index c041a36c4d04..5c2f3c01b8df 100644 --- a/test/tasty/neg/src-2/TestCompiletimeQuoteType.check +++ b/test/tasty/neg/src-2/TestCompiletimeQuoteType.check @@ -1,4 +1,4 @@ -TestCompiletimeQuoteType_fail.scala:4: error: Unsupported Scala 3 context function type in result: scala.quoted.Quotes ?=> scala.quoted.Type[T]; found in method of in object scala.quoted.Type. +TestCompiletimeQuoteType_fail.scala:4: error: could not find implicit value for evidence parameter of type scala.quoted.Type[Int] def test = CompiletimeQuoteType.f[Int] ^ 1 error diff --git a/test/tasty/neg/src-3/ErasedTypes.scala b/test/tasty/neg/src-3/ErasedTypes.scala index bafb95891012..a535369ebbdb 100644 --- a/test/tasty/neg/src-3/ErasedTypes.scala +++ b/test/tasty/neg/src-3/ErasedTypes.scala @@ -1,5 +1,7 @@ package tastytest +import language.experimental.erasedDefinitions + object ErasedTypes { trait Foo { diff --git a/test/tasty/run/pre/tastytest/package.scala b/test/tasty/run/pre/tastytest/package.scala index ccfd109a5f3a..fca544cff4fb 100644 --- a/test/tasty/run/pre/tastytest/package.scala +++ b/test/tasty/run/pre/tastytest/package.scala @@ -4,6 +4,29 @@ import scala.reflect.macros.blackbox.Context package object tastytest { + def anyObj[T]: T = null.asInstanceOf[T] + + trait Aspect { + def applyTo(op: => Unit): Unit + } + + implicit class AspectOps(op: => Unit) { + def @@(aspect: Aspect): Unit = aspect.applyTo(op) + } + + object ExpectCastOrNull extends Aspect { + def applyTo(op: => Unit): Unit = { + try { + op + throw new AssertionError("expected a failure") + } + catch { + case npe: NullPointerException => // swallow + case cce: ClassCastException => // swallow + } + } + } + implicit final class SafeEq[T](private val t: T) extends AnyVal { final def ===[U](u: U)(implicit ev: T =:= U): Boolean = t == u } diff --git a/test/tasty/run/pre/tastytest/reflectshims/Context.scala b/test/tasty/run/pre/tastytest/reflectshims/Context.scala new file mode 100644 index 000000000000..55c883114a9a --- /dev/null +++ b/test/tasty/run/pre/tastytest/reflectshims/Context.scala @@ -0,0 +1,9 @@ +package tastytest.reflectshims + +trait Context { + + type TreeShim = universe.TreeShim + + val universe: Universe 
+ +} diff --git a/test/tasty/run/pre/tastytest/reflectshims/Universe.scala b/test/tasty/run/pre/tastytest/reflectshims/Universe.scala new file mode 100644 index 000000000000..722a4b5a70e6 --- /dev/null +++ b/test/tasty/run/pre/tastytest/reflectshims/Universe.scala @@ -0,0 +1,8 @@ +package tastytest.reflectshims + +abstract class Universe { + type TreeShim >: Null <: AnyRef with TreeShimApi + trait TreeShimApi extends Product { this: TreeShim => } + + val EmptyTree: TreeShim +} diff --git a/test/tasty/run/pre/tastytest/reflectshims/impl/Context.scala b/test/tasty/run/pre/tastytest/reflectshims/impl/Context.scala new file mode 100644 index 000000000000..1ed77e3e3be0 --- /dev/null +++ b/test/tasty/run/pre/tastytest/reflectshims/impl/Context.scala @@ -0,0 +1,17 @@ +package tastytest.reflectshims.impl + +import tastytest.reflectshims + +object Context extends reflectshims.Context { + + object universe extends reflectshims.Universe { + + abstract class TreeShimImpl extends TreeShimApi with Product + + type TreeShim = TreeShimImpl + + case object EmptyTree extends TreeShimImpl + + } + +} diff --git a/test/tasty/run/pre/tastytest/scala2Erasure/api.scala b/test/tasty/run/pre/tastytest/scala2Erasure/api.scala new file mode 100644 index 000000000000..1797273250a4 --- /dev/null +++ b/test/tasty/run/pre/tastytest/scala2Erasure/api.scala @@ -0,0 +1,250 @@ +package tastytest + +// Keep synchronized with src-3/tastytest/dottyErasureApi/api.scala +package scala2Erasure + +class foo extends scala.annotation.StaticAnnotation + +trait A +trait B +trait SubB extends B +trait C +trait Cov[+T] +trait Univ extends Any + +class D + +class VC(val self: A) extends AnyVal +class VC2(val self: A) extends AnyVal + +class Outer { + class E + trait F extends E +} + +object OpaqueHolder { + type Q[T] = Cov[T] + type Y[T] = Cov[T] +} +import OpaqueHolder._ + +// The parameter type of `a_XX` should erase to A, `b_XX` to `B`, etc. 
+// This is enforced by dottyApp/Main.scala +class Z { self => + def a_01(a: A with B): Unit = {} + def b_02X(b: B with A): Unit = {} + def a_02(a: A with B with A): Unit = {} + def a_03(a: A with (B with A)): Unit = {} + def b_04(b: A with (B with A) @foo): Unit = {} + def b_04X(b: A with (B with C) @foo): Unit = {} + def b_05(b: A with (B with A) @foo with (C with B with A) @foo): Unit = {} + + type T1 <: A with B + def a_06(a: T1): Unit = {} + + type S <: B with T1 + def a_07(a: S): Unit = {} + + type T2 <: B with A + type U <: T2 with S + def b_08(b: U): Unit = {} + + val singB: B = new B {} + def a_09(a: A with singB.type): Unit = {} + def b_10(b: singB.type with A): Unit = {} + + type V >: SubB <: B + def b_11(b: V): Unit = {} + def b_12(b: V with SubB): Unit = {} + + def d_13(d: D with A): Unit = {} + def d_14(d: A with D): Unit = {} + + val singD: D = new D {} + def d_13x(d: singD.type with A): Unit = {} + def d_14x(d: A with singD.type): Unit = {} + + type DEq = D + def d_15(d: A with DEq): Unit = {} + def d_16(d: A with (DEq @foo)): Unit = {} + def d_17(d: DEq with A): Unit = {} + def d_18(d: (DEq @foo) with A): Unit = {} + + val singDEq: DEq @foo = new D {} + def d_15b(d: A with singDEq.type): Unit = {} + def d_16b(d: A with (singDEq.type @foo)): Unit = {} + + type DSub <: D + def a_19(a: A with DSub): Unit = {} + def d_19x(d: DSub with A): Unit = {} + def z_20(z: DSub with Z): Unit = {} + + type W1 <: A with Cov[Any] + type X1 <: Cov[Int] with W1 + def a_21(a: X1): Unit = {} + + type W2 <: A with Cov[Any] + type X2 <: Cov[Int] with W2 + def a_22(a: X2): Unit = {} + + def z_23(z: A with this.type): Unit = {} + def z_24(z: this.type with A): Unit = {} + + def b_25(b: A with (B { type T })): Unit = {} + def a_26(a: (A { type T }) with ((B with A) { type T })): Unit = {} + + def a_27(a: VC with B): Unit = {} + def a_28(a: B with VC): Unit = {} + + val o1: Outer = new Outer + val o2: Outer = new Outer + def f_29(f: o1.E with o1.F): Unit = {} + def f_30(f: o1.F with o1.E): Unit = {} + def f_31(f: o1.E with o2.F): Unit = {} + def f_32(f: o2.F with o1.E): Unit = {} + def f_33(f: Outer#E with Outer#F): Unit = {} + def f_34(f: Outer#F with Outer#E): Unit = {} + + val structural1: { type DSub <: D } = new { type DSub <: D } + def a_35(a: A with structural1.DSub): Unit = {} + def d_36(a: structural1.DSub with A): Unit = {} + def z_37(z: Z with structural1.DSub): Unit = {} + def z_38(z: structural1.DSub with Z): Unit = {} + + val structural2: { type SubCB <: C with B } = new { type SubCB <: C with B } + def c_39(c: structural2.SubCB with B): Unit = {} + def c_40(c: B with structural2.SubCB): Unit = {} + + val structural3a: { type SubB <: B; type SubCB <: C with SubB } = new { type SubB <: B; type SubCB <: C with SubB } + val structural3b: { type SubB <: B; type SubCB <: C with SubB } = new { type SubB <: B; type SubCB <: C with SubB } + def c_41(c: structural3a.SubB with structural3a.SubCB): Unit = {} + def c_42(c: structural3a.SubCB with structural3a.SubB): Unit = {} + def b_43(b: structural3a.SubB with structural3b.SubCB): Unit = {} + def c_44(c: structural3b.SubCB with structural3a.SubB): Unit = {} + + type SubStructural <: C with structural3a.SubB + def c_45(x: structural3a.SubB with SubStructural): Unit = {} + def b_46(x: structural3b.SubB with SubStructural): Unit = {} + + type Rec1 <: A with B + type Rec2 <: C with Rec1 + def c_47(a: A with B with Rec2): Unit = {} + def a_48(a: (A with B) @foo with Rec2): Unit = {} + + type F1 = A with B + type F2 = A with B + type Rec3 <: F1 + type 
Rec4 <: C with Rec3 + def c_49(a: F1 @foo with Rec4): Unit = {} + def c_50(a: F1 with Rec4): Unit = {} + def a_51(a: F2 @foo with Rec4): Unit = {} + def c_52(a: F2 with Rec4): Unit = {} + + type AA = A + type F3 = AA with B + type Rec5 <: F3 + type Rec6 <: C with Rec5 + def a_53(a: F3 @foo with Rec6): Unit = {} + def c_54(a: F3 with Rec6): Unit = {} + + val structural4a: { type M[X] <: A } = new { type M[X] <: A } + val structural4b: { type N <: B with structural4a.M[Int] } = new { type N <: B with structural4a.M[Int] } + def b_55(x: structural4a.M[Any] with structural4b.N): Unit = {} + + type Bla = A { type M[X] <: A } + def b_56(x: Bla#M[Any] with ({ type N <: B with Bla#M[Int] })#N): Unit = {} + type AEq = A + type Bla2 = AEq { type M[X] <: A } + def a_57(x: Bla2#M[Any] with ({ type N <: B with Bla2#M[Int] })#N): Unit = {} + + def int_58(x: Int with Singleton): Unit = {} + def int_59(x: Singleton with Int): Unit = {} + def int_60(x: Int with Any): Unit = {} + def int_61(x: Any with Int): Unit = {} + def int_62(x: Int with AnyVal): Unit = {} + def int_63(x: AnyVal with Int): Unit = {} + + def intARRAY_64(x: Array[Int with Singleton]): Unit = {} + def object_65(x: Array[_ <: Int]): Unit = {} + def object_66(x: Array[_ <: Int with Singleton]): Unit = {} + def object_67(x: Array[_ <: Singleton with Int]): Unit = {} + def object_68(x: Array[_ <: Int with Any]): Unit = {} + def object_69(x: Array[_ <: Any with Int]): Unit = {} + def object_70(x: Array[_ <: Int with AnyVal]): Unit = {} + def object_71(x: Array[_ <: AnyVal with Int]): Unit = {} + + def stringARRAY_72(x: Array[String with Singleton]): Unit = {} + def stringARRAY_73(x: Array[_ <: String]): Unit = {} + def stringARRAY_74(x: Array[_ <: String with Singleton]): Unit = {} + def stringARRAY_75(x: Array[_ <: Singleton with String]): Unit = {} + def stringARRAY_76(x: Array[_ <: String with Any]): Unit = {} + def stringARRAY_77(x: Array[_ <: Any with String]): Unit = {} + def stringARRAY_78(x: Array[_ <: String with AnyRef]): Unit = {} + def stringARRAY_79(x: Array[_ <: AnyRef with String]): Unit = {} + + def object_80(x: Array[_ <: Singleton]): Unit = {} + def object_81(x: Array[_ <: AnyVal]): Unit = {} + def objectARRAY_82(x: Array[_ <: AnyRef]): Unit = {} + def object_83(x: Array[_ <: Any]): Unit = {} + + def object_84(x: Array[_ <: Serializable]): Unit = {} + def object_85(x: Array[_ <: Univ]): Unit = {} + def aARRAY_86(x: Array[_ <: A]): Unit = {} + def aARRAY_87(x: Array[_ <: A with B]): Unit = {} + + def objectARRAY_88(x: Array[Any]): Unit = {} + def objectARRAY_89(x: Array[AnyRef]): Unit = {} + def objectARRAY_90(x: Array[AnyVal]): Unit = {} + + def stringARRAY_91(x: Array[_ <: ({ type Foo <: String with Singleton })#Foo]): Unit = {} + def stringARRAY_92(x: Array[({ type Foo <: String with Singleton })#Foo]): Unit = {} + def stringARRAY_93(x: Array[({ type Id[T] = T })#Id[String with Singleton]]): Unit = {} + + def covARRAY_94(x: Array[Q[String]]): Unit = {} // cant define opaque type in scala 2, so it is ordinary type + + def aARRAY_95(x: Array[(A with B { type L <: String }) with C]): Unit = {} + def aARRAY_96(x: Array[A { type L <: String }]): Unit = {} + def zARRAY_97(x: Array[self.type]): Unit = {} + def aARRAY_98(x: Array[(A { type L <: String }) with B]): Unit = {} + def stringARRAY_99[Arg <: String](x: Array[Arg]): Unit = {} + def aARRAY_100(x: Array[Bla2#M[Any] with ({ type N <: B with Bla2#M[Int] })#N]): Unit = {} + def zARRAY_101(x: Array[structural1.DSub with Z]): Unit = {} + def aARRAY_102(x: Array[F3 @foo with 
Rec6]): Unit = {} + def aARRAY_103(x: Array[A @foo]): Unit = {} + def dARRAY_104(x: Array[singD.type]): Unit = {} + def intARRAY_105(x: Array[3]): Unit = {} + def vcARRAY_106(x: Array[VC]): Unit = {} + def listARRAY_107(x: Array[List[_]]): Unit = {} + def intARRAY_108(x: Array[Int with String]): Unit = {} + def stringARRAY_109(x: Array[String with Int]): Unit = {} + + def a_110(x: VC with VC2): Unit = {} + def a_111(x: VC2 with VC): Unit = {} + def aARRAY_112(x: Array[VC2 with VC]): Unit = {} // this should not erase to Array[A]??? + def aARRAY_113(x: Array[VC with VC2]): Unit = {} // this should not erase to Array[A]??? + def a_114(x: VC with D): Unit = {} + def d_115(x: D with VC): Unit = {} + def d_116(x: D with B with VC): Unit = {} + def d_117(x: B with D with VC): Unit = {} + def a_118(x: VC with B with D): Unit = {} + def a_119(x: VC with Int): Unit = {} + def int_120(x: Int with VC): Unit = {} + + def object_121[T](x: Array[T]): Unit = {} + def object_122(x: Array[_ <: AnyVal with Singleton]): Unit = {} + def objectARRAY_123(x: Array[AnyVal with Singleton]): Unit = {} + def objectARRAY_124[T, U](x: Array[T with U]): Unit = {} + def objectARRAY_125(x: Array[({ type W <: String }) with ({ type X <: Int })]): Unit = {} + def covARRAY_126(x: Array[Q[B] with Y[SubB]]): Unit = {} + def covARRAY_127(x: Array[Q[B] with Y[SubB] { type X <: Cov[String] }]): Unit = {} + + type SubAny <: Any + type SubAnyVal <: AnyVal + + def objectARRAY_128(x: Array[SubAny with SubAnyVal]): Unit = {} + def intARRAYARRAY_129(x: Array[Array[Int]]): Unit = {} + def intARRAYARRAY_130(x: Array[_ <: Array[Int]]): Unit = {} + def objectARRAY_130(x: Array[_ <: Array[_ <: AnyVal]]): Unit = {} + def stringARRAY_131(x: Array[String] with Array[Int]): Unit = {} + +} diff --git a/test/tasty/run/src-2/tastytest/TestErasure.scala b/test/tasty/run/src-2/tastytest/TestErasure.scala new file mode 100644 index 000000000000..9c25c85416f1 --- /dev/null +++ b/test/tasty/run/src-2/tastytest/TestErasure.scala @@ -0,0 +1,172 @@ +package tastytest + +import tastytest.{dottyErasure => dotc, scala2Erasure => nsc} + +object TestErasure extends Suite("TestErasure") { + + val z = new dotc.Z + + test("erasure of scala 3 from scala 2") { + z.a_01(anyObj) + z.a_02(anyObj) + z.a_02X(anyObj) + z.a_03(anyObj) + z.a_04(anyObj) + z.a_04X(anyObj) + z.a_05(anyObj) + z.a_06(anyObj) + z.a_07(anyObj) + z.a_08(anyObj) + z.a_09(anyObj) + z.a_10(anyObj) + z.b_11(anyObj) + z.subb_12(anyObj) + z.d_13(anyObj) + z.d_13x(anyObj) + z.d_14(anyObj) + z.d_14x(anyObj) + z.d_15(anyObj) + z.d_15b(anyObj) + z.d_16(anyObj) + z.d_16b(anyObj) + z.d_17(anyObj) + z.d_18(anyObj) + z.d_19(anyObj) + z.d_19x(anyObj) + z.d_20(anyObj) + z.a_21(anyObj) + z.a_22(anyObj) + z.z_23(anyObj) + z.z_24(anyObj) + z.a_25(anyObj) + z.a_26(anyObj) + z.a_27(anyObj) @@ ExpectCastOrNull + z.a_28(anyObj) @@ ExpectCastOrNull + z.e_29(anyObj) + z.e_30(anyObj) + z.e_31(anyObj) + z.e_32(anyObj) + z.e_33(anyObj) + z.e_34(anyObj) + z.d_35(anyObj) + z.d_36(anyObj) + z.d_37(anyObj) + z.d_38(anyObj) + z.b_39(anyObj) + z.b_40(anyObj) + z.b_41(anyObj) + z.b_42(anyObj) + z.b_43(anyObj) + z.b_44(anyObj) + z.b_45(anyObj) + z.b_46(anyObj) + z.a_47(anyObj) + z.a_48(anyObj) + z.a_49(anyObj) + z.a_50(anyObj) + z.a_51(anyObj) + z.a_52(anyObj) + z.a_53(anyObj) + z.a_54(anyObj) + z.a_55(anyObj) + z.a_56(anyObj) + z.a_57(anyObj) + z.int_58(1) + z.int_59(1) + z.int_60(1) + z.int_61(1) + z.int_62(1) + z.int_63(1) + z.intARRAY_64(anyObj) + z.intARRAY_65(anyObj) + z.intARRAY_66(anyObj) + z.intARRAY_67(anyObj) + 
z.intARRAY_68(anyObj) + z.intARRAY_69(anyObj) + z.intARRAY_70(anyObj) + z.intARRAY_71(anyObj) + // z.intARRAY_71a(anyObj) // illegal union type + // z.intARRAY_71b(anyObj) // illegal union type + z.stringARRAY_72(anyObj) + z.stringARRAY_73(anyObj) + z.stringARRAY_74(anyObj) + z.stringARRAY_75(anyObj) + z.stringARRAY_76(anyObj) + z.stringARRAY_77(anyObj) + z.stringARRAY_78(anyObj) + z.stringARRAY_79(anyObj) + // z.stringARRAY_79a(anyObj) // illegal union type + // z.stringARRAY_79b(anyObj) // illegal union type + z.object_80(anyObj) + z.object_81(anyObj) + z.objectARRAY_82(anyObj) + z.object_83(anyObj) + z.object_83a(anyObj) + // z.object_83b(anyObj) // illegal union type + // z.object_83c(anyObj) // illegal union type + // z.object_83d(anyObj) // illegal union type + // z.object_83e(anyObj) // illegal union type + z.serializableARRAY_84(anyObj) + z.univARRAY_85(anyObj) + z.aARRAY_86(anyObj) + z.aARRAY_87(anyObj) + z.objectARRAY_88(anyObj) + z.objectARRAY_89(anyObj) + z.objectARRAY_90(anyObj) + z.stringARRAY_91(anyObj) + z.stringARRAY_92(anyObj) + z.stringARRAY_93(anyObj) + z.covARRAY_94(anyObj) + z.aARRAY_95(anyObj) + z.aARRAY_96(anyObj) + z.zARRAY_97(anyObj) + z.aARRAY_98(anyObj) + z.stringARRAY_99(anyObj) + z.aARRAY_100(anyObj) + z.dARRAY_101(anyObj) + z.aARRAY_102(anyObj) + z.aARRAY_103(anyObj) + z.dARRAY_104(anyObj) + z.intARRAY_105(anyObj) + z.vcARRAY_106(anyObj) + z.listARRAY_107(anyObj) + z.intARRAY_108(anyObj) + z.intARRAY_109(anyObj) + z.a_110(anyObj) @@ ExpectCastOrNull + z.a_111(anyObj) @@ ExpectCastOrNull + z.vcARRAY_112(anyObj) + z.vcARRAY_113(anyObj) + z.a_114(anyObj) @@ ExpectCastOrNull + z.a_115(anyObj) @@ ExpectCastOrNull + z.a_116(anyObj) @@ ExpectCastOrNull + z.a_117(anyObj) @@ ExpectCastOrNull + z.a_118(anyObj) @@ ExpectCastOrNull + z.a_119(anyObj) @@ ExpectCastOrNull + z.a_120(anyObj) @@ ExpectCastOrNull + z.object_121(anyObj) + z.object_122(anyObj) + z.objectARRAY_123(anyObj) + z.object_124(anyObj) + z.objectARRAY_125(anyObj) + z.covARRAY_126(anyObj) + z.covARRAY_127(anyObj) + z.object_128(anyObj) + z.intARRAYARRAY_129(anyObj) + z.intARRAYARRAY_130(anyObj) + z.objectARRAY_130(anyObj) + z.intARRAY_131(anyObj) + } + + test("erasure matches name") { + val methods = classOf[nsc.Z].getDeclaredMethods.toList ++ classOf[dotc.Z].getDeclaredMethods.toList + methods.foreach { m => + m.getName match { + case s"${prefix}_${suffix}" => + val paramClass = m.getParameterTypes()(0).getSimpleName.toLowerCase.replaceAll("""\[\]""", "ARRAY") + assert(prefix == paramClass, s"Method `$m` erased to `$paramClass` which does not match its prefix `$prefix`") + case _ => + } + } + } + +} diff --git a/test/tasty/run/src-2/tastytest/TestIntersectionErasure.scala b/test/tasty/run/src-2/tastytest/TestIntersectionErasure.scala new file mode 100644 index 000000000000..96023bea76c4 --- /dev/null +++ b/test/tasty/run/src-2/tastytest/TestIntersectionErasure.scala @@ -0,0 +1,12 @@ +package tastytest + +import IntersectionErasure.{universe => u} + +object TestIntersectionErasure extends Suite("TestIntersectionErasure") { + + test { + val sam: u.IntersectionSAM = x => x + assert(sam(u.EmptyTree) === (u.EmptyTree: u.TreeShimSAM)) + } + +} diff --git a/test/tasty/run/src-2/tastytest/TestReflection.scala b/test/tasty/run/src-2/tastytest/TestReflection.scala new file mode 100644 index 000000000000..4c4582d5d6e6 --- /dev/null +++ b/test/tasty/run/src-2/tastytest/TestReflection.scala @@ -0,0 +1,18 @@ +package tastytest + +import tastytest.reflectshims.impl.Context +import Context.universe.EmptyTree +import 
Context.TreeShim + +object TestReflection extends Suite("TestReflection") { + + test(assert(Reflection.reflectionInvokerIdentity(Context)(EmptyTree) === (EmptyTree: TreeShim))) + test(assert(new Reflection.Invoker(Context)(EmptyTree).tree === (EmptyTree: TreeShim))) + + // TODO [tasty]: enable due to missing type ctx.TreeShim + // test { + // val invoker = new Reflection.InvokerSAM(Context) + // val id: invoker.TreeFn = x => x + // assert(id(EmptyTree) === (EmptyTree: TreeShim)) + // } +} diff --git a/test/tasty/run/src-2/tastytest/TestSAMErasure.scala b/test/tasty/run/src-2/tastytest/TestSAMErasure.scala new file mode 100644 index 000000000000..4aa5e88b1535 --- /dev/null +++ b/test/tasty/run/src-2/tastytest/TestSAMErasure.scala @@ -0,0 +1,23 @@ +package tastytest + +import SAMErasure._ + +object TestSAMErasure extends Suite("TestSAMErasure") { + + def f = ((x: TreeShimSAM) => x): FunTreeShimSAM + + def g = ((xs: Array[TreeShimSAM]) => xs): FunTreeShimSAM2 + + case object EmptyTree extends TreeShimSAMApi + val tree = EmptyTree.asInstanceOf[TreeShimSAM] + + test { + assert(f(tree) == tree) + } + + test { + val trees = Array(tree) + assert(g(trees) == trees) + } + +} diff --git a/test/tasty/run/src-3/tastytest/IntersectionErasure.scala b/test/tasty/run/src-3/tastytest/IntersectionErasure.scala new file mode 100644 index 000000000000..0825e307517e --- /dev/null +++ b/test/tasty/run/src-3/tastytest/IntersectionErasure.scala @@ -0,0 +1,28 @@ +package tastytest + +object IntersectionErasure { + + trait Universe { + + type TreeShimSAM >: Null <: AnyRef with TreeShimSAMApi + trait TreeShimSAMApi extends Product { this: TreeShimSAM => } + + val EmptyTree: TreeShimSAM + + @FunctionalInterface + abstract class IntersectionSAM { + def apply(tree: TreeShimSAM): TreeShimSAM + } + + } + + object universe extends Universe { + + abstract class TreeShimSAMImpl extends TreeShimSAMApi with Product + type TreeShimSAM = TreeShimSAMImpl + case object EmptyTree extends TreeShimSAMImpl + + } + + +} diff --git a/test/tasty/run/src-3/tastytest/Reflection.scala b/test/tasty/run/src-3/tastytest/Reflection.scala new file mode 100644 index 000000000000..a40b842d9721 --- /dev/null +++ b/test/tasty/run/src-3/tastytest/Reflection.scala @@ -0,0 +1,23 @@ +package tastytest + +import tastytest.reflectshims + +object Reflection { + + def reflectionInvokerIdentity(ctx: reflectshims.Context)(tree: ctx.TreeShim): ctx.TreeShim = tree + + class Invoker[C <: reflectshims.Context with Singleton](val ctx: C)(root: ctx.TreeShim) { + def tree: ctx.TreeShim = root + } + + // TODO [tasty]: enable due to missing type ctx.TreeShim + // class InvokerSAM[C <: reflectshims.Context with Singleton](val ctx: C) { + + // @FunctionalInterface + // trait TreeFn { + // def apply(tree: ctx.TreeShim): ctx.TreeShim + // } + + // } + +} diff --git a/test/tasty/run/src-3/tastytest/SAMErasure.scala b/test/tasty/run/src-3/tastytest/SAMErasure.scala new file mode 100644 index 000000000000..00a471cd95e0 --- /dev/null +++ b/test/tasty/run/src-3/tastytest/SAMErasure.scala @@ -0,0 +1,18 @@ +package tastytest + +object SAMErasure { + + trait TreeShimSAMApi extends Product + + type TreeShimSAM >: Null <: AnyRef with TreeShimSAMApi + + implicit val TreeShimSAMTag: reflect.ClassTag[TreeShimSAM] = + reflect.classTag[TreeShimSAMApi].asInstanceOf[reflect.ClassTag[TreeShimSAM]] + + @FunctionalInterface + trait FunTreeShimSAM { def apply(a: TreeShimSAM): TreeShimSAM } + + @FunctionalInterface + trait FunTreeShimSAM2 { def apply(a: Array[TreeShimSAM]): Array[TreeShimSAM] 
} + +} diff --git a/test/tasty/run/src-3/tastytest/dottyErasure/api.scala b/test/tasty/run/src-3/tastytest/dottyErasure/api.scala new file mode 100644 index 000000000000..3073189c6f44 --- /dev/null +++ b/test/tasty/run/src-3/tastytest/dottyErasure/api.scala @@ -0,0 +1,259 @@ +package tastytest + +// Keep synchronized with pre/tastytest/scala2ErasureApi/api.scala +package dottyErasure + +class foo extends scala.annotation.StaticAnnotation + +trait A +trait B +trait SubB extends B +trait C +trait Cov[+T] +trait Univ extends Any + +class D + +class VC(val self: A) extends AnyVal +class VC2(val self: A) extends AnyVal + +class Outer { + class E + trait F extends E +} + +object OpaqueHolder { + opaque type Q[T] <: Any = Cov[T] + opaque type Y[T] <: Any = Cov[T] +} +import OpaqueHolder._ + +// The parameter type of `a_XX` should erase to A, `b_XX` to `B`, etc. +// This is enforced by dottyApp/Main.scala +class Z { self => + def a_01(a: A with B): Unit = {} + def a_02X(b: B with A): Unit = {} + def a_02(a: A with B with A): Unit = {} + def a_03(a: A with (B with A)): Unit = {} + def a_04(b: A with (B with A) @foo): Unit = {} + def a_04X(b: A with (B with C) @foo): Unit = {} + def a_05(b: A with (B with A) @foo with (C with B with A) @foo): Unit = {} + + type T1 <: A with B + def a_06(a: T1): Unit = {} + + type S <: B with T1 + def a_07(a: S): Unit = {} + + type T2 <: B with A + type U <: T2 with S + def a_08(b: U): Unit = {} + + val singB: B = new B {} + def a_09(a: A with singB.type): Unit = {} + def a_10(b: singB.type with A): Unit = {} + + type V >: SubB <: B + def b_11(b: V): Unit = {} + def subb_12(b: V with SubB): Unit = {} + + def d_13(d: D with A): Unit = {} + def d_14(d: A with D): Unit = {} + + val singD: D = new D {} + def d_13x(d: singD.type with A): Unit = {} + def d_14x(d: A with singD.type): Unit = {} + + type DEq = D + def d_15(d: A with DEq): Unit = {} + def d_16(d: A with (DEq @foo)): Unit = {} + def d_17(d: DEq with A): Unit = {} + def d_18(d: (DEq @foo) with A): Unit = {} + + val singDEq: DEq @foo = new D {} + def d_15b(d: A with singDEq.type): Unit = {} + def d_16b(d: A with (singDEq.type @foo)): Unit = {} + + type DSub <: D + def d_19(a: A with DSub): Unit = {} + def d_19x(d: DSub with A): Unit = {} + def d_20(z: DSub with Z): Unit = {} + + type W1 <: A with Cov[Any] + type X1 <: Cov[Int] with W1 + def a_21(a: X1): Unit = {} + + type W2 <: A with Cov[Any] + type X2 <: Cov[Int] with W2 + def a_22(a: X2): Unit = {} + + def z_23(z: A with this.type): Unit = {} + def z_24(z: this.type with A): Unit = {} + + def a_25(b: A with (B { type T })): Unit = {} + def a_26(a: (A { type T }) with ((B with A) { type T })): Unit = {} + + def a_27(a: VC with B): Unit = {} + def a_28(a: B with VC): Unit = {} + + val o1: Outer = new Outer + val o2: Outer = new Outer + def e_29(f: o1.E with o1.F): Unit = {} + def e_30(f: o1.F with o1.E): Unit = {} + def e_31(f: o1.E with o2.F): Unit = {} + def e_32(f: o2.F with o1.E): Unit = {} + def e_33(f: Outer#E with Outer#F): Unit = {} + def e_34(f: Outer#F with Outer#E): Unit = {} + + val structural1: { type DSub <: D } = new { type DSub <: D } + def d_35(a: A with structural1.DSub): Unit = {} + def d_36(a: structural1.DSub with A): Unit = {} + def d_37(z: Z with structural1.DSub): Unit = {} + def d_38(z: structural1.DSub with Z): Unit = {} + + val structural2: { type SubCB <: C with B } = new { type SubCB <: C with B } + def b_39(c: structural2.SubCB with B): Unit = {} + def b_40(c: B with structural2.SubCB): Unit = {} + + val structural3a: { type SubB <: 
B; type SubCB <: C with SubB } = new { type SubB <: B; type SubCB <: C with SubB } + val structural3b: { type SubB <: B; type SubCB <: C with SubB } = new { type SubB <: B; type SubCB <: C with SubB } + def b_41(c: structural3a.SubB with structural3a.SubCB): Unit = {} + def b_42(c: structural3a.SubCB with structural3a.SubB): Unit = {} + def b_43(b: structural3a.SubB with structural3b.SubCB): Unit = {} + def b_44(c: structural3b.SubCB with structural3a.SubB): Unit = {} + + type SubStructural <: C with structural3a.SubB + def b_45(x: structural3a.SubB with SubStructural): Unit = {} + def b_46(x: structural3b.SubB with SubStructural): Unit = {} + + type Rec1 <: A with B + type Rec2 <: C with Rec1 + def a_47(a: A with B with Rec2): Unit = {} + def a_48(a: (A with B) @foo with Rec2): Unit = {} + + type F1 = A with B + type F2 = A with B + type Rec3 <: F1 + type Rec4 <: C with Rec3 + def a_49(a: F1 @foo with Rec4): Unit = {} + def a_50(a: F1 with Rec4): Unit = {} + def a_51(a: F2 @foo with Rec4): Unit = {} + def a_52(a: F2 with Rec4): Unit = {} + + type AA = A + type F3 = AA with B + type Rec5 <: F3 + type Rec6 <: C with Rec5 + def a_53(a: F3 @foo with Rec6): Unit = {} + def a_54(a: F3 with Rec6): Unit = {} + + val structural4a: { type M[X] <: A } = new { type M[X] <: A } + val structural4b: { type N <: B with structural4a.M[Int] } = new { type N <: B with structural4a.M[Int] } + def a_55(x: structural4a.M[Any] with structural4b.N): Unit = {} + + type Bla = A { type M[X] <: A } + def a_56(x: Bla#M[Any] with ({ type N <: B with Bla#M[Int] })#N): Unit = {} + type AEq = A + type Bla2 = AEq { type M[X] <: A } + def a_57(x: Bla2#M[Any] with ({ type N <: B with Bla2#M[Int] })#N): Unit = {} + + def int_58(x: Int with Singleton): Unit = {} + def int_59(x: Singleton with Int): Unit = {} + def int_60(x: Int with Any): Unit = {} + def int_61(x: Any with Int): Unit = {} + def int_62(x: Int with AnyVal): Unit = {} + def int_63(x: AnyVal with Int): Unit = {} + + def intARRAY_64(x: Array[Int with Singleton]): Unit = {} + def intARRAY_65(x: Array[_ <: Int]): Unit = {} + def intARRAY_66(x: Array[_ <: Int with Singleton]): Unit = {} + def intARRAY_67(x: Array[_ <: Singleton with Int]): Unit = {} + def intARRAY_68(x: Array[_ <: Int with Any]): Unit = {} + def intARRAY_69(x: Array[_ <: Any with Int]): Unit = {} + def intARRAY_70(x: Array[_ <: Int with AnyVal]): Unit = {} + def intARRAY_71(x: Array[_ <: AnyVal with Int]): Unit = {} + def intARRAY_71a(x: Array[_ <: Int | Int]): Unit = {} + def intARRAY_71b(x: Array[_ <: 1 | 2]): Unit = {} + + def stringARRAY_72(x: Array[String with Singleton]): Unit = {} + def stringARRAY_73(x: Array[_ <: String]): Unit = {} + def stringARRAY_74(x: Array[_ <: String with Singleton]): Unit = {} + def stringARRAY_75(x: Array[_ <: Singleton with String]): Unit = {} + def stringARRAY_76(x: Array[_ <: String with Any]): Unit = {} + def stringARRAY_77(x: Array[_ <: Any with String]): Unit = {} + def stringARRAY_78(x: Array[_ <: String with AnyRef]): Unit = {} + def stringARRAY_79(x: Array[_ <: AnyRef with String]): Unit = {} + def stringARRAY_79a(x: Array[_ <: String | String]): Unit = {} + def stringARRAY_79b(x: Array[_ <: "a" | "b"]): Unit = {} + + def object_80(x: Array[_ <: Singleton]): Unit = {} + def object_81(x: Array[_ <: AnyVal]): Unit = {} + def objectARRAY_82(x: Array[_ <: AnyRef]): Unit = {} + def object_83(x: Array[_ <: Any]): Unit = {} + def object_83a(x: Array[_ <: Matchable]): Unit = {} + def object_83b(x: Array[_ <: Int | Double]): Unit = {} + def object_83c(x: Array[_ <: 
String | Int]): Unit = {} + def object_83d(x: Array[_ <: Int | Matchable]): Unit = {} + def object_83e(x: Array[_ <: AnyRef | AnyVal]): Unit = {} + + def serializableARRAY_84(x: Array[_ <: Serializable]): Unit = {} + def univARRAY_85(x: Array[_ <: Univ]): Unit = {} + def aARRAY_86(x: Array[_ <: A]): Unit = {} + def aARRAY_87(x: Array[_ <: A with B]): Unit = {} + + def objectARRAY_88(x: Array[Any]): Unit = {} + def objectARRAY_89(x: Array[AnyRef]): Unit = {} + def objectARRAY_90(x: Array[AnyVal]): Unit = {} + + def stringARRAY_91(x: Array[_ <: ({ type Foo <: String with Singleton })#Foo]): Unit = {} + def stringARRAY_92(x: Array[({ type Foo <: String with Singleton })#Foo]): Unit = {} + def stringARRAY_93(x: Array[({ type Id[T] = T })#Id[String with Singleton]]): Unit = {} + + def covARRAY_94(x: Array[Q[String]]): Unit = {} + + def aARRAY_95(x: Array[(A with B { type L <: String }) with C]): Unit = {} + def aARRAY_96(x: Array[A { type L <: String }]): Unit = {} + def zARRAY_97(x: Array[self.type]): Unit = {} + def aARRAY_98(x: Array[(A { type L <: String }) with B]): Unit = {} + def stringARRAY_99[Arg <: String](x: Array[Arg]): Unit = {} + def aARRAY_100(x: Array[Bla2#M[Any] with ({ type N <: B with Bla2#M[Int] })#N]): Unit = {} + def dARRAY_101(x: Array[structural1.DSub with Z]): Unit = {} + def aARRAY_102(x: Array[F3 @foo with Rec6]): Unit = {} + def aARRAY_103(x: Array[A @foo]): Unit = {} + def dARRAY_104(x: Array[singD.type]): Unit = {} + def intARRAY_105(x: Array[3]): Unit = {} + def vcARRAY_106(x: Array[VC]): Unit = {} + def listARRAY_107(x: Array[List[_]]): Unit = {} + def intARRAY_108(x: Array[Int with String]): Unit = {} + def intARRAY_109(x: Array[String with Int]): Unit = {} + + def a_110(x: VC with VC2): Unit = {} + def a_111(x: VC2 with VC): Unit = {} + def vcARRAY_112(x: Array[VC2 with VC]): Unit = {} + def vcARRAY_113(x: Array[VC with VC2]): Unit = {} + def a_114(x: VC with D): Unit = {} + def a_115(x: D with VC): Unit = {} + def a_116(x: D with B with VC): Unit = {} + def a_117(x: B with D with VC): Unit = {} + def a_118(x: VC with B with D): Unit = {} + def a_119(x: VC with Int): Unit = {} + def a_120(x: Int with VC): Unit = {} + + def object_121[T](x: Array[T]): Unit = {} + def object_122(x: Array[_ <: AnyVal with Singleton]): Unit = {} + def objectARRAY_123(x: Array[AnyVal with Singleton]): Unit = {} + def object_124[T, U](x: Array[T with U]): Unit = {} + def objectARRAY_125(x: Array[({ type W <: String }) with ({ type X <: Int })]): Unit = {} + def covARRAY_126(x: Array[Q[B] with Y[SubB]]): Unit = {} + def covARRAY_127(x: Array[Q[B] with Y[SubB] { type X <: Cov[String] }]): Unit = {} + + type SubAny <: Any + type SubAnyVal <: AnyVal + + def object_128(x: Array[SubAny with SubAnyVal]): Unit = {} + def intARRAYARRAY_129(x: Array[Array[Int]]): Unit = {} + def intARRAYARRAY_130(x: Array[_ <: Array[Int]]): Unit = {} + def objectARRAY_130(x: Array[_ <: Array[_ <: AnyVal]]): Unit = {} + def intARRAY_131(x: Array[String] with Array[Int]): Unit = {} + +} diff --git a/test/tasty/test/scala/tools/tastytest/TastyTestJUnit.scala b/test/tasty/test/scala/tools/tastytest/TastyTestJUnit.scala index a267db9b6cc7..67410d20e02b 100644 --- a/test/tasty/test/scala/tools/tastytest/TastyTestJUnit.scala +++ b/test/tasty/test/scala/tools/tastytest/TastyTestJUnit.scala @@ -1,6 +1,6 @@ package scala.tools.tastytest -import org.junit.{Test => test} +import org.junit.{Test => test, BeforeClass => setup, AfterClass => teardown} import org.junit.Assert._ import scala.util.{Try, Failure, Properties} @@ 
-63,17 +63,29 @@ class TastyTestJUnit { additionalDottySettings = Nil ).eval - val propSrc = "tastytest.src" - val propPkgName = "tastytest.packageName" + val propSrc = "tastytest.src" + val propPkgName = "tastytest.packageName" def assertPropIsSet(prop: String): String = { Properties.propOrNull(prop).ensuring(_ != null, s"-D$prop is not set") } } -import scala.reflect.runtime.ReflectionUtils - object TastyTestJUnit { + + private[this] var _dottyClassLoader: Dotc.ClassLoader = _ + implicit def dottyClassLoader: Dotc.ClassLoader = _dottyClassLoader + + @setup + def init(): Unit = { + _dottyClassLoader = Dotc.initClassloader().get + } + + @teardown + def finish(): Unit = { + _dottyClassLoader = null + } + final implicit class TryOps(val op: Try[Unit]) extends AnyVal { def eval: Unit = op match { case Failure(err) => fail(err.toString) From 652b4e34faa0b9a0d20d01da4760d995cdac4d3c Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Mon, 26 Apr 2021 17:28:54 +0200 Subject: [PATCH 166/769] test intersection erasure with value class parameters, - enable scala 3 erasure for Type#erasure - test intersection erasure with enums --- .../scala/tools/nsc/tasty/TreeUnpickler.scala | 5 ++- .../tools/nsc/tasty/bridge/ContextOps.scala | 8 +++-- .../tools/nsc/tasty/bridge/FlagOps.scala | 1 + .../tools/nsc/tasty/bridge/SymbolOps.scala | 2 +- .../tools/nsc/tasty/bridge/TypeOps.scala | 1 + .../reflect/internal/StdAttachments.scala | 3 +- .../reflect/internal/transform/Erasure.scala | 20 +++++++---- .../internal/transform/Transforms.scala | 7 ++-- .../reflect/runtime/JavaUniverseForce.scala | 4 ++- .../run/pre/tastytest/scala2Erasure/api.scala | 13 +++++++ .../run/src-2/tastytest/TestErasure.scala | 6 ++++ .../tastytest/TestIntersectionErasure.scala | 36 ++++++++++++++++--- .../src-3/tastytest/IntersectionErasure.scala | 29 +++++++-------- .../src-3/tastytest/dottyErasure/api.scala | 12 +++++++ .../tools/tastytest/TastyTestJUnit.scala | 5 ++- 15 files changed, 116 insertions(+), 36 deletions(-) diff --git a/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala b/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala index 2e2b742b5490..a64ca795cc32 100644 --- a/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala +++ b/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala @@ -752,7 +752,6 @@ class TreeUnpickler[Tasty <: TastyUniverse]( } val valueParamss = normalizeIfConstructor(vparamss, isCtor) val resType = effectiveResultType(sym, typeParams, tpt.tpe) - ctx.markAsMethod(sym) ctx.setInfo(sym, defn.DefDefType(if (isCtor) Nil else typeParams, valueParamss, resType)) } @@ -825,6 +824,9 @@ class TreeUnpickler[Tasty <: TastyUniverse]( case TYPEDEF | TYPEPARAM => TypeDef(repr, localCtx) case PARAM => TermParam(repr, localCtx) } + if (sym.isTerm) { + ctx.markAsTerm(sym) + } } try { @@ -906,6 +908,7 @@ class TreeUnpickler[Tasty <: TastyUniverse]( } val parentTypes = ctx.adjustParents(cls, parents) setInfoWithParents(tparams, parentTypes) + ctx.markAsClass(cls) } inIndexScopedStatsContext(traverseTemplate()(_)) diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala index f3485f0ea3bd..26929fd7c7cf 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala @@ -379,6 +379,7 @@ trait ContextOps { self: TastyUniverse => if (decl.isParamAccessor) decl.makeNotPrivate(cls) if (!decl.isClassConstructor) { val extensionMeth = 
decl.newExtensionMethodSymbol(cls.companion, u.NoPosition) + markAsTerm(extensionMeth) extensionMeth setInfo u.extensionMethInfo(cls, extensionMeth, decl.info, cls) } } @@ -429,8 +430,11 @@ trait ContextOps { self: TastyUniverse => final def markAsEnumSingleton(sym: Symbol): Unit = sym.updateAttachment(new u.DottyEnumSingleton(sym.name.toString)) - final def markAsMethod(sym: Symbol): Unit = - sym.updateAttachment(u.DottyMethod) + final def markAsTerm(sym: Symbol): Unit = + sym.updateAttachment(u.DottyTerm) + + final def markAsClass(sym: Symbol): Unit = + sym.updateAttachment(u.DottyClass) final def markAsOpaqueType(sym: Symbol, alias: Type): Unit = sym.updateAttachment(new u.DottyOpaqueTypeAlias(alias)) diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/FlagOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/FlagOps.scala index 8bdd53a0c655..5d295733b062 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/FlagOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/FlagOps.scala @@ -34,6 +34,7 @@ trait FlagOps { self: TastyUniverse => val SingletonEnumFlags: TastyFlagSet = SingletonEnumInitFlags | Stable val FieldAccessorFlags: TastyFlagSet = FieldAccessor | Stable val LocalFieldFlags: TastyFlagSet = Private | Local + val Scala2MacroFlags: TastyFlagSet = Erased | Macro } /**encodes a `TastyFlagSet` as `scala.reflect` flags and will ignore flags that can't be converted, such as diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/SymbolOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/SymbolOps.scala index 004a14cefa1f..40ab180a02df 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/SymbolOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/SymbolOps.scala @@ -48,7 +48,7 @@ trait SymbolOps { self: TastyUniverse => implicit final class SymbolDecorator(val sym: Symbol) { def isScala3Inline: Boolean = repr.originalFlagSet.is(Inline) - def isScala2Macro: Boolean = repr.originalFlagSet.is(Erased | Macro) + def isScala2Macro: Boolean = repr.originalFlagSet.is(FlagSets.Scala2MacroFlags) def isPureMixinCtor: Boolean = isMixinCtor && repr.originalFlagSet.is(Stable) def isMixinCtor: Boolean = u.nme.MIXIN_CONSTRUCTOR == sym.name && sym.owner.isTrait diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala index e67636a66753..e5bf9746c19a 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala @@ -422,6 +422,7 @@ trait TypeOps { self: TastyUniverse => override final def complete(sym: Symbol): Unit = { underlying.ensureCompleted() sym.info = underlying.tpe + underlying.attachments.all.foreach(sym.updateAttachment(_)) } } diff --git a/src/reflect/scala/reflect/internal/StdAttachments.scala b/src/reflect/scala/reflect/internal/StdAttachments.scala index d13fd027586a..4bad51a16c3a 100644 --- a/src/reflect/scala/reflect/internal/StdAttachments.scala +++ b/src/reflect/scala/reflect/internal/StdAttachments.scala @@ -122,7 +122,8 @@ trait StdAttachments { class DottyOpaqueTypeAlias(val tpe: Type) - case object DottyMethod extends PlainAttachment + case object DottyTerm extends PlainAttachment + case object DottyClass extends PlainAttachment class QualTypeSymAttachment(val sym: Symbol) diff --git a/src/reflect/scala/reflect/internal/transform/Erasure.scala b/src/reflect/scala/reflect/internal/transform/Erasure.scala index 055234ada4f6..d20c6915f5b7 100644 --- a/src/reflect/scala/reflect/internal/transform/Erasure.scala +++ 
b/src/reflect/scala/reflect/internal/transform/Erasure.scala @@ -99,11 +99,13 @@ trait Erasure { def erasedValueClassArg(tref: TypeRef): Type = { assert(!phase.erasedTypes, "Types are erased") val clazz = tref.sym + val isDotty = clazz.hasAttachment[DottyClass.type] if (valueClassIsParametric(clazz)) { - val underlying = tref.memberType(clazz.derivedValueClassUnbox).resultType - boxingErasure(underlying) + val erasureMap = if (isDotty) boxing3Erasure else boxingErasure + erasureMap(tref.memberType(clazz.derivedValueClassUnbox).resultType) } else { - scalaErasure(underlyingOfValueClass(clazz)) + val erasureMap = if (isDotty) scala3Erasure else scalaErasure + erasureMap(underlyingOfValueClass(clazz)) } } @@ -250,7 +252,7 @@ trait Erasure { if (verifyJavaErasure && sym.isMethod) verifiedJavaErasure else javaErasure } - else if (sym.hasAttachment[DottyMethod.type]) scala3Erasure + else if (sym.hasAttachment[DottyTerm.type]) scala3Erasure else scalaErasure /** This is used as the Scala erasure during the erasure phase itself @@ -376,7 +378,7 @@ trait Erasure { object specialScala3Erasure extends Scala3ErasureMap with SpecialScalaErasure def specialScalaErasureFor(sym: Symbol): ErasureMap = { - if (sym.hasAttachment[DottyMethod.type]) specialScala3Erasure + if (sym.hasAttachment[DottyTerm.type]) specialScala3Erasure else specialScalaErasure } @@ -392,7 +394,8 @@ trait Erasure { } } - object boxingErasure extends ScalaErasureMap { + trait BoxingErasure extends ErasureMap { + private[this] var boxPrimitives = true override def applyInArray(tp: Type): Type = { @@ -405,10 +408,15 @@ trait Erasure { override def eraseNormalClassRef(tref: TypeRef) = if (boxPrimitives && isPrimitiveValueClass(tref.sym)) boxedClass(tref.sym).tpe else super.eraseNormalClassRef(tref) + override def eraseDerivedValueClassRef(tref: TypeRef) = super.eraseNormalClassRef(tref) + } + object boxingErasure extends ScalaErasureMap with BoxingErasure + object boxing3Erasure extends Scala3ErasureMap with BoxingErasure + /** The intersection dominator (SLS 3.7) of a list of types is computed as follows. 
* * - If the list contains one or more occurrences of scala.Array with diff --git a/src/reflect/scala/reflect/internal/transform/Transforms.scala b/src/reflect/scala/reflect/internal/transform/Transforms.scala index 8a4bc08c0a45..0ea9b3b49a8b 100644 --- a/src/reflect/scala/reflect/internal/transform/Transforms.scala +++ b/src/reflect/scala/reflect/internal/transform/Transforms.scala @@ -49,7 +49,10 @@ trait Transforms { self: SymbolTable => erasure.transformInfo(sym, uncurry.transformInfo(sym, sym.info))) - def transformedType(tpe: Type) = - postErasure.elimErasedValueType(erasure.scalaErasure(uncurry.uncurry(tpe))) + def transformedType(tpe: Type) = { + val symbol = tpe.widen.typeSymbol + val erasureMap = if (symbol.hasAttachment[DottyTerm.type]) erasure.scala3Erasure else erasure.scalaErasure + postErasure.elimErasedValueType(erasureMap(uncurry.uncurry(tpe))) + } } diff --git a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala index 5ca00953eff4..f56bd8114eab 100644 --- a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala +++ b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala @@ -67,7 +67,8 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => this.UseInvokeSpecial this.TypeParamVarargsAttachment this.KnownDirectSubclassesCalled - this.DottyMethod + this.DottyTerm + this.DottyClass this.ConstructorNeedsFence this.MultiargInfixAttachment this.NullaryOverrideAdapted @@ -527,5 +528,6 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => erasure.javaErasure erasure.verifiedJavaErasure erasure.boxingErasure + erasure.boxing3Erasure } } diff --git a/test/tasty/run/pre/tastytest/scala2Erasure/api.scala b/test/tasty/run/pre/tastytest/scala2Erasure/api.scala index 1797273250a4..f7999bd8fbd4 100644 --- a/test/tasty/run/pre/tastytest/scala2Erasure/api.scala +++ b/test/tasty/run/pre/tastytest/scala2Erasure/api.scala @@ -28,6 +28,12 @@ object OpaqueHolder { } import OpaqueHolder._ +sealed abstract class Enumerated +object Enumerated { + final val C1: Enumerated with A = new Enumerated with A {} + final val C2: Enumerated with B = new Enumerated with B {} +} + // The parameter type of `a_XX` should erase to A, `b_XX` to `B`, etc. 
// This is enforced by dottyApp/Main.scala class Z { self => @@ -247,4 +253,11 @@ class Z { self => def objectARRAY_130(x: Array[_ <: Array[_ <: AnyVal]]): Unit = {} def stringARRAY_131(x: Array[String] with Array[Int]): Unit = {} + def enumerated_132(x: Enumerated.C1.type with Enumerated.C2.type): Unit = {} + def enumerated_133(x: Enumerated.C2.type with Enumerated.C1.type): Unit = {} + def enumerated_134(x: Enumerated.C1.type): Unit = {} + def enumeratedARRAY_135(x: Array[Enumerated.C1.type]): Unit = {} + def enumeratedARRAY_136(x: Array[Enumerated.C2.type with Enumerated.C1.type]): Unit = {} + def enumeratedARRAY_137(x: Array[Enumerated.C1.type with Enumerated.C2.type]): Unit = {} + } diff --git a/test/tasty/run/src-2/tastytest/TestErasure.scala b/test/tasty/run/src-2/tastytest/TestErasure.scala index 9c25c85416f1..1ba9cc7ae3d2 100644 --- a/test/tasty/run/src-2/tastytest/TestErasure.scala +++ b/test/tasty/run/src-2/tastytest/TestErasure.scala @@ -155,6 +155,12 @@ object TestErasure extends Suite("TestErasure") { z.intARRAYARRAY_130(anyObj) z.objectARRAY_130(anyObj) z.intARRAY_131(anyObj) + z.enumerated_132(anyObj) + z.enumerated_133(anyObj) + z.enumerated_134(anyObj) + z.enumeratedARRAY_135(anyObj) + z.enumeratedARRAY_136(anyObj) + z.enumeratedARRAY_137(anyObj) } test("erasure matches name") { diff --git a/test/tasty/run/src-2/tastytest/TestIntersectionErasure.scala b/test/tasty/run/src-2/tastytest/TestIntersectionErasure.scala index 96023bea76c4..3137a9500034 100644 --- a/test/tasty/run/src-2/tastytest/TestIntersectionErasure.scala +++ b/test/tasty/run/src-2/tastytest/TestIntersectionErasure.scala @@ -1,12 +1,40 @@ package tastytest -import IntersectionErasure.{universe => u} +import IntersectionErasure._ object TestIntersectionErasure extends Suite("TestIntersectionErasure") { - test { - val sam: u.IntersectionSAM = x => x - assert(sam(u.EmptyTree) === (u.EmptyTree: u.TreeShimSAM)) + def boxedId[T](t: T): T = t + + val bWithA: B with A = new B with A {} // dotc erases to A, scalac to B + + test("SAM bridges") { + val sam: IntersectionSAM = x => x + assert(sam(bWithA) === bWithA) } + test("VC param")( + assert(boxedId(new IntersectionVC(bWithA)).unwrapped == bWithA) + ) + + test("VC method unboxed")( + assert(boxedId(new IntersectionVC(bWithA)).matchesInternal(bWithA)) + ) + + test("VC method boxed")( + assert(boxedId(new IntersectionVC(bWithA)).matches(new IntersectionVC(bWithA))) + ) + + test("VC parametric param")( + assert(boxedId(new IntersectionVCParametric(bWithA)).unwrapped == bWithA) + ) + + test("VC parametric method unboxed")( + assert(boxedId(new IntersectionVCParametric(bWithA)).matchesInternal(bWithA)) + ) + + test("VC parametric method boxed")( + assert(boxedId(new IntersectionVCParametric(bWithA)).matches(new IntersectionVCParametric(bWithA))) + ) + } diff --git a/test/tasty/run/src-3/tastytest/IntersectionErasure.scala b/test/tasty/run/src-3/tastytest/IntersectionErasure.scala index 0825e307517e..8a75f53056ed 100644 --- a/test/tasty/run/src-3/tastytest/IntersectionErasure.scala +++ b/test/tasty/run/src-3/tastytest/IntersectionErasure.scala @@ -2,27 +2,22 @@ package tastytest object IntersectionErasure { - trait Universe { - - type TreeShimSAM >: Null <: AnyRef with TreeShimSAMApi - trait TreeShimSAMApi extends Product { this: TreeShimSAM => } - - val EmptyTree: TreeShimSAM - - @FunctionalInterface - abstract class IntersectionSAM { - def apply(tree: TreeShimSAM): TreeShimSAM - } + trait A + trait B + @FunctionalInterface + abstract class IntersectionSAM { + def 
apply(arg: B with A): B with A } - object universe extends Universe { - - abstract class TreeShimSAMImpl extends TreeShimSAMApi with Product - type TreeShimSAM = TreeShimSAMImpl - case object EmptyTree extends TreeShimSAMImpl - + final class IntersectionVC(val unwrapped: B with A) extends AnyVal { + def matchesInternal(that: B with A): Boolean = that == unwrapped + def matches(that: IntersectionVC): Boolean = this == that } + final class IntersectionVCParametric[T <: B with A](val unwrapped: T) extends AnyVal { + def matchesInternal(that: T): Boolean = that == unwrapped + def matches(that: IntersectionVCParametric[T]): Boolean = this == that + } } diff --git a/test/tasty/run/src-3/tastytest/dottyErasure/api.scala b/test/tasty/run/src-3/tastytest/dottyErasure/api.scala index 3073189c6f44..5d563fb8eaf8 100644 --- a/test/tasty/run/src-3/tastytest/dottyErasure/api.scala +++ b/test/tasty/run/src-3/tastytest/dottyErasure/api.scala @@ -28,6 +28,11 @@ object OpaqueHolder { } import OpaqueHolder._ +enum Enumerated { + case C1 extends Enumerated with A + case C2 extends Enumerated with B +} + // The parameter type of `a_XX` should erase to A, `b_XX` to `B`, etc. // This is enforced by dottyApp/Main.scala class Z { self => @@ -256,4 +261,11 @@ class Z { self => def objectARRAY_130(x: Array[_ <: Array[_ <: AnyVal]]): Unit = {} def intARRAY_131(x: Array[String] with Array[Int]): Unit = {} + def enumerated_132(x: Enumerated.C1.type with Enumerated.C2.type): Unit = {} + def enumerated_133(x: Enumerated.C2.type with Enumerated.C1.type): Unit = {} + def enumerated_134(x: Enumerated.C1.type): Unit = {} + def enumeratedARRAY_135(x: Array[Enumerated.C1.type]): Unit = {} + def enumeratedARRAY_136(x: Array[Enumerated.C2.type with Enumerated.C1.type]): Unit = {} + def enumeratedARRAY_137(x: Array[Enumerated.C1.type with Enumerated.C2.type]): Unit = {} + } diff --git a/test/tasty/test/scala/tools/tastytest/TastyTestJUnit.scala b/test/tasty/test/scala/tools/tastytest/TastyTestJUnit.scala index 67410d20e02b..71b901161da1 100644 --- a/test/tasty/test/scala/tools/tastytest/TastyTestJUnit.scala +++ b/test/tasty/test/scala/tools/tastytest/TastyTestJUnit.scala @@ -67,7 +67,10 @@ class TastyTestJUnit { val propPkgName = "tastytest.packageName" def assertPropIsSet(prop: String): String = { - Properties.propOrNull(prop).ensuring(_ != null, s"-D$prop is not set") + Properties.propOrElse(prop, { + fail(s"-D$prop is not set") + "(unknown)" + }) } } From a6e6e04ed4fd7af5e26788bfc01a2aefcf6d0aea Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Thu, 29 Apr 2021 20:08:36 +0200 Subject: [PATCH 167/769] add SCALA3X flag and optimise erasure - optimise dotty enum singleton erasure - optimise dotty trait param checks --- .../nsc/backend/jvm/BCodeBodyBuilder.scala | 11 ++- .../scala/tools/nsc/tasty/TreeUnpickler.scala | 30 ++++---- .../tools/nsc/tasty/bridge/ContextOps.scala | 68 +++++++++---------- .../tools/nsc/tasty/bridge/FlagOps.scala | 32 ++++++--- .../tools/nsc/tasty/bridge/SymbolOps.scala | 10 +-- .../tools/nsc/tasty/bridge/TypeOps.scala | 8 ++- .../scala/tools/nsc/typechecker/Typers.scala | 4 +- .../scala/reflect/internal/Flags.scala | 11 +-- .../reflect/internal/StdAttachments.scala | 5 +- .../scala/reflect/internal/Symbols.scala | 2 + .../reflect/internal/transform/Erasure.scala | 43 ++++++------ .../internal/transform/Transforms.scala | 2 +- .../reflect/runtime/JavaUniverseForce.scala | 3 +- .../src-2/tastytest/TestOperatorToken.scala | 7 ++ .../run/src-3/tastytest/OperatorToken.scala | 7 ++ 15 files changed, 136 
insertions(+), 107 deletions(-) create mode 100644 test/tasty/run/src-2/tastytest/TestOperatorToken.scala create mode 100644 test/tasty/run/src-3/tastytest/OperatorToken.scala diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala index 6bba2f75190d..a40c04e6a527 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala @@ -953,12 +953,11 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { mbt.descriptor ) } - module.attachments.get[DottyEnumSingleton] match { // TODO [tasty]: dotty enum singletons are not modules. - case Some(enumAttach) => - val enumCompanion = symInfoTK(module.originalOwner).asClassBType - visitAccess(enumCompanion, enumAttach.name) - - case _ => visitAccess(mbt, strMODULE_INSTANCE_FIELD) + if (module.isScala3Defined && module.hasAttachment[DottyEnumSingleton.type]) { // TODO [tasty]: dotty enum singletons are not modules. + val enumCompanion = symInfoTK(module.originalOwner).asClassBType + visitAccess(enumCompanion, module.rawname.toString) + } else { + visitAccess(mbt, strMODULE_INSTANCE_FIELD) } } } diff --git a/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala b/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala index a64ca795cc32..fd7a54bfa8c8 100644 --- a/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala +++ b/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala @@ -36,7 +36,6 @@ class TreeUnpickler[Tasty <: TastyUniverse]( nameAtRef: NameRef => TastyName)(implicit val tasty: Tasty) { self => import tasty._ - import FlagSets._ import TreeUnpickler._ import MaybeCycle._ import TastyModes._ @@ -415,20 +414,23 @@ class TreeUnpickler[Tasty <: TastyUniverse]( if (isType) prior.toTypeName else prior } - private def normalizeFlags(tag: Int, tastyFlags: TastyFlagSet, name: TastyName, isAbsType: Boolean, isClass: Boolean, rhsIsEmpty: Boolean)(implicit ctx: Context): TastyFlagSet = { + private def addInferredFlags(tag: Int, tastyFlags: TastyFlagSet, name: TastyName, isAbsType: Boolean, isClass: Boolean, rhsIsEmpty: Boolean)(implicit ctx: Context): TastyFlagSet = { var flags = tastyFlags val lacksDefinition = rhsIsEmpty && - name.isTermName && !name.isConstructorName && !flags.isOneOf(TermParamOrAccessor) || + name.isTermName && !name.isConstructorName && !flags.isOneOf(FlagSets.TermParamOrAccessor) || isAbsType || flags.is(Opaque) && !isClass if (lacksDefinition && tag != PARAM) flags |= Deferred if (isClass && flags.is(Trait)) flags |= Abstract if (tag === DEFDEF) flags |= Method if (tag === VALDEF) { - if (flags.is(Inline) || ctx.owner.is(Trait)) flags |= FieldAccessor - if (flags.not(Mutable)) flags |= Stable - if (flags.is(SingletonEnumInitFlags)) flags |= Object | Stable // we will encode dotty enum constants as objects (this needs to be corrected in bytecode) + if (flags.is(Inline) || ctx.owner.is(Trait)) + flags |= FieldAccessor + if (flags.not(Mutable)) + flags |= Stable + if (flags.is(Case | Static | Enum)) // singleton enum case + flags |= Object | Stable // encode as a module (this needs to be corrected in bytecode) } if (ctx.owner.isClass) { if (tag === TYPEPARAM) flags |= Param @@ -439,7 +441,7 @@ class TreeUnpickler[Tasty <: TastyUniverse]( } } else if (isParamTag(tag)) flags |= Param - if (flags.is(Object)) flags |= (if (tag === VALDEF) ObjectCreationFlags else ObjectClassCreationFlags) + if (flags.is(Object)) flags |= (if (tag === VALDEF) 
FlagSets.Creation.ObjectDef else FlagSets.Creation.ObjectClassDef) flags } @@ -491,7 +493,7 @@ class TreeUnpickler[Tasty <: TastyUniverse]( val (parsedFlags, annotations, privateWithin) = readModifiers(end, readTypedAnnot, readTypedWithin, noSymbol) val name = normalizeName(isTypeTag, parsedName) - val flags = normalizeFlags(tag, parsedFlags, name, isAbsType, isClass, rhsIsEmpty) + val flags = addInferredFlags(tag, parsedFlags, name, isAbsType, isClass, rhsIsEmpty) (name, flags, annotations, privateWithin) } def isTypeParameter = flags.is(Param) && isTypeTag @@ -515,7 +517,7 @@ class TreeUnpickler[Tasty <: TastyUniverse]( val completer = new Completer(isClass, subReader(start, end), flags)(ctx.retractMode(IndexScopedStats)) ctx.findRootSymbol(roots, name) match { case Some(rootd) => - ctx.adjustSymbol(rootd, flags, completer, privateWithin) // dotty "removes one completion" here from the flags, which is not possible in nsc + ctx.redefineSymbol(rootd, flags, completer, privateWithin) // dotty "removes one completion" here from the flags, which is not possible in nsc ctx.log(s"$start replaced info of ${showSym(rootd)}") rootd case _ => @@ -524,7 +526,7 @@ class TreeUnpickler[Tasty <: TastyUniverse]( } } }.ensuring(isSymbol(_), s"${ctx.classRoot}: Could not create symbol at $start") - if (tag == VALDEF && flags.is(SingletonEnumFlags)) + if (tag == VALDEF && flags.is(FlagSets.SingletonEnum)) ctx.markAsEnumSingleton(sym) registerSym(start, sym) if (canEnterInClass && ctx.owner.isClass) @@ -760,13 +762,13 @@ class TreeUnpickler[Tasty <: TastyUniverse]( checkUnsupportedFlags(repr.tastyOnlyFlags &~ (Enum | Extension | Exported)) val tpe = readTpt()(localCtx).tpe ctx.setInfo(sym, - if (repr.originalFlagSet.is(SingletonEnumFlags)) { + if (repr.originalFlagSet.is(FlagSets.SingletonEnum)) { val enumClass = sym.objectImplementation val selfTpe = defn.SingleType(sym.owner.thisPrefix, sym) val ctor = ctx.unsafeNewSymbol( owner = enumClass, name = TastyName.Constructor, - flags = Method, + flags = FlagSets.Creation.CtorDef, info = defn.DefDefType(Nil, Nil :: Nil, selfTpe) ) enumClass.typeOfThis = selfTpe @@ -824,9 +826,6 @@ class TreeUnpickler[Tasty <: TastyUniverse]( case TYPEDEF | TYPEPARAM => TypeDef(repr, localCtx) case PARAM => TermParam(repr, localCtx) } - if (sym.isTerm) { - ctx.markAsTerm(sym) - } } try { @@ -908,7 +907,6 @@ class TreeUnpickler[Tasty <: TastyUniverse]( } val parentTypes = ctx.adjustParents(cls, parents) setInfoWithParents(tparams, parentTypes) - ctx.markAsClass(cls) } inIndexScopedStatsContext(traverseTemplate()(_)) diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala index 26929fd7c7cf..1ef810e741f2 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala @@ -190,7 +190,7 @@ trait ContextOps { self: TastyUniverse => final def newLocalDummy: Symbol = owner.newLocalDummy(u.NoPosition) final def newWildcardSym(info: Type): Symbol = - owner.newTypeParameter(u.nme.WILDCARD.toTypeName, u.NoPosition, u.NoFlags).setInfo(info) + owner.newTypeParameter(u.nme.WILDCARD.toTypeName, u.NoPosition, FlagSets.Creation.Wildcard).setInfo(info) final def findRootSymbol(roots: Set[Symbol], name: TastyName): Option[Symbol] = { import TastyName.TypeName @@ -217,7 +217,8 @@ trait ContextOps { self: TastyUniverse => final def newRefinementSymbol(parent: Type, owner: Symbol, name: TastyName, tpe: Type): Symbol = { val overridden = 
parent.member(encodeTastyName(name)) val isOverride = isSymbol(overridden) - var flags = if (isOverride && overridden.isType) Override else EmptyTastyFlags + var flags = EmptyTastyFlags + if (isOverride && overridden.isType) flags |= Override val info = { if (name.isTermName) { flags |= Method | Deferred @@ -246,7 +247,7 @@ trait ContextOps { self: TastyUniverse => if (completer.originalFlagSet.is(Object)) { val sourceObject = findObject(owner, encodeTermName(name)) if (isSymbol(sourceObject)) - adjustSymbol(sourceObject, completer.originalFlagSet, completer, privateWithin) + redefineSymbol(sourceObject, completer.originalFlagSet, completer, privateWithin) else default() } @@ -262,7 +263,7 @@ trait ContextOps { self: TastyUniverse => if (completer.originalFlagSet.is(Object)) { val sourceObject = findObject(owner, encodeTermName(typeName.toTermName)) if (isSymbol(sourceObject)) - adjustSymbol(sourceObject.objectImplementation, completer.originalFlagSet, completer, privateWithin) + redefineSymbol(sourceObject.objectImplementation, completer.originalFlagSet, completer, privateWithin) else default() } @@ -293,64 +294,66 @@ trait ContextOps { self: TastyUniverse => /** Unsafe to call for creation of a object val, prefer `delayCompletion` if info is a LazyType */ final def unsafeNewSymbol(owner: Symbol, name: TastyName, flags: TastyFlagSet, info: Type, privateWithin: Symbol = noSymbol): Symbol = - adjustSymbol(unsafeNewUntypedSymbol(owner, name, flags), info, privateWithin) + unsafeSetInfoAndPrivate(unsafeNewUntypedSymbol(owner, name, flags), info, privateWithin) /** Unsafe to call for creation of a object class, prefer `delayClassCompletion` if info is a LazyType */ final def unsafeNewClassSymbol(owner: Symbol, typeName: TastyName.TypeName, flags: TastyFlagSet, info: Type, privateWithin: Symbol): Symbol = - adjustSymbol(unsafeNewUntypedClassSymbol(owner, typeName, flags), info, privateWithin) + unsafeSetInfoAndPrivate(unsafeNewUntypedClassSymbol(owner, typeName, flags), info, privateWithin) private final def unsafeNewUntypedSymbol(owner: Symbol, name: TastyName, flags: TastyFlagSet): Symbol = { if (flags.isOneOf(Param | ParamSetter)) { if (name.isTypeName) { - owner.newTypeParameter(encodeTypeName(name.toTypeName), u.NoPosition, encodeFlagSet(flags)) + owner.newTypeParameter(encodeTypeName(name.toTypeName), u.NoPosition, newSymbolFlagSet(flags)) } else { - if (owner.isClass && flags.is(FlagSets.FieldAccessorFlags)) { - val fieldFlags = flags &~ FlagSets.FieldAccessorFlags | FlagSets.LocalFieldFlags + if (owner.isClass && flags.is(FlagSets.FieldAccessor)) { + val fieldFlags = flags &~ FlagSets.FieldAccessor | FlagSets.LocalField val termName = encodeTermName(name) - val getter = owner.newMethodSymbol(termName, u.NoPosition, encodeFlagSet(flags)) - val fieldSym = owner.newValue(termName, u.NoPosition, encodeFlagSet(fieldFlags)) + val getter = owner.newMethodSymbol(termName, u.NoPosition, newSymbolFlagSet(flags)) + val fieldSym = owner.newValue(termName, u.NoPosition, newSymbolFlagSet(fieldFlags)) fieldSym.info = defn.CopyInfo(getter, fieldFlags) owner.rawInfo.decls.enter(fieldSym) getter } else { - owner.newValueParameter(encodeTermName(name), u.NoPosition, encodeFlagSet(flags)) + owner.newValueParameter(encodeTermName(name), u.NoPosition, newSymbolFlagSet(flags)) } } } else if (name === TastyName.Constructor) { - owner.newConstructor(u.NoPosition, encodeFlagSet(flags &~ Stable)) + owner.newConstructor(u.NoPosition, newSymbolFlagSet(flags &~ Stable)) } else if (name === 
TastyName.MixinConstructor) { - owner.newMethodSymbol(u.nme.MIXIN_CONSTRUCTOR, u.NoPosition, encodeFlagSet(flags &~ Stable)) + owner.newMethodSymbol(u.nme.MIXIN_CONSTRUCTOR, u.NoPosition, newSymbolFlagSet(flags &~ Stable)) } - else if (flags.is(FlagSets.ObjectCreationFlags)) { + else if (flags.is(FlagSets.Creation.ObjectDef)) { log(s"!!! visited module value $name first") assert(!owner.rawInfo.decls.lookupAll(encodeTermName(name)).exists(_.isModule)) - val module = owner.newModule(encodeTermName(name), u.NoPosition, encodeFlagSet(flags)) + val module = owner.newModule(encodeTermName(name), u.NoPosition, newSymbolFlagSet(flags)) module.moduleClass.info = defn.DefaultInfo + module.moduleClass.flags = newSymbolFlagSet(FlagSets.Creation.ObjectClassDef) module } else if (name.isTypeName) { - owner.newTypeSymbol(encodeTypeName(name.toTypeName), u.NoPosition, encodeFlagSet(flags)) + owner.newTypeSymbol(encodeTypeName(name.toTypeName), u.NoPosition, newSymbolFlagSet(flags)) } else { - owner.newMethodSymbol(encodeTermName(name), u.NoPosition, encodeFlagSet(flags)) + owner.newMethodSymbol(encodeTermName(name), u.NoPosition, newSymbolFlagSet(flags)) } } private final def unsafeNewUntypedClassSymbol(owner: Symbol, typeName: TastyName.TypeName, flags: TastyFlagSet): Symbol = { - if (flags.is(FlagSets.ObjectClassCreationFlags)) { + if (flags.is(FlagSets.Creation.ObjectClassDef)) { log(s"!!! visited module class $typeName first") - val module = owner.newModule(encodeTermName(typeName), u.NoPosition, encodeFlagSet(FlagSets.ObjectCreationFlags)) + // TODO [tasty]: test private access modifiers here + val module = owner.newModule(encodeTermName(typeName), u.NoPosition, newSymbolFlagSet(FlagSets.Creation.ObjectDef)) module.info = defn.DefaultInfo - module.moduleClass.flags = encodeFlagSet(flags) + module.moduleClass.flags = newSymbolFlagSet(flags) module.moduleClass } else { - owner.newClassSymbol(encodeTypeName(typeName), u.NoPosition, encodeFlagSet(flags)) + owner.newClassSymbol(encodeTypeName(typeName), u.NoPosition, newSymbolFlagSet(flags)) } } @@ -379,7 +382,6 @@ trait ContextOps { self: TastyUniverse => if (decl.isParamAccessor) decl.makeNotPrivate(cls) if (!decl.isClassConstructor) { val extensionMeth = decl.newExtensionMethodSymbol(cls.companion, u.NoPosition) - markAsTerm(extensionMeth) extensionMeth setInfo u.extensionMethInfo(cls, extensionMeth, decl.info, cls) } } @@ -389,15 +391,17 @@ trait ContextOps { self: TastyUniverse => } final def removeFlags(symbol: Symbol, flags: TastyFlagSet): symbol.type = - symbol.resetFlag(encodeFlagSet(flags)) + symbol.resetFlag(unsafeEncodeTastyFlagSet(flags)) final def addFlags(symbol: Symbol, flags: TastyFlagSet): symbol.type = - symbol.setFlag(encodeFlagSet(flags)) + symbol.setFlag(unsafeEncodeTastyFlagSet(flags)) - final def adjustSymbol(symbol: Symbol, flags: TastyFlagSet, info: Type, privateWithin: Symbol): symbol.type = - adjustSymbol(addFlags(symbol, flags), info, privateWithin) + final def redefineSymbol(symbol: Symbol, flags: TastyFlagSet, completer: TastyCompleter, privateWithin: Symbol): symbol.type = { + symbol.flags = newSymbolFlagSet(flags) + unsafeSetInfoAndPrivate(symbol, completer, privateWithin) + } - final def adjustSymbol(symbol: Symbol, info: Type, privateWithin: Symbol): symbol.type = { + private def unsafeSetInfoAndPrivate(symbol: Symbol, info: Type, privateWithin: Symbol): symbol.type = { symbol.privateWithin = privateWithin symbol.info = info symbol @@ -428,13 +432,7 @@ trait ContextOps { self: TastyUniverse => final def setInfo(sym: 
Symbol, info: Type): Unit = sym.info = info final def markAsEnumSingleton(sym: Symbol): Unit = - sym.updateAttachment(new u.DottyEnumSingleton(sym.name.toString)) - - final def markAsTerm(sym: Symbol): Unit = - sym.updateAttachment(u.DottyTerm) - - final def markAsClass(sym: Symbol): Unit = - sym.updateAttachment(u.DottyClass) + sym.updateAttachment(u.DottyEnumSingleton) final def markAsOpaqueType(sym: Symbol, alias: Type): Unit = sym.updateAttachment(new u.DottyOpaqueTypeAlias(alias)) diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/FlagOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/FlagOps.scala index 5d295733b062..6f9f41b58ac3 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/FlagOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/FlagOps.scala @@ -23,24 +23,36 @@ trait FlagOps { self: TastyUniverse => import self.{symbolTable => u} object FlagSets { + val TastyOnlyFlags: TastyFlagSet = ( Erased | Internal | Inline | InlineProxy | Opaque | Extension | Given | Exported | Transparent | Enum | Infix | Open | ParamAlias | Invisible ) + + object Creation { + val ObjectDef: TastyFlagSet = Object | Lazy | Final | Stable + val ObjectClassDef: TastyFlagSet = Object | Final + val CtorDef: TastyFlagSet = Method | Stable + val HKTyParam: u.FlagSet = newSymbolFlagSet(Deferred) + val TyParam: u.FlagSet = HKTyParam + val Wildcard: u.FlagSet = newSymbolFlagSet(EmptyTastyFlags) + } + val SingletonEnum: TastyFlagSet = Case | Static | Enum | Stable val TermParamOrAccessor: TastyFlagSet = Param | ParamSetter - val ObjectCreationFlags: TastyFlagSet = Object | Lazy | Final | Stable - val ObjectClassCreationFlags: TastyFlagSet = Object | Final - val SingletonEnumInitFlags: TastyFlagSet = Case | Static | Enum - val SingletonEnumFlags: TastyFlagSet = SingletonEnumInitFlags | Stable - val FieldAccessorFlags: TastyFlagSet = FieldAccessor | Stable - val LocalFieldFlags: TastyFlagSet = Private | Local - val Scala2MacroFlags: TastyFlagSet = Erased | Macro + val FieldAccessor: TastyFlagSet = FieldAccessor | Stable + val LocalField: TastyFlagSet = Private | Local + val Scala2Macro: TastyFlagSet = Erased | Macro } - /**encodes a `TastyFlagSet` as `scala.reflect` flags and will ignore flags that can't be converted, such as - * members of `FlagSets.TastyOnlyFlags` + /** Obtain a `symbolTable.FlagSet` that can be used to create a new Tasty definition. */ + private[bridge] def newSymbolFlagSet(tflags: TastyFlagSet): u.FlagSet = + unsafeEncodeTastyFlagSet(tflags) | ModifierFlags.SCALA3X + + /** **Do Not Use When Creating New Symbols** + * + * encodes a `TastyFlagSet` as a `symbolTable.FlagSet`, the flags in `FlagSets.TastyOnlyFlags` are ignored. 
*/ - private[bridge] def encodeFlagSet(tflags: TastyFlagSet): u.FlagSet = { + private[bridge] def unsafeEncodeTastyFlagSet(tflags: TastyFlagSet): u.FlagSet = { import u.Flag var flags = u.NoFlags if (tflags.is(Private)) flags |= Flag.PRIVATE diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/SymbolOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/SymbolOps.scala index 40ab180a02df..7369650581d1 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/SymbolOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/SymbolOps.scala @@ -48,7 +48,7 @@ trait SymbolOps { self: TastyUniverse => implicit final class SymbolDecorator(val sym: Symbol) { def isScala3Inline: Boolean = repr.originalFlagSet.is(Inline) - def isScala2Macro: Boolean = repr.originalFlagSet.is(FlagSets.Scala2MacroFlags) + def isScala2Macro: Boolean = repr.originalFlagSet.is(FlagSets.Scala2Macro) def isPureMixinCtor: Boolean = isMixinCtor && repr.originalFlagSet.is(Stable) def isMixinCtor: Boolean = u.nme.MIXIN_CONSTRUCTOR == sym.name && sym.owner.isTrait @@ -56,7 +56,7 @@ trait SymbolOps { self: TastyUniverse => def isTraitParamAccessor: Boolean = sym.owner.isTrait && repr.originalFlagSet.is(FieldAccessor|ParamSetter) def isParamGetter: Boolean = - sym.isMethod && sym.repr.originalFlagSet.is(FlagSets.FieldAccessorFlags) + sym.isMethod && sym.repr.originalFlagSet.is(FlagSets.FieldAccessor) /** A computed property that should only be called on a symbol which is known to have been initialised by the * Tasty Unpickler and is not yet completed. @@ -89,14 +89,14 @@ trait SymbolOps { self: TastyUniverse => def set(mask: TastyFlagSet)(implicit ctx: Context): sym.type = ctx.addFlags(sym, mask) def reset(mask: TastyFlagSet)(implicit ctx: Context): sym.type = ctx.removeFlags(sym, mask) - def isOneOf(mask: TastyFlagSet): Boolean = sym.hasFlag(encodeFlagSet(mask)) - def is(mask: TastyFlagSet): Boolean = sym.hasAllFlags(encodeFlagSet(mask)) + def isOneOf(mask: TastyFlagSet): Boolean = sym.hasFlag(unsafeEncodeTastyFlagSet(mask)) + def is(mask: TastyFlagSet): Boolean = sym.hasAllFlags(unsafeEncodeTastyFlagSet(mask)) def is(mask: TastyFlagSet, butNot: TastyFlagSet): Boolean = if (!butNot) sym.is(mask) else sym.is(mask) && sym.not(butNot) - def not(mask: TastyFlagSet): Boolean = sym.hasNoFlags(encodeFlagSet(mask)) + def not(mask: TastyFlagSet): Boolean = sym.hasNoFlags(unsafeEncodeTastyFlagSet(mask)) } /** if isConstructor, make sure it has one non-implicit parameter list */ diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala index e5bf9746c19a..a3701bc196d9 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala @@ -619,7 +619,8 @@ trait TypeOps { self: TastyUniverse => val paramInfos: List[Type] = paramInfosOp() override val params: List[Symbol] = paramNames.lazyZip(paramInfos).map { - case (name, argInfo) => ctx.owner.newValueParameter(name, u.NoPosition, encodeFlagSet(defaultFlags)).setInfo(argInfo) + case (name, argInfo) => + ctx.owner.newValueParameter(name, u.NoPosition, newSymbolFlagSet(defaultFlags)).setInfo(argInfo) } val resType: Type = resultTypeOp() @@ -647,7 +648,7 @@ trait TypeOps { self: TastyUniverse => override val typeParams: List[Symbol] = paramNames.lazyZip(paramInfos).map { case (name, bounds) => val argInfo = normaliseIfBounds(bounds) - ctx.owner.newTypeParameter(name, u.NoPosition, u.Flag.DEFERRED).setInfo(argInfo) + ctx.owner.newTypeParameter(name, u.NoPosition, 
FlagSets.Creation.HKTyParam).setInfo(argInfo) } val resType: Type = lambdaResultType(resultTypeOp()) @@ -674,7 +675,8 @@ trait TypeOps { self: TastyUniverse => val paramInfos: List[Type] = paramInfosOp() override val typeParams: List[Symbol] = paramNames.lazyZip(paramInfos).map { - case (name, argInfo) => ctx.owner.newTypeParameter(name, u.NoPosition, u.Flag.DEFERRED).setInfo(argInfo) + case (name, argInfo) => + ctx.owner.newTypeParameter(name, u.NoPosition, FlagSets.Creation.TyParam).setInfo(argInfo) } val resType: Type = resultTypeOp() // potentially need to flatten? (probably not, happens in typer in dotty) diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 851994cf47c1..137978786f8d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -1803,10 +1803,12 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (!ps.isEmpty && !superclazz.isSubClass(ps.head.typeSymbol)) pending += ParentSuperSubclassError(parent, superclazz, ps.head.typeSymbol, psym) if (!clazzIsTrait) { + def hasTraitParams(sym: Symbol) = + sym.isScala3Defined && sym.isTrait && sym.hasAttachment[DottyParameterisedTrait] // TODO perhaps there can be a flag to skip this when we know there can be no Scala 3 definitions // or otherwise use an optimised representation for trait parameters (parent.tpe :: ps).collectFirst { - case p if p.typeSymbol.hasAttachment[DottyParameterisedTrait] => + case p if hasTraitParams(p.typeSymbol) => p.typeSymbol.attachments.get[DottyParameterisedTrait].foreach( attach => pending += ParentIsScala3TraitError(parent, p.typeSymbol, attach.params, psym) ) diff --git a/src/reflect/scala/reflect/internal/Flags.scala b/src/reflect/scala/reflect/internal/Flags.scala index d366c7fce8fb..3df0c63373c3 100644 --- a/src/reflect/scala/reflect/internal/Flags.scala +++ b/src/reflect/scala/reflect/internal/Flags.scala @@ -80,7 +80,7 @@ package internal // 57: notOVERRIDE // 58: notPRIVATE // 59: -// 60: +// 60: SCALA3X // 61: // 62: // 63: @@ -113,6 +113,7 @@ class ModifierFlags { final val LOCAL = 1L << 19 // symbol is local to current class (i.e. private[this] or protected[this] // pre: PRIVATE or PROTECTED are also set final val JAVA = 1L << 20 // symbol was defined by a Java class + final val SCALA3X = 1L << 60 // class was defined in Scala 3 final val STATIC = 1L << 23 // static field, method or class final val CASEACCESSOR = 1L << 24 // symbol is a case parameter (or its accessor, or a GADT skolem) final val TRAIT = 1L << 25 // symbol is a trait @@ -202,7 +203,7 @@ class Flags extends ModifierFlags { // The flags (1L << 59) to (1L << 63) are currently unused. If added to the InitialFlags mask, // they could be used as normal flags. 
- final val InitialFlags = 0x0007FFFFFFFFFFFFL // normal flags, enabled from the first phase: 1L to (1L << 50) + final val InitialFlags = 0x1007FFFFFFFFFFFFL // normal flags, enabled from the first phase: 1L to (1L << 50) + (1L << 60) final val LateFlags = 0x00F8000000000000L // flags that override flags in (1L << 4) to (1L << 8): DEFERRED, FINAL, INTERFACE, METHOD, MODULE final val AntiFlags = 0x0700000000000000L // flags that cancel flags in 1L to (1L << 2): PROTECTED, OVERRIDE, PRIVATE final val LateShift = 47 @@ -320,7 +321,7 @@ class Flags extends ModifierFlags { /** These flags are not pickled */ - final val FlagsNotPickled = IS_ERROR | OVERLOADED | LIFTED | TRANS_FLAG | LOCKED | TRIEDCOOKING + final val FlagsNotPickled = IS_ERROR | OVERLOADED | LIFTED | TRANS_FLAG | LOCKED | TRIEDCOOKING | SCALA3X // A precaution against future additions to FlagsNotPickled turning out // to be overloaded flags thus not-pickling more than intended. @@ -477,8 +478,8 @@ class Flags extends ModifierFlags { case `notPROTECTED` => "" // (1L << 56) case 0x200000000000000L => "" // (1L << 57) case `notPRIVATE` => "" // (1L << 58) - case NEEDS_TREES => "" // (1L << 59) - case 0x1000000000000000L => "" // (1L << 60) + case NEEDS_TREES => "" // (1L << 59) + case SCALA3X => "" // (1L << 60) case 0x2000000000000000L => "" // (1L << 61) case 0x4000000000000000L => "" // (1L << 62) case 0x8000000000000000L => "" // (1L << 63) diff --git a/src/reflect/scala/reflect/internal/StdAttachments.scala b/src/reflect/scala/reflect/internal/StdAttachments.scala index 4bad51a16c3a..0c8af3b7601f 100644 --- a/src/reflect/scala/reflect/internal/StdAttachments.scala +++ b/src/reflect/scala/reflect/internal/StdAttachments.scala @@ -116,15 +116,12 @@ trait StdAttachments { */ case object KnownDirectSubclassesCalled extends PlainAttachment - class DottyEnumSingleton(val name: String) extends PlainAttachment + case object DottyEnumSingleton extends PlainAttachment class DottyParameterisedTrait(val params: List[Symbol]) class DottyOpaqueTypeAlias(val tpe: Type) - case object DottyTerm extends PlainAttachment - case object DottyClass extends PlainAttachment - class QualTypeSymAttachment(val sym: Symbol) case object ConstructorNeedsFence extends PlainAttachment diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 519f46ba4baa..fbe713758bcd 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -972,6 +972,8 @@ trait Symbols extends api.Symbols { self: SymbolTable => final def isModuleVar = hasFlag(MODULEVAR) + final def isScala3Defined = hasFlag(SCALA3X) + /** * Is this symbol static (i.e. with no outer instance)? * Q: When exactly is a sym marked as STATIC? 
diff --git a/src/reflect/scala/reflect/internal/transform/Erasure.scala b/src/reflect/scala/reflect/internal/transform/Erasure.scala index d20c6915f5b7..b64493428c34 100644 --- a/src/reflect/scala/reflect/internal/transform/Erasure.scala +++ b/src/reflect/scala/reflect/internal/transform/Erasure.scala @@ -99,12 +99,11 @@ trait Erasure { def erasedValueClassArg(tref: TypeRef): Type = { assert(!phase.erasedTypes, "Types are erased") val clazz = tref.sym - val isDotty = clazz.hasAttachment[DottyClass.type] if (valueClassIsParametric(clazz)) { - val erasureMap = if (isDotty) boxing3Erasure else boxingErasure + val erasureMap = if (clazz.isScala3Defined) boxing3Erasure else boxingErasure erasureMap(tref.memberType(clazz.derivedValueClassUnbox).resultType) } else { - val erasureMap = if (isDotty) scala3Erasure else scalaErasure + val erasureMap = if (clazz.isScala3Defined) scala3Erasure else scalaErasure erasureMap(underlyingOfValueClass(clazz)) } } @@ -143,7 +142,7 @@ trait Erasure { apply(st.supertype) case tref @ TypeRef(pre, sym, args) => def isDottyEnumSingleton(sym: Symbol): Boolean = - sym.isModuleClass && sym.sourceModule.hasAttachment[DottyEnumSingleton] + sym.isScala3Defined && sym.isModuleClass && sym.sourceModule.hasAttachment[DottyEnumSingleton.type] if (sym eq ArrayClass) eraseArray(tp, pre, args) else if ((sym eq AnyClass) || (sym eq AnyValClass) || (sym eq SingletonClass)) ObjectTpe else if (sym eq UnitClass) BoxedUnitTpe @@ -151,10 +150,7 @@ trait Erasure { else if (sym.isDerivedValueClass) eraseDerivedValueClassRef(tref) else if (isDottyEnumSingleton(sym)) apply(intersectionType(tp.parents)) // TODO [tasty]: dotty enum singletons are not modules. else if (sym.isClass) eraseNormalClassRef(tref) - else sym.attachments.get[DottyOpaqueTypeAlias] match { - case Some(alias: DottyOpaqueTypeAlias) => apply(alias.tpe.asSeenFrom(pre, sym.owner)) // TODO [tasty]: refactor if we build-in opaque types - case _ => apply(sym.info.asSeenFrom(pre, sym.owner)) // alias type or abstract type - } + else apply(transparentDealias(sym, pre, sym.owner)) // alias type or abstract type (including opaque type) case PolyType(tparams, restpe) => apply(restpe) case ExistentialType(tparams, restpe) => @@ -246,14 +242,16 @@ trait Erasure { * parents |Ps|, but with duplicate references of Object removed. 
* - for all other types, the type itself (with any sub-components erased) */ - def erasure(sym: Symbol): ErasureMap = - if (sym == NoSymbol) scalaErasure - else if (sym.enclClass.isJavaDefined) { + def erasure(sym: Symbol): ErasureMap = { + if (sym == NoSymbol) return scalaErasure + val enclosing = sym.enclClass + if (enclosing.isJavaDefined) { if (verifyJavaErasure && sym.isMethod) verifiedJavaErasure else javaErasure } - else if (sym.hasAttachment[DottyTerm.type]) scala3Erasure + else if (enclosing.isScala3Defined) scala3Erasure else scalaErasure + } /** This is used as the Scala erasure during the erasure phase itself * It differs from normal erasure in that value classes are erased to ErasedValueTypes which @@ -378,7 +376,7 @@ trait Erasure { object specialScala3Erasure extends Scala3ErasureMap with SpecialScalaErasure def specialScalaErasureFor(sym: Symbol): ErasureMap = { - if (sym.hasAttachment[DottyTerm.type]) specialScala3Erasure + if (sym.isScala3Defined) specialScala3Erasure else specialScalaErasure } @@ -532,6 +530,17 @@ trait Erasure { components.min((t, u) => compareErasedGlb(t, u)) } + def transparentDealias(sym: Symbol, pre: Type, owner: Symbol) = { + @inline def visible(tp: Type) = tp.asSeenFrom(pre, owner) + + if (sym.isScala3Defined && !sym.isClass) + sym.attachments.get[DottyOpaqueTypeAlias] + .map(alias => visible(alias.tpe)) + .getOrElse(visible(sym.info)) + else + visible(sym.info) + } + /** Dotty implementation of Array Erasure: * * Is `Array[tp]` a generic Array that needs to be erased to `Object`? @@ -551,11 +560,7 @@ trait Erasure { } def translucentSuperType(tp: Type): Type = tp match { - case tp: TypeRef => - tp.sym.attachments.get[DottyOpaqueTypeAlias] match { - case Some(alias) => alias.tpe.asSeenFrom(tp.pre, tp.sym.owner) - case None => tp.sym.info.asSeenFrom(tp.pre, tp.sym.owner) - } + case tp: TypeRef => transparentDealias(tp.sym, tp.pre, tp.sym.owner) case tp: SingleType => tp.underlying case tp: ThisType => tp.sym.typeOfThis case tp: ConstantType => tp.value.tpe @@ -610,7 +615,7 @@ trait Erasure { /** Can one of the JVM Array type store all possible values of type `t`? 
*/ def fitsInJVMArray(tp: Type): Boolean = arrayUpperBound(tp) ne NoSymbol - def isOpaque(sym: Symbol) = !sym.isClass && sym.hasAttachment[DottyOpaqueTypeAlias] + def isOpaque(sym: Symbol) = sym.isScala3Defined && !sym.isClass && sym.hasAttachment[DottyOpaqueTypeAlias] tp.dealias match { case tp: TypeRef if !isOpaque(tp.sym) => diff --git a/src/reflect/scala/reflect/internal/transform/Transforms.scala b/src/reflect/scala/reflect/internal/transform/Transforms.scala index 0ea9b3b49a8b..eecc286f2044 100644 --- a/src/reflect/scala/reflect/internal/transform/Transforms.scala +++ b/src/reflect/scala/reflect/internal/transform/Transforms.scala @@ -51,7 +51,7 @@ trait Transforms { self: SymbolTable => def transformedType(tpe: Type) = { val symbol = tpe.widen.typeSymbol - val erasureMap = if (symbol.hasAttachment[DottyTerm.type]) erasure.scala3Erasure else erasure.scalaErasure + val erasureMap = if (symbol.isScala3Defined) erasure.scala3Erasure else erasure.scalaErasure postErasure.elimErasedValueType(erasureMap(uncurry.uncurry(tpe))) } diff --git a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala index f56bd8114eab..0f4ed0b100b1 100644 --- a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala +++ b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala @@ -67,8 +67,7 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => this.UseInvokeSpecial this.TypeParamVarargsAttachment this.KnownDirectSubclassesCalled - this.DottyTerm - this.DottyClass + this.DottyEnumSingleton this.ConstructorNeedsFence this.MultiargInfixAttachment this.NullaryOverrideAdapted diff --git a/test/tasty/run/src-2/tastytest/TestOperatorToken.scala b/test/tasty/run/src-2/tastytest/TestOperatorToken.scala new file mode 100644 index 000000000000..ed7fc22cf327 --- /dev/null +++ b/test/tasty/run/src-2/tastytest/TestOperatorToken.scala @@ -0,0 +1,7 @@ +package tastytest + +object TestOperatorToken extends Suite("TestOperatorToken") { + test(assert(OperatorToken.<:< != null)) + test(assert(OperatorToken.=:= != null)) + test(assert(OperatorToken.<*> != null)) +} diff --git a/test/tasty/run/src-3/tastytest/OperatorToken.scala b/test/tasty/run/src-3/tastytest/OperatorToken.scala new file mode 100644 index 000000000000..8ac355db1dd7 --- /dev/null +++ b/test/tasty/run/src-3/tastytest/OperatorToken.scala @@ -0,0 +1,7 @@ +package tastytest + +enum OperatorToken { + case <:< + case =:= + case <*> +} From f42fd66b7bb241c6024e800053d67bac790c6878 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Thu, 29 Apr 2021 21:44:24 +0200 Subject: [PATCH 168/769] select non-privatelocal values. 
- It can be possible for a private local value to overload its field getter, so only select non-private local values - also some refactorings --- .../scala/tools/nsc/tasty/TreeUnpickler.scala | 7 +------ .../scala/tools/nsc/tasty/bridge/ContextOps.scala | 15 +++++++++++---- .../scala/tools/nsc/tasty/bridge/FlagOps.scala | 3 ++- .../scala/tools/nsc/tasty/bridge/SymbolOps.scala | 8 ++++++-- src/reflect/scala/reflect/internal/Symbols.scala | 2 -- .../reflect/internal/transform/Erasure.scala | 2 +- src/tastytest/scala/tools/tastytest/Dotc.scala | 8 ++++---- .../run/src-2/tastytest/TestReflection.scala | 2 +- .../run/src-2/tastytest/TestSuperTypes.scala | 9 ++++----- test/tasty/run/src-3/tastytest/Reflection.scala | 13 ++++++------- test/tasty/run/src-3/tastytest/SuperTypes.scala | 2 +- 11 files changed, 37 insertions(+), 34 deletions(-) diff --git a/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala b/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala index fd7a54bfa8c8..68f9628a35f2 100644 --- a/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala +++ b/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala @@ -765,12 +765,7 @@ class TreeUnpickler[Tasty <: TastyUniverse]( if (repr.originalFlagSet.is(FlagSets.SingletonEnum)) { val enumClass = sym.objectImplementation val selfTpe = defn.SingleType(sym.owner.thisPrefix, sym) - val ctor = ctx.unsafeNewSymbol( - owner = enumClass, - name = TastyName.Constructor, - flags = FlagSets.Creation.CtorDef, - info = defn.DefDefType(Nil, Nil :: Nil, selfTpe) - ) + val ctor = ctx.newConstructor(enumClass, selfTpe) enumClass.typeOfThis = selfTpe ctx.setInfo(enumClass, defn.ClassInfoType(intersectionParts(tpe), ctor :: Nil, enumClass)) prefixedRef(sym.owner.thisPrefix, enumClass) diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala index 1ef810e741f2..afe332893942 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala @@ -192,6 +192,13 @@ trait ContextOps { self: TastyUniverse => final def newWildcardSym(info: Type): Symbol = owner.newTypeParameter(u.nme.WILDCARD.toTypeName, u.NoPosition, FlagSets.Creation.Wildcard).setInfo(info) + final def newConstructor(owner: Symbol, resType: Type): Symbol = unsafeNewSymbol( + owner = owner, + name = TastyName.Constructor, + flags = FlagSets.Creation.CtorDef, + info = defn.DefDefType(Nil, Nil :: Nil, resType) + ) + final def findRootSymbol(roots: Set[Symbol], name: TastyName): Option[Symbol] = { import TastyName.TypeName @@ -293,12 +300,12 @@ trait ContextOps { self: TastyUniverse => /** Unsafe to call for creation of a object val, prefer `delayCompletion` if info is a LazyType */ - final def unsafeNewSymbol(owner: Symbol, name: TastyName, flags: TastyFlagSet, info: Type, privateWithin: Symbol = noSymbol): Symbol = + private def unsafeNewSymbol(owner: Symbol, name: TastyName, flags: TastyFlagSet, info: Type, privateWithin: Symbol = noSymbol): Symbol = unsafeSetInfoAndPrivate(unsafeNewUntypedSymbol(owner, name, flags), info, privateWithin) /** Unsafe to call for creation of a object class, prefer `delayClassCompletion` if info is a LazyType */ - final def unsafeNewClassSymbol(owner: Symbol, typeName: TastyName.TypeName, flags: TastyFlagSet, info: Type, privateWithin: Symbol): Symbol = + private def unsafeNewClassSymbol(owner: Symbol, typeName: TastyName.TypeName, flags: TastyFlagSet, info: Type, privateWithin: Symbol): Symbol = 
unsafeSetInfoAndPrivate(unsafeNewUntypedClassSymbol(owner, typeName, flags), info, privateWithin) private final def unsafeNewUntypedSymbol(owner: Symbol, name: TastyName, flags: TastyFlagSet): Symbol = { @@ -307,8 +314,8 @@ trait ContextOps { self: TastyUniverse => owner.newTypeParameter(encodeTypeName(name.toTypeName), u.NoPosition, newSymbolFlagSet(flags)) } else { - if (owner.isClass && flags.is(FlagSets.FieldAccessor)) { - val fieldFlags = flags &~ FlagSets.FieldAccessor | FlagSets.LocalField + if (owner.isClass && flags.is(FlagSets.FieldGetter)) { + val fieldFlags = flags &~ FlagSets.FieldGetter | FlagSets.LocalField val termName = encodeTermName(name) val getter = owner.newMethodSymbol(termName, u.NoPosition, newSymbolFlagSet(flags)) val fieldSym = owner.newValue(termName, u.NoPosition, newSymbolFlagSet(fieldFlags)) diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/FlagOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/FlagOps.scala index 6f9f41b58ac3..3990a85ec829 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/FlagOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/FlagOps.scala @@ -39,7 +39,8 @@ trait FlagOps { self: TastyUniverse => } val SingletonEnum: TastyFlagSet = Case | Static | Enum | Stable val TermParamOrAccessor: TastyFlagSet = Param | ParamSetter - val FieldAccessor: TastyFlagSet = FieldAccessor | Stable + val FieldGetter: TastyFlagSet = FieldAccessor | Stable + val ParamGetter: TastyFlagSet = FieldGetter | ParamSetter val LocalField: TastyFlagSet = Private | Local val Scala2Macro: TastyFlagSet = Erased | Macro } diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/SymbolOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/SymbolOps.scala index 7369650581d1..72aefb91f0f0 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/SymbolOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/SymbolOps.scala @@ -56,7 +56,7 @@ trait SymbolOps { self: TastyUniverse => def isTraitParamAccessor: Boolean = sym.owner.isTrait && repr.originalFlagSet.is(FieldAccessor|ParamSetter) def isParamGetter: Boolean = - sym.isMethod && sym.repr.originalFlagSet.is(FlagSets.FieldAccessor) + sym.isMethod && sym.repr.originalFlagSet.is(FlagSets.ParamGetter) /** A computed property that should only be called on a symbol which is known to have been initialised by the * Tasty Unpickler and is not yet completed. 
@@ -130,7 +130,11 @@ trait SymbolOps { self: TastyUniverse => space.member(selector).orElse(lookInTypeCtor) } } - else space.member(encodeTermName(tname)) + else { + val firstTry = space.member(encodeTermName(tname)) + if (firstTry.isOverloaded) firstTry.filter(!_.isPrivateLocal) + else firstTry + } } if (isSymbol(member) && hasType(member)) member else errorMissing(space, tname) diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index fbe713758bcd..ab6cd57d7228 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -263,8 +263,6 @@ trait Symbols extends api.Symbols { self: SymbolTable => final val id = nextId() // identity displayed when -uniqid // assert(id != 11924, initName) - def debugTasty = s"Symbol($this, #$id, ${flagString})" - private[this] var _validTo: Period = NoPeriod if (traceSymbolActivity) diff --git a/src/reflect/scala/reflect/internal/transform/Erasure.scala b/src/reflect/scala/reflect/internal/transform/Erasure.scala index b64493428c34..108f7afc1654 100644 --- a/src/reflect/scala/reflect/internal/transform/Erasure.scala +++ b/src/reflect/scala/reflect/internal/transform/Erasure.scala @@ -148,7 +148,7 @@ trait Erasure { else if (sym eq UnitClass) BoxedUnitTpe else if (sym.isRefinementClass) apply(mergeParents(tp.parents)) else if (sym.isDerivedValueClass) eraseDerivedValueClassRef(tref) - else if (isDottyEnumSingleton(sym)) apply(intersectionType(tp.parents)) // TODO [tasty]: dotty enum singletons are not modules. + else if (isDottyEnumSingleton(sym)) apply(mergeParents(tp.parents)) // TODO [tasty]: dotty enum singletons are not modules. else if (sym.isClass) eraseNormalClassRef(tref) else apply(transparentDealias(sym, pre, sym.owner)) // alias type or abstract type (including opaque type) case PolyType(tparams, restpe) => diff --git a/src/tastytest/scala/tools/tastytest/Dotc.scala b/src/tastytest/scala/tools/tastytest/Dotc.scala index 2e9d3b68a2cb..e36399c5e08a 100644 --- a/src/tastytest/scala/tools/tastytest/Dotc.scala +++ b/src/tastytest/scala/tools/tastytest/Dotc.scala @@ -76,18 +76,18 @@ object Dotc extends Script.Command { val describe: String = s"$commandName " def process(args: String*): Int = { - if (args.length != 2) { - println(red(s"please provide two arguments in sub-command: $describe")) + if (args.length < 2) { + println(red(s"please provide at least two arguments in sub-command: $describe")) return 1 } - val Seq(out, src) = args: @unchecked + val Seq(out, src, additional @ _*) = args: @unchecked implicit val scala3classloader: Dotc.ClassLoader = initClassloader() match { case Success(cl) => cl case Failure(err) => println(red(s"could not initialise Scala 3 classpath: $err")) return 1 } - val success = dotc(out, out, Nil, src).get + val success = dotc(out, out, additional, src).get if (success) 0 else 1 } diff --git a/test/tasty/run/src-2/tastytest/TestReflection.scala b/test/tasty/run/src-2/tastytest/TestReflection.scala index 4c4582d5d6e6..d292b8b7e737 100644 --- a/test/tasty/run/src-2/tastytest/TestReflection.scala +++ b/test/tasty/run/src-2/tastytest/TestReflection.scala @@ -9,7 +9,7 @@ object TestReflection extends Suite("TestReflection") { test(assert(Reflection.reflectionInvokerIdentity(Context)(EmptyTree) === (EmptyTree: TreeShim))) test(assert(new Reflection.Invoker(Context)(EmptyTree).tree === (EmptyTree: TreeShim))) - // TODO [tasty]: enable due to missing type ctx.TreeShim + // bridge method not generated 
(AbstractMethodError) [same if Reflection.InvokerSAM is compiled by Scala 2] // test { // val invoker = new Reflection.InvokerSAM(Context) // val id: invoker.TreeFn = x => x diff --git a/test/tasty/run/src-2/tastytest/TestSuperTypes.scala b/test/tasty/run/src-2/tastytest/TestSuperTypes.scala index b675a0ec87d0..ed552f561ac6 100644 --- a/test/tasty/run/src-2/tastytest/TestSuperTypes.scala +++ b/test/tasty/run/src-2/tastytest/TestSuperTypes.scala @@ -11,11 +11,10 @@ object TestSuperTypes extends Suite("TestSuperTypes") { assert(("" match { case bar.A(x) => x: "Foo.foo" }) === "Foo.foo") } - // TODO [tasty]: what is happening here - // test("SUPERtype in type, version 2") { - // val BarA = (new SuperTypes.Bar()).A - // assert(("" match { case BarA(x) => x: "Foo.foo" }) === "Foo.foo") - // } + test("SUPERtype in type, version 2") { + val bar = new SuperTypes.Bar() + assert(("" match { case bar.A(x) => x : bar.foo.type }) === "Foo.foo") + } test("SUPER qualified in type tree") { assert((new SuperTypes.Baz().baz: "Foo.foo") === "Foo.foo") diff --git a/test/tasty/run/src-3/tastytest/Reflection.scala b/test/tasty/run/src-3/tastytest/Reflection.scala index a40b842d9721..434cc62ee39c 100644 --- a/test/tasty/run/src-3/tastytest/Reflection.scala +++ b/test/tasty/run/src-3/tastytest/Reflection.scala @@ -10,14 +10,13 @@ object Reflection { def tree: ctx.TreeShim = root } - // TODO [tasty]: enable due to missing type ctx.TreeShim - // class InvokerSAM[C <: reflectshims.Context with Singleton](val ctx: C) { + class InvokerSAM[C <: reflectshims.Context with Singleton](val ctx: C) { - // @FunctionalInterface - // trait TreeFn { - // def apply(tree: ctx.TreeShim): ctx.TreeShim - // } + @FunctionalInterface + trait TreeFn { + def apply(tree: ctx.TreeShim): ctx.TreeShim + } - // } + } } diff --git a/test/tasty/run/src-3/tastytest/SuperTypes.scala b/test/tasty/run/src-3/tastytest/SuperTypes.scala index 90c3cb331777..3e89f38b4fa9 100644 --- a/test/tasty/run/src-3/tastytest/SuperTypes.scala +++ b/test/tasty/run/src-3/tastytest/SuperTypes.scala @@ -3,7 +3,7 @@ package tastytest object SuperTypes { class Foo { - final val foo = "Foo.foo" + final val foo: "Foo.foo" = "Foo.foo" } class Bar extends Foo { From daa16e68be6ab29171085168ea513ed9f5eda235 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Tue, 4 May 2021 17:25:32 +0200 Subject: [PATCH 169/769] Refactoring --- build.sbt | 18 +++++++-------- .../tools/nsc/tasty/bridge/ContextOps.scala | 7 ++---- .../tools/nsc/tasty/bridge/FlagOps.scala | 22 ++++++++++++++----- .../tools/nsc/tasty/bridge/SymbolOps.scala | 12 ---------- .../tools/nsc/tasty/bridge/TypeOps.scala | 16 ++++++-------- .../scala/reflect/internal/Symbols.scala | 2 +- .../reflect/internal/transform/Erasure.scala | 4 ++++ 7 files changed, 39 insertions(+), 42 deletions(-) diff --git a/build.sbt b/build.sbt index 292a6cccb2dd..83346ee8a94f 100644 --- a/build.sbt +++ b/build.sbt @@ -616,9 +616,7 @@ lazy val tastytest = configureAsSubproject(project) .settings( name := "scala-tastytest", description := "Scala TASTy Integration Testing Tool", - libraryDependencies ++= List( - diffUtilsDep, - ), + libraryDependencies += diffUtilsDep, Compile / scalacOptions ++= Seq("-feature", "-Xlint"), ) @@ -751,14 +749,14 @@ lazy val tasty = project.in(file("test") / "tasty") ), javaOptions ++= { import java.io.File.pathSeparator - val lib = (library / Compile / classDirectory).value.getAbsoluteFile() - val ref = (reflect / Compile / classDirectory).value.getAbsoluteFile() - val classpath = 
(TastySupport.CompilerClasspath / managedClasspath).value.seq.map(_.data) :+ lib - val libraryClasspath = (TastySupport.LibraryClasspath / managedClasspath).value.seq.map(_.data) :+ lib + val scalaLibrary = (library / Compile / classDirectory).value.getAbsoluteFile() + val scalaReflect = (reflect / Compile / classDirectory).value.getAbsoluteFile() + val dottyCompiler = (TastySupport.CompilerClasspath / managedClasspath).value.seq.map(_.data) :+ scalaLibrary + val dottyLibrary = (TastySupport.LibraryClasspath / managedClasspath).value.seq.map(_.data) :+ scalaLibrary Seq( - s"-Dtastytest.classpaths.dottyCompiler=${classpath.mkString(pathSeparator)}", - s"-Dtastytest.classpaths.dottyLibrary=${libraryClasspath.mkString(pathSeparator)}", - s"-Dtastytest.classpaths.scalaReflect=${ref}", + s"-Dtastytest.classpaths.dottyCompiler=${dottyCompiler.mkString(pathSeparator)}", + s"-Dtastytest.classpaths.dottyLibrary=${dottyLibrary.mkString(pathSeparator)}", + s"-Dtastytest.classpaths.scalaReflect=$scalaReflect", ) }, ) diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala index afe332893942..d50e97c5cd74 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala @@ -397,11 +397,8 @@ trait ContextOps { self: TastyUniverse => parentTypes } - final def removeFlags(symbol: Symbol, flags: TastyFlagSet): symbol.type = - symbol.resetFlag(unsafeEncodeTastyFlagSet(flags)) - - final def addFlags(symbol: Symbol, flags: TastyFlagSet): symbol.type = - symbol.setFlag(unsafeEncodeTastyFlagSet(flags)) + private[bridge] final def resetFlag0(symbol: Symbol, flags: u.FlagSet): symbol.type = + symbol.resetFlag(flags) final def redefineSymbol(symbol: Symbol, flags: TastyFlagSet, completer: TastyCompleter, privateWithin: Symbol): symbol.type = { symbol.flags = newSymbolFlagSet(flags) diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/FlagOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/FlagOps.scala index 3990a85ec829..ff5cb6270f5f 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/FlagOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/FlagOps.scala @@ -45,15 +45,27 @@ trait FlagOps { self: TastyUniverse => val Scala2Macro: TastyFlagSet = Erased | Macro } - /** Obtain a `symbolTable.FlagSet` that can be used to create a new Tasty definition. */ + /** For purpose of symbol initialisation, encode a `TastyFlagSet` as a `symbolTable.FlagSet`. */ private[bridge] def newSymbolFlagSet(tflags: TastyFlagSet): u.FlagSet = unsafeEncodeTastyFlagSet(tflags) | ModifierFlags.SCALA3X - /** **Do Not Use When Creating New Symbols** - * - * encodes a `TastyFlagSet` as a `symbolTable.FlagSet`, the flags in `FlagSets.TastyOnlyFlags` are ignored. + implicit final class SymbolFlagOps(val sym: Symbol) { + def reset(tflags: TastyFlagSet)(implicit ctx: Context): sym.type = + ctx.resetFlag0(sym, unsafeEncodeTastyFlagSet(tflags)) + def isOneOf(mask: TastyFlagSet): Boolean = sym.hasFlag(unsafeEncodeTastyFlagSet(mask)) + def is(mask: TastyFlagSet): Boolean = sym.hasAllFlags(unsafeEncodeTastyFlagSet(mask)) + def is(mask: TastyFlagSet, butNot: TastyFlagSet): Boolean = + if (!butNot) + sym.is(mask) + else + sym.is(mask) && sym.not(butNot) + def not(mask: TastyFlagSet): Boolean = sym.hasNoFlags(unsafeEncodeTastyFlagSet(mask)) + } + + /** encodes a `TastyFlagSet` as a `symbolTable.FlagSet`, the flags in `FlagSets.TastyOnlyFlags` are ignored. 
+ * @note Do not use directly to initialise symbol flags, use `newSymbolFlagSet` */ - private[bridge] def unsafeEncodeTastyFlagSet(tflags: TastyFlagSet): u.FlagSet = { + private def unsafeEncodeTastyFlagSet(tflags: TastyFlagSet): u.FlagSet = { import u.Flag var flags = u.NoFlags if (tflags.is(Private)) flags |= Flag.PRIVATE diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/SymbolOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/SymbolOps.scala index 72aefb91f0f0..c2372f80cc56 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/SymbolOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/SymbolOps.scala @@ -85,18 +85,6 @@ trait SymbolOps { self: TastyUniverse => def termRef: Type = sym.preciseRef(u.NoPrefix) def preciseRef(pre: Type): Type = u.typeRef(pre, sym, Nil) def safeOwner: Symbol = if (sym.owner eq sym) sym else sym.owner - - def set(mask: TastyFlagSet)(implicit ctx: Context): sym.type = ctx.addFlags(sym, mask) - def reset(mask: TastyFlagSet)(implicit ctx: Context): sym.type = ctx.removeFlags(sym, mask) - - def isOneOf(mask: TastyFlagSet): Boolean = sym.hasFlag(unsafeEncodeTastyFlagSet(mask)) - def is(mask: TastyFlagSet): Boolean = sym.hasAllFlags(unsafeEncodeTastyFlagSet(mask)) - def is(mask: TastyFlagSet, butNot: TastyFlagSet): Boolean = - if (!butNot) - sym.is(mask) - else - sym.is(mask) && sym.not(butNot) - def not(mask: TastyFlagSet): Boolean = sym.hasNoFlags(unsafeEncodeTastyFlagSet(mask)) } /** if isConstructor, make sure it has one non-implicit parameter list */ diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala index a3701bc196d9..f67e8fefdcbe 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala @@ -240,7 +240,7 @@ trait TypeOps { self: TastyUniverse => bounds } - private[bridge] def sameErasure(sym: Symbol)(tpe: Type, ref: ErasedTypeRef)(implicit ctx: Context) = + private[bridge] def sameErasure(sym: Symbol)(tpe: Type, ref: ErasedTypeRef) = NameErasure.sigName(tpe, sym) === ref /** This is a port from Dotty of transforming a Method type to an ErasedTypeRef @@ -254,9 +254,9 @@ trait TypeOps { self: TastyUniverse => * `from` and `to` must be static classes, both with one type parameter, and the same variance. * Do the same for by name types => From[T] and => To[T] */ - def translateParameterized(self: Type)(from: u.ClassSymbol, to: u.ClassSymbol, wildcardArg: Boolean = false)(implicit ctx: Context): Type = self match { + def translateParameterized(self: Type)(from: u.ClassSymbol, to: u.ClassSymbol, wildcardArg: Boolean): Type = self match { case self @ u.NullaryMethodType(tp) => - u.NullaryMethodType(translateParameterized(tp)(from, to, wildcardArg=false)) + u.NullaryMethodType(translateParameterized(tp)(from, to, wildcardArg = false)) case _ => if (self.typeSymbol.isSubClass(from)) { def elemType(tp: Type): Type = tp.dealiasWiden match { @@ -271,25 +271,23 @@ trait TypeOps { self: TastyUniverse => else self } - def translateFromRepeated(self: Type)(toArray: Boolean, translateWildcard: Boolean = false)(implicit ctx: Context): Type = { + def translateFromRepeated(self: Type)(toArray: Boolean): Type = { val seqClass = if (toArray) u.definitions.ArrayClass else u.definitions.SeqClass - if (translateWildcard && self === u.WildcardType) - seqClass.ref(u.WildcardType :: Nil) - else if (isRepeatedParam(self)) + if (isRepeatedParam(self)) // We want `Array[? 
<: T]` because arrays aren't covariant until after // erasure. See `tests/pos/i5140`. translateParameterized(self)(u.definitions.RepeatedParamClass, seqClass, wildcardArg = toArray) else self } - def sigName(tp: Type, sym: Symbol)(implicit ctx: Context): ErasedTypeRef = { + def sigName(tp: Type, sym: Symbol): ErasedTypeRef = { val normTp = translateFromRepeated(tp)(toArray = sym.isJavaDefined) erasedSigName( u.erasure.erasure(sym)(normTp) ) } - private def erasedSigName(erased: Type)(implicit ctx: Context): ErasedTypeRef = erased match { + private def erasedSigName(erased: Type): ErasedTypeRef = erased match { case erased: u.ExistentialType => erasedSigName(erased.underlying) case erased: u.TypeRef => import TastyName._ diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index ab6cd57d7228..93098a81279c 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -261,7 +261,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => rawatt = initPos final val id = nextId() // identity displayed when -uniqid - // assert(id != 11924, initName) + //assert(id != 3390, initName) private[this] var _validTo: Period = NoPeriod diff --git a/src/reflect/scala/reflect/internal/transform/Erasure.scala b/src/reflect/scala/reflect/internal/transform/Erasure.scala index 108f7afc1654..f02bed550f03 100644 --- a/src/reflect/scala/reflect/internal/transform/Erasure.scala +++ b/src/reflect/scala/reflect/internal/transform/Erasure.scala @@ -530,6 +530,10 @@ trait Erasure { components.min((t, u) => compareErasedGlb(t, u)) } + /** For a type alias, get its info as seen from + * the current prefix and owner. + * Sees through opaque type aliases. + */ def transparentDealias(sym: Symbol, pre: Type, owner: Symbol) = { @inline def visible(tp: Type) = tp.asSeenFrom(pre, owner) From 45c1b5a7fbb6addcb9568304bd358f05323f7224 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Wed, 5 May 2021 17:47:05 +0200 Subject: [PATCH 170/769] add regression test for object access -also generate fresh wildcard name for wildcards --- .../scala/tools/nsc/tasty/TreeUnpickler.scala | 14 ++--- .../tools/nsc/tasty/bridge/ContextOps.scala | 57 ++++++++++++------- .../tools/nsc/tasty/bridge/FlagOps.scala | 8 +-- .../tools/nsc/tasty/bridge/SymbolOps.scala | 2 +- .../tools/nsc/tasty/bridge/TypeOps.scala | 6 +- test/tasty/neg/src-2/TestDelayedPrivate.check | 4 ++ .../neg/src-2/TestDelayedPrivateInverse.check | 4 ++ .../TestDelayedPrivateInverse_fail.scala | 8 +++ .../neg/src-2/TestDelayedPrivate_fail.scala | 9 +++ test/tasty/neg/src-3/DelayedPrivate.scala | 15 +++++ .../neg/src-3/DelayedPrivateInverse.scala | 8 +++ 11 files changed, 98 insertions(+), 37 deletions(-) create mode 100644 test/tasty/neg/src-2/TestDelayedPrivate.check create mode 100644 test/tasty/neg/src-2/TestDelayedPrivateInverse.check create mode 100644 test/tasty/neg/src-2/TestDelayedPrivateInverse_fail.scala create mode 100644 test/tasty/neg/src-2/TestDelayedPrivate_fail.scala create mode 100644 test/tasty/neg/src-3/DelayedPrivate.scala create mode 100644 test/tasty/neg/src-3/DelayedPrivateInverse.scala diff --git a/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala b/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala index 68f9628a35f2..034f22d55def 100644 --- a/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala +++ b/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala @@ -763,12 +763,8 @@ class TreeUnpickler[Tasty <: TastyUniverse]( val 
tpe = readTpt()(localCtx).tpe ctx.setInfo(sym, if (repr.originalFlagSet.is(FlagSets.SingletonEnum)) { - val enumClass = sym.objectImplementation - val selfTpe = defn.SingleType(sym.owner.thisPrefix, sym) - val ctor = ctx.newConstructor(enumClass, selfTpe) - enumClass.typeOfThis = selfTpe - ctx.setInfo(enumClass, defn.ClassInfoType(intersectionParts(tpe), ctor :: Nil, enumClass)) - prefixedRef(sym.owner.thisPrefix, enumClass) + ctx.completeEnumSingleton(sym, tpe) + prefixedRef(sym.owner.thisPrefix, sym.objectImplementation) } else if (sym.isFinal && isConstantType(tpe)) defn.InlineExprType(tpe) else if (sym.isMethod) defn.ExprType(tpe) @@ -1079,7 +1075,7 @@ class TreeUnpickler[Tasty <: TastyUniverse]( case UNAPPLY => unsupportedTermTreeError("unapply pattern") case INLINED => unsupportedTermTreeError("inlined expression") case SELECTouter => metaprogrammingIsUnsupported // only within inline - case HOLE => assertNoMacroHole + case HOLE => abortMacroHole case _ => readPathTerm() } assert(currentAddr === end, s"$start $currentAddr $end ${astTagToString(tag)}") @@ -1096,7 +1092,7 @@ class TreeUnpickler[Tasty <: TastyUniverse]( forkAt(readAddr()).readTpt() case BLOCK => // BLOCK appears in type position when quoting a type, but only in the body of a method metaprogrammingIsUnsupported - case HOLE => assertNoMacroHole + case HOLE => abortMacroHole case tag => if (isTypeTreeTag(tag)) readTerm()(ctx.retractMode(OuterTerm)) else { @@ -1110,7 +1106,7 @@ class TreeUnpickler[Tasty <: TastyUniverse]( /** * A HOLE should never appear in TASTy for a top level class, only in quotes. */ - private def assertNoMacroHole[T]: T = assertError("Scala 3 macro hole in pickled TASTy") + private def abortMacroHole[T]: T = abortWith(msg = "Scala 3 macro hole in pickled TASTy") private def metaprogrammingIsUnsupported[T](implicit ctx: Context): T = unsupportedError("Scala 3 metaprogramming features") diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala index d50e97c5cd74..f1658240a4b1 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala @@ -65,20 +65,22 @@ trait ContextOps { self: TastyUniverse => @inline final def typeError[T](msg: String): T = throw new u.TypeError(msg) - @inline final def assertError[T](msg: String): T = - throw new AssertionError(s"assertion failed: ${u.supplementErrorMessage(msg)}") + final def abortWith[T](msg: String): T = { + u.assert(false, msg) + ??? 
+ } @inline final def assert(assertion: Boolean, msg: => Any): Unit = - if (!assertion) assertError(String.valueOf(msg)) + u.assert(assertion, msg) @inline final def assert(assertion: Boolean): Unit = - if (!assertion) assertError("") + u.assert(assertion, "") private final def findObject(owner: Symbol, name: u.Name): Symbol = { val scope = if (owner != null && owner.isClass) owner.rawInfo.decls else u.EmptyScope - val it = scope.lookupAll(name).filter(_.isModule) + val it = scope.lookupAll(name).withFilter(_.isModule) if (it.hasNext) it.next() else u.NoSymbol //throw new AssertionError(s"no module $name in ${location(owner)}") } @@ -189,14 +191,18 @@ trait ContextOps { self: TastyUniverse => final def newLocalDummy: Symbol = owner.newLocalDummy(u.NoPosition) - final def newWildcardSym(info: Type): Symbol = - owner.newTypeParameter(u.nme.WILDCARD.toTypeName, u.NoPosition, FlagSets.Creation.Wildcard).setInfo(info) + final def newWildcard(info: Type): Symbol = + owner.newTypeParameter( + name = u.freshTypeName("_$")(u.currentFreshNameCreator), + pos = u.NoPosition, + newFlags = FlagSets.Creation.Default + ).setInfo(info) - final def newConstructor(owner: Symbol, resType: Type): Symbol = unsafeNewSymbol( + final def newConstructor(owner: Symbol, info: Type): Symbol = unsafeNewSymbol( owner = owner, name = TastyName.Constructor, - flags = FlagSets.Creation.CtorDef, - info = defn.DefDefType(Nil, Nil :: Nil, resType) + flags = Method, + info = info ) final def findRootSymbol(roots: Set[Symbol], name: TastyName): Option[Symbol] = { @@ -328,23 +334,21 @@ trait ContextOps { self: TastyUniverse => } } } - else if (name === TastyName.Constructor) { - owner.newConstructor(u.NoPosition, newSymbolFlagSet(flags &~ Stable)) - } - else if (name === TastyName.MixinConstructor) { - owner.newMethodSymbol(u.nme.MIXIN_CONSTRUCTOR, u.NoPosition, newSymbolFlagSet(flags &~ Stable)) - } else if (flags.is(FlagSets.Creation.ObjectDef)) { log(s"!!! visited module value $name first") - assert(!owner.rawInfo.decls.lookupAll(encodeTermName(name)).exists(_.isModule)) val module = owner.newModule(encodeTermName(name), u.NoPosition, newSymbolFlagSet(flags)) module.moduleClass.info = defn.DefaultInfo - module.moduleClass.flags = newSymbolFlagSet(FlagSets.Creation.ObjectClassDef) module } else if (name.isTypeName) { owner.newTypeSymbol(encodeTypeName(name.toTypeName), u.NoPosition, newSymbolFlagSet(flags)) } + else if (name === TastyName.Constructor) { + owner.newConstructor(u.NoPosition, newSymbolFlagSet(flags &~ Stable)) + } + else if (name === TastyName.MixinConstructor) { + owner.newMethodSymbol(u.nme.MIXIN_CONSTRUCTOR, u.NoPosition, newSymbolFlagSet(flags &~ Stable)) + } else { owner.newMethodSymbol(encodeTermName(name), u.NoPosition, newSymbolFlagSet(flags)) } @@ -353,8 +357,7 @@ trait ContextOps { self: TastyUniverse => private final def unsafeNewUntypedClassSymbol(owner: Symbol, typeName: TastyName.TypeName, flags: TastyFlagSet): Symbol = { if (flags.is(FlagSets.Creation.ObjectClassDef)) { log(s"!!! 
visited module class $typeName first") - // TODO [tasty]: test private access modifiers here - val module = owner.newModule(encodeTermName(typeName), u.NoPosition, newSymbolFlagSet(FlagSets.Creation.ObjectDef)) + val module = owner.newModule(encodeTermName(typeName), u.NoPosition, FlagSets.Creation.Default) module.info = defn.DefaultInfo module.moduleClass.flags = newSymbolFlagSet(flags) module.moduleClass @@ -400,6 +403,20 @@ trait ContextOps { self: TastyUniverse => private[bridge] final def resetFlag0(symbol: Symbol, flags: u.FlagSet): symbol.type = symbol.resetFlag(flags) + final def completeEnumSingleton(sym: Symbol, tpe: Type): Unit = { + val moduleCls = sym.moduleClass + val moduleClsFlags = FlagSets.withAccess( + flags = FlagSets.Creation.ObjectClassDef, + inheritedAccess = sym.repr.originalFlagSet + ) + val selfTpe = defn.SingleType(sym.owner.thisPrefix, sym) + val ctor = newConstructor(moduleCls, selfTpe) + moduleCls.typeOfThis = selfTpe + moduleCls.flags = newSymbolFlagSet(moduleClsFlags) + moduleCls.info = defn.ClassInfoType(intersectionParts(tpe), ctor :: Nil, moduleCls) + moduleCls.privateWithin = sym.privateWithin + } + final def redefineSymbol(symbol: Symbol, flags: TastyFlagSet, completer: TastyCompleter, privateWithin: Symbol): symbol.type = { symbol.flags = newSymbolFlagSet(flags) unsafeSetInfoAndPrivate(symbol, completer, privateWithin) diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/FlagOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/FlagOps.scala index ff5cb6270f5f..ba6d993dfec2 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/FlagOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/FlagOps.scala @@ -32,11 +32,11 @@ trait FlagOps { self: TastyUniverse => object Creation { val ObjectDef: TastyFlagSet = Object | Lazy | Final | Stable val ObjectClassDef: TastyFlagSet = Object | Final - val CtorDef: TastyFlagSet = Method | Stable - val HKTyParam: u.FlagSet = newSymbolFlagSet(Deferred) - val TyParam: u.FlagSet = HKTyParam - val Wildcard: u.FlagSet = newSymbolFlagSet(EmptyTastyFlags) + val Default: u.FlagSet = newSymbolFlagSet(EmptyTastyFlags) + val BoundedType: u.FlagSet = newSymbolFlagSet(Deferred) } + def withAccess(flags: TastyFlagSet, inheritedAccess: TastyFlagSet): TastyFlagSet = + flags | (inheritedAccess & (Private | Local | Protected)) val SingletonEnum: TastyFlagSet = Case | Static | Enum | Stable val TermParamOrAccessor: TastyFlagSet = Param | ParamSetter val FieldGetter: TastyFlagSet = FieldAccessor | Stable diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/SymbolOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/SymbolOps.scala index c2372f80cc56..d0f6fb756b0e 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/SymbolOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/SymbolOps.scala @@ -197,5 +197,5 @@ trait SymbolOps { self: TastyUniverse => } def showSig(sig: MethodSignature[ErasedTypeRef]): String = sig.map(_.signature).show - def showSym(sym: Symbol): String = s"Symbol($sym, #${sym.id})" + def showSym(sym: Symbol): String = s"Symbol(${sym.accurateKindString} ${sym.name}, #${sym.id})" } diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala index f67e8fefdcbe..f553f3a6b030 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala @@ -192,7 +192,7 @@ trait TypeOps { self: TastyUniverse => if (args.exists(tpe => tpe.isInstanceOf[u.TypeBounds] | 
tpe.isInstanceOf[LambdaPolyType])) { val syms = mutable.ListBuffer.empty[Symbol] def bindWildcards(tpe: Type) = tpe match { - case tpe: u.TypeBounds => ctx.newWildcardSym(tpe).tap(syms += _).pipe(_.ref) + case tpe: u.TypeBounds => ctx.newWildcard(tpe).tap(syms += _).pipe(_.ref) case tpe: LambdaPolyType => tpe.toNested case tpe => tpe } @@ -646,7 +646,7 @@ trait TypeOps { self: TastyUniverse => override val typeParams: List[Symbol] = paramNames.lazyZip(paramInfos).map { case (name, bounds) => val argInfo = normaliseIfBounds(bounds) - ctx.owner.newTypeParameter(name, u.NoPosition, FlagSets.Creation.HKTyParam).setInfo(argInfo) + ctx.owner.newTypeParameter(name, u.NoPosition, FlagSets.Creation.BoundedType).setInfo(argInfo) } val resType: Type = lambdaResultType(resultTypeOp()) @@ -674,7 +674,7 @@ trait TypeOps { self: TastyUniverse => override val typeParams: List[Symbol] = paramNames.lazyZip(paramInfos).map { case (name, argInfo) => - ctx.owner.newTypeParameter(name, u.NoPosition, FlagSets.Creation.TyParam).setInfo(argInfo) + ctx.owner.newTypeParameter(name, u.NoPosition, FlagSets.Creation.BoundedType).setInfo(argInfo) } val resType: Type = resultTypeOp() // potentially need to flatten? (probably not, happens in typer in dotty) diff --git a/test/tasty/neg/src-2/TestDelayedPrivate.check b/test/tasty/neg/src-2/TestDelayedPrivate.check new file mode 100644 index 000000000000..dbf046b62d43 --- /dev/null +++ b/test/tasty/neg/src-2/TestDelayedPrivate.check @@ -0,0 +1,4 @@ +TestDelayedPrivate_fail.scala:7: error: value Deeper is not a member of object tastytest.DelayedPrivate.Nested + DelayedPrivate.Nested.Deeper + ^ +1 error diff --git a/test/tasty/neg/src-2/TestDelayedPrivateInverse.check b/test/tasty/neg/src-2/TestDelayedPrivateInverse.check new file mode 100644 index 000000000000..9742e9453372 --- /dev/null +++ b/test/tasty/neg/src-2/TestDelayedPrivateInverse.check @@ -0,0 +1,4 @@ +TestDelayedPrivateInverse_fail.scala:6: error: value Internal is not a member of object tastytest.DelayedPrivateInverse + val _ = DelayedPrivateInverse.Internal + ^ +1 error diff --git a/test/tasty/neg/src-2/TestDelayedPrivateInverse_fail.scala b/test/tasty/neg/src-2/TestDelayedPrivateInverse_fail.scala new file mode 100644 index 000000000000..002fa21936c9 --- /dev/null +++ b/test/tasty/neg/src-2/TestDelayedPrivateInverse_fail.scala @@ -0,0 +1,8 @@ +package tastytest + +object TestDelayedPrivateInverse { + def test: DelayedPrivateInverse.Parent[Nothing] = ??? 
// force sealed children of parent + locally { + val _ = DelayedPrivateInverse.Internal + } +} diff --git a/test/tasty/neg/src-2/TestDelayedPrivate_fail.scala b/test/tasty/neg/src-2/TestDelayedPrivate_fail.scala new file mode 100644 index 000000000000..50c7728d8e91 --- /dev/null +++ b/test/tasty/neg/src-2/TestDelayedPrivate_fail.scala @@ -0,0 +1,9 @@ +package tastytest + +object TestDelayedPrivate { + + locally { + val _ = Nil: List[DelayedPrivate.Root] // force Root to be seen first + DelayedPrivate.Nested.Deeper + } +} diff --git a/test/tasty/neg/src-3/DelayedPrivate.scala b/test/tasty/neg/src-3/DelayedPrivate.scala new file mode 100644 index 000000000000..76c2fc949d20 --- /dev/null +++ b/test/tasty/neg/src-3/DelayedPrivate.scala @@ -0,0 +1,15 @@ +package tastytest + +object DelayedPrivate { + + sealed trait Root + + object Nested { + + private object Deeper { + final class Leaf extends Root + } + + } + +} diff --git a/test/tasty/neg/src-3/DelayedPrivateInverse.scala b/test/tasty/neg/src-3/DelayedPrivateInverse.scala new file mode 100644 index 000000000000..3d03e90fb361 --- /dev/null +++ b/test/tasty/neg/src-3/DelayedPrivateInverse.scala @@ -0,0 +1,8 @@ +package tastytest + +object DelayedPrivateInverse { + private object Internal { + final class Impl extends DelayedPrivateInverse.Parent[Nothing] + } + sealed trait Parent[T] +} From 3a45c22ffbbba30f397ccf49224b0c2c1f439bfc Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Thu, 6 May 2021 01:07:18 +0200 Subject: [PATCH 171/769] handle invisible flag also document constructor type params --- .../scala/tools/nsc/tasty/TreeUnpickler.scala | 81 ++++++++++++------- .../tools/nsc/tasty/bridge/ContextOps.scala | 15 ++-- .../tools/nsc/tasty/bridge/FlagOps.scala | 15 ++-- .../tools/nsc/tasty/bridge/SymbolOps.scala | 14 ++-- test/tasty/neg/src-2/TestInvisibleDefs.check | 13 +++ .../neg/src-2/TestInvisibleDefs_fail.scala | 15 ++++ test/tasty/neg/src-3/InvisibleDefs.scala | 16 ++++ .../src-2/tastytest/TestInvisibleDefs.scala | 15 ++++ .../run/src-3/tastytest/InvisibleDefs.scala | 16 ++++ 9 files changed, 153 insertions(+), 47 deletions(-) create mode 100644 test/tasty/neg/src-2/TestInvisibleDefs.check create mode 100644 test/tasty/neg/src-2/TestInvisibleDefs_fail.scala create mode 100644 test/tasty/neg/src-3/InvisibleDefs.scala create mode 100644 test/tasty/run/src-2/tastytest/TestInvisibleDefs.scala create mode 100644 test/tasty/run/src-3/tastytest/InvisibleDefs.scala diff --git a/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala b/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala index 034f22d55def..257ac7aeb8f0 100644 --- a/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala +++ b/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala @@ -69,8 +69,12 @@ class TreeUnpickler[Tasty <: TastyUniverse]( //---------------- unpickling trees ---------------------------------------------------------------------------------- - private def registerSym(addr: Addr, sym: Symbol)(implicit ctx: Context) = { - ctx.log(s"$addr registered ${showSym(sym)} in ${location(sym.owner)}") + private def registerSym(addr: Addr, sym: Symbol, rejected: Boolean)(implicit ctx: Context) = { + assert(!(rejected && isSymbol(sym)), "expected no symbol when rejected") + ctx.log( + if (isSymbol(sym)) s"$addr registered ${showSym(sym)} in ${location(sym.owner)}" + else s"$addr registering symbol was rejected" + ) symAtAddr(addr) = sym } @@ -464,7 +468,7 @@ class TreeUnpickler[Tasty <: TastyUniverse]( createMemberSymbol() case TEMPLATE => val localDummy = ctx.newLocalDummy 
- registerSym(currentAddr, localDummy) + registerSym(currentAddr, localDummy, rejected = false) localDummy case tag => assert(tag != BIND, "bind pattern symbol creation from TASTy") @@ -475,12 +479,23 @@ class TreeUnpickler[Tasty <: TastyUniverse]( * @return the created symbol */ def createMemberSymbol()(implicit ctx: Context): Symbol = { + + def rejectSymbol(owner: Symbol, name: TastyName, flags: TastyFlagSet): Boolean = { + def isPureMixinCtor = + name == TastyName.MixinConstructor && owner.isTrait && flags.is(Stable) + def isInvisible = + flags.is(Invisible) + + isPureMixinCtor || isInvisible + } + val start = currentAddr val tag = readByte() def isTypeTag = tag === TYPEDEF || tag === TYPEPARAM val end = readEnd() val parsedName: TastyName = readTastyName() - ctx.log(s"$start ::: => create ${astTagToString(tag)} ${parsedName.debug}") + def debugSymCreate: String = s"${astTagToString(tag)} ${parsedName.debug}" + ctx.log(s"$start ::: => create $debugSymCreate") skipParams() val ttag = nextUnsharedTag val isAbsType = isAbstractType(ttag) @@ -489,13 +504,11 @@ class TreeUnpickler[Tasty <: TastyUniverse]( skipTree() // tpt val rhsIsEmpty = nothingButMods(end) if (!rhsIsEmpty) skipTree() - val (name, flags, annotations, privateWithin) = { - val (parsedFlags, annotations, privateWithin) = - readModifiers(end, readTypedAnnot, readTypedWithin, noSymbol) - val name = normalizeName(isTypeTag, parsedName) - val flags = addInferredFlags(tag, parsedFlags, name, isAbsType, isClass, rhsIsEmpty) - (name, flags, annotations, privateWithin) - } + val (parsedFlags0, annotations, privateWithin) = + readModifiers(end, readTypedAnnot, readTypedWithin, noSymbol) + val name = normalizeName(isTypeTag, parsedName) + val flags = addInferredFlags(tag, parsedFlags0, name, isAbsType, isClass, rhsIsEmpty) + def mkCompleter = new Completer(isClass, subReader(start, end), flags)(ctx.retractMode(IndexScopedStats)) def isTypeParameter = flags.is(Param) && isTypeTag def canEnterInClass = !isTypeParameter ctx.log { @@ -509,34 +522,46 @@ class TreeUnpickler[Tasty <: TastyUniverse]( } s"""$start parsed flags $debugFlags""" } + val rejected = rejectSymbol(ctx.owner, name, flags) val sym = { if (tag === TYPEPARAM && ctx.owner.isConstructor) { + // TASTy encodes type parameters for constructors + // nsc only has class type parameters ctx.findOuterClassTypeParameter(name.toTypeName) } else { - val completer = new Completer(isClass, subReader(start, end), flags)(ctx.retractMode(IndexScopedStats)) ctx.findRootSymbol(roots, name) match { case Some(rootd) => - ctx.redefineSymbol(rootd, flags, completer, privateWithin) // dotty "removes one completion" here from the flags, which is not possible in nsc - ctx.log(s"$start replaced info of ${showSym(rootd)}") - rootd + roots -= rootd + if (rejected) { + ctx.evict(rootd) + noSymbol + } + else { + ctx.redefineSymbol(rootd, flags, mkCompleter, privateWithin) + ctx.log(s"$start replaced info of ${showSym(rootd)}") + rootd + } case _ => - if (isClass) ctx.delayClassCompletion(ctx.owner, name.toTypeName, completer, privateWithin) - else ctx.delayCompletion(ctx.owner, name, completer, privateWithin) + if (rejected) noSymbol + else if (isClass) ctx.delayClassCompletion(ctx.owner, name.toTypeName, mkCompleter, privateWithin) + else ctx.delayCompletion(ctx.owner, name, mkCompleter, privateWithin) } } - }.ensuring(isSymbol(_), s"${ctx.classRoot}: Could not create symbol at $start") - if (tag == VALDEF && flags.is(FlagSets.SingletonEnum)) - ctx.markAsEnumSingleton(sym) - registerSym(start, sym) - if 
(canEnterInClass && ctx.owner.isClass) - ctx.enterIfUnseen(sym) - if (isClass) { - val localCtx = ctx.withOwner(sym) - forkAt(templateStart).indexTemplateParams()(localCtx) + } + registerSym(start, sym, rejected) + if (isSymbol(sym)) { + if (tag == VALDEF && flags.is(FlagSets.SingletonEnum)) + ctx.markAsEnumSingleton(sym) + if (canEnterInClass && ctx.owner.isClass) + ctx.enterIfUnseen(sym) + if (isClass) { + val localCtx = ctx.withOwner(sym) + forkAt(templateStart).indexTemplateParams()(localCtx) + } + ctx.adjustAnnotations(sym, annotations) } goto(start) - ctx.adjustAnnotations(sym, annotations) sym } @@ -1026,7 +1051,7 @@ class TreeUnpickler[Tasty <: TastyUniverse]( tpd.SeqLiteral(until(end)(readTerm()), elemtpt) case REFINEDtpt => val refineCls = symAtAddr.getOrElse(start, ctx.newRefinementClassSymbol) - registerSym(start, refineCls) + registerSym(start, refineCls, rejected = false) typeAtAddr(start) = refineCls.ref val parent = readTpt() ctx.withOwner(refineCls).enterRefinement(parent.tpe) { refinedCtx => diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala index f1658240a4b1..ca1052bb7241 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala @@ -59,7 +59,8 @@ trait ContextOps { self: TastyUniverse => } final def location(owner: Symbol): String = { - if (owner.isClass) s"${owner.kindString} ${owner.fullNameString}" + if (!isSymbol(owner)) "" + else if (owner.isClass) s"${owner.kindString} ${owner.fullNameString}" else s"${describeOwner(owner)} in ${location(owner.owner)}" } @@ -148,7 +149,6 @@ trait ContextOps { self: TastyUniverse => final def ignoreAnnotations: Boolean = u.settings.YtastyNoAnnotations def requiresLatentEntry(decl: Symbol): Boolean = decl.isScala3Inline - def neverEntered(decl: Symbol): Boolean = decl.isPureMixinCtor def canEnterOverload(decl: Symbol): Boolean = { !(decl.isModule && isSymbol(findObject(thisCtx.owner, decl.name))) @@ -285,11 +285,16 @@ trait ContextOps { self: TastyUniverse => } } + def evict(sym: Symbol): Unit = { + sym.owner.rawInfo.decls.unlink(sym) + sym.info = u.NoType + } + final def enterIfUnseen(sym: Symbol): Unit = { - if (mode.is(IndexScopedStats)) - initialContext.collectLatentEvidence(owner, sym) val decl = declaringSymbolOf(sym) - if (!(requiresLatentEntry(decl) || neverEntered(decl))) + if (mode.is(IndexScopedStats)) + initialContext.collectLatentEvidence(owner, decl) + if (!requiresLatentEntry(decl)) enterIfUnseen0(owner.rawInfo.decls, decl) } diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/FlagOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/FlagOps.scala index ba6d993dfec2..c732138681e5 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/FlagOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/FlagOps.scala @@ -16,8 +16,8 @@ import scala.tools.tasty.TastyFlags._ import scala.tools.nsc.tasty.TastyUniverse import scala.reflect.internal.{Flags, ModifierFlags} -/**Handles encoding of `TastyFlagSet` to `scala.reflect` flags and witnessing which flags do not map directly - * from TASTy. +/** Handles encoding of `TastyFlagSet` to `scala.reflect` flags and witnessing which flags do not map directly + * from TASTy. 
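As a small aside, the encoding this file performs follows a simple bitmask-translation shape, the same one visible in `unsafeEncodeTastyFlagSet` earlier in this series: test each known flag and OR in the corresponding bit of the target representation. The sketch below is self-contained and illustrative only; its flag names and bit values are invented and do not correspond to the real TASTy or scala-reflect encodings.

object FlagEncodingSketch {
  // A Long-backed flag set in the style of TastyFlagSet (simplified, invented values).
  final class FlagSet(val bits: Long) extends AnyVal {
    def |(that: FlagSet): FlagSet   = new FlagSet(bits | that.bits)
    def &~(that: FlagSet): FlagSet  = new FlagSet(bits & ~that.bits)
    def is(mask: FlagSet): Boolean  = (bits & mask.bits) == mask.bits
    def not(mask: FlagSet): Boolean = (bits & mask.bits) == 0L
  }

  val Empty     = new FlagSet(0L)
  val Private   = new FlagSet(1L << 0)
  val Protected = new FlagSet(1L << 1)
  val Method    = new FlagSet(1L << 2)

  // The target representation uses different bit positions (also invented).
  val TargetPrivate   = 0x0004L
  val TargetProtected = 0x0008L
  val TargetMethod    = 0x0100L

  // Translate by testing membership flag by flag; flags with no counterpart in the
  // target encoding are simply left out, which is the "witnessing which flags do
  // not map directly" part of the doc comment above.
  def encode(flags: FlagSet): Long = {
    var out = 0L
    if (flags.is(Private))   out |= TargetPrivate
    if (flags.is(Protected)) out |= TargetProtected
    if (flags.is(Method))    out |= TargetMethod
    out
  }
}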
*/ trait FlagOps { self: TastyUniverse => import self.{symbolTable => u} @@ -26,7 +26,7 @@ trait FlagOps { self: TastyUniverse => val TastyOnlyFlags: TastyFlagSet = ( Erased | Internal | Inline | InlineProxy | Opaque | Extension | Given | Exported | Transparent - | Enum | Infix | Open | ParamAlias | Invisible + | Enum | Infix | Open | ParamAlias | Invisible ) object Creation { @@ -52,14 +52,17 @@ trait FlagOps { self: TastyUniverse => implicit final class SymbolFlagOps(val sym: Symbol) { def reset(tflags: TastyFlagSet)(implicit ctx: Context): sym.type = ctx.resetFlag0(sym, unsafeEncodeTastyFlagSet(tflags)) - def isOneOf(mask: TastyFlagSet): Boolean = sym.hasFlag(unsafeEncodeTastyFlagSet(mask)) - def is(mask: TastyFlagSet): Boolean = sym.hasAllFlags(unsafeEncodeTastyFlagSet(mask)) + def isOneOf(mask: TastyFlagSet): Boolean = + sym.hasFlag(unsafeEncodeTastyFlagSet(mask)) + def is(mask: TastyFlagSet): Boolean = + sym.hasAllFlags(unsafeEncodeTastyFlagSet(mask)) def is(mask: TastyFlagSet, butNot: TastyFlagSet): Boolean = if (!butNot) sym.is(mask) else sym.is(mask) && sym.not(butNot) - def not(mask: TastyFlagSet): Boolean = sym.hasNoFlags(unsafeEncodeTastyFlagSet(mask)) + def not(mask: TastyFlagSet): Boolean = + sym.hasNoFlags(unsafeEncodeTastyFlagSet(mask)) } /** encodes a `TastyFlagSet` as a `symbolTable.FlagSet`, the flags in `FlagSets.TastyOnlyFlags` are ignored. diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/SymbolOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/SymbolOps.scala index d0f6fb756b0e..21afc92da34f 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/SymbolOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/SymbolOps.scala @@ -49,10 +49,6 @@ trait SymbolOps { self: TastyUniverse => def isScala3Inline: Boolean = repr.originalFlagSet.is(Inline) def isScala2Macro: Boolean = repr.originalFlagSet.is(FlagSets.Scala2Macro) - - def isPureMixinCtor: Boolean = isMixinCtor && repr.originalFlagSet.is(Stable) - def isMixinCtor: Boolean = u.nme.MIXIN_CONSTRUCTOR == sym.name && sym.owner.isTrait - def isTraitParamAccessor: Boolean = sym.owner.isTrait && repr.originalFlagSet.is(FieldAccessor|ParamSetter) def isParamGetter: Boolean = @@ -176,10 +172,12 @@ trait SymbolOps { self: TastyUniverse => val paramSyms = meth0.paramss.flatten val resTpe = meth0.finalResultType val sameParamSize = paramSyms.length === paramRefs.length - def sameTyParamSize = tyParamCount === ( - if (qual === TastyName.Constructor) member.owner.typeParams.length - else sym.typeParams.length - ) + def sameTyParamSize = tyParamCount === ({ + // the signature of a class/mixin constructor includes + // type parameters, in nsc these come from the parent. + val tyParamOwner = if (qual.isConstructorName) member.owner else sym + tyParamOwner.typeParams.length + }) def sameParams = paramSyms.lazyZip(paramRefs).forall({ case (paramSym, paramRef) => sameErasure(sym)(paramSym.tpe, paramRef) }) diff --git a/test/tasty/neg/src-2/TestInvisibleDefs.check b/test/tasty/neg/src-2/TestInvisibleDefs.check new file mode 100644 index 000000000000..9ce3bf4804cd --- /dev/null +++ b/test/tasty/neg/src-2/TestInvisibleDefs.check @@ -0,0 +1,13 @@ +TestInvisibleDefs_fail.scala:5: error: type argIsHello is not a member of package tastytest + def foo: tastytest.argIsHello = ??? // has invisible flag so should not be seen + ^ +TestInvisibleDefs_fail.scala:6: error: type argIsHello is not a member of package tastytest + def bar: tastytest.argIsHello = ??? 
// second try on same type + ^ +TestInvisibleDefs_fail.scala:11: error: value getStatus is not a member of tastytest.InvisibleDefs.MyBean + mybean.getStatus() // error + ^ +TestInvisibleDefs_fail.scala:12: error: value setStatus is not a member of tastytest.InvisibleDefs.MyBean + mybean.setStatus("closed") // error + ^ +4 errors diff --git a/test/tasty/neg/src-2/TestInvisibleDefs_fail.scala b/test/tasty/neg/src-2/TestInvisibleDefs_fail.scala new file mode 100644 index 000000000000..d8e681206150 --- /dev/null +++ b/test/tasty/neg/src-2/TestInvisibleDefs_fail.scala @@ -0,0 +1,15 @@ +package tastytest + +object TestInvisibleDefs { + + def foo: tastytest.argIsHello = ??? // has invisible flag so should not be seen + def bar: tastytest.argIsHello = ??? // second try on same type + + def testBean = { + val mybean = new InvisibleDefs.MyBean + mybean.status = "open" + mybean.getStatus() // error + mybean.setStatus("closed") // error + } + +} diff --git a/test/tasty/neg/src-3/InvisibleDefs.scala b/test/tasty/neg/src-3/InvisibleDefs.scala new file mode 100644 index 000000000000..5bd0190c28e1 --- /dev/null +++ b/test/tasty/neg/src-3/InvisibleDefs.scala @@ -0,0 +1,16 @@ +package tastytest + +import scala.beans.BeanProperty + +object InvisibleDefs { + + @main def argIsHello(arg: String): Unit = assert(arg == "Hello") + + class MyBean { + + @BeanProperty + var status = "" + + } + +} diff --git a/test/tasty/run/src-2/tastytest/TestInvisibleDefs.scala b/test/tasty/run/src-2/tastytest/TestInvisibleDefs.scala new file mode 100644 index 000000000000..4962af12bbe4 --- /dev/null +++ b/test/tasty/run/src-2/tastytest/TestInvisibleDefs.scala @@ -0,0 +1,15 @@ +package tastytest + +object TestInvisibleDefs extends Suite("TestInvisibleDefs") { + + test("invoke '@main def argIsHello'") { + InvisibleDefs.argIsHello("Hello") + } + + test("update bean.status") { + val mybean = new InvisibleDefs.MyBean + mybean.status = "open" + assert(mybean.status === "open") + } + +} diff --git a/test/tasty/run/src-3/tastytest/InvisibleDefs.scala b/test/tasty/run/src-3/tastytest/InvisibleDefs.scala new file mode 100644 index 000000000000..5bd0190c28e1 --- /dev/null +++ b/test/tasty/run/src-3/tastytest/InvisibleDefs.scala @@ -0,0 +1,16 @@ +package tastytest + +import scala.beans.BeanProperty + +object InvisibleDefs { + + @main def argIsHello(arg: String): Unit = assert(arg == "Hello") + + class MyBean { + + @BeanProperty + var status = "" + + } + +} From 466c5107d46c2c23f8ec8f84028699f59de56aec Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Fri, 7 May 2021 19:22:39 +0200 Subject: [PATCH 172/769] add escape hatch for scala 3.0.0 --- .../tools/tasty/TastyHeaderUnpickler.scala | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/src/compiler/scala/tools/tasty/TastyHeaderUnpickler.scala b/src/compiler/scala/tools/tasty/TastyHeaderUnpickler.scala index 783fc41bb5c5..57c36d0ffb29 100644 --- a/src/compiler/scala/tools/tasty/TastyHeaderUnpickler.scala +++ b/src/compiler/scala/tools/tasty/TastyHeaderUnpickler.scala @@ -51,7 +51,7 @@ class TastyHeaderUnpickler(reader: TastyReader) { compilerMajor = MajorVersion, compilerMinor = MinorVersion, compilerExperimental = ExperimentalVersion - ) + ) || scala3finalException(fileMajor, fileMinor, fileExperimental) check(validVersion, { val signature = signatureString(fileMajor, fileMinor, fileExperimental) @@ -69,8 +69,6 @@ class TastyHeaderUnpickler(reader: TastyReader) { } } - def isAtEnd: Boolean = reader.isAtEnd - private def check(cond: Boolean, msg: => String): 
Unit = { if (!cond) throw new UnpickleException(msg) } @@ -78,6 +76,20 @@ class TastyHeaderUnpickler(reader: TastyReader) { object TastyHeaderUnpickler { + /** This escape hatch allows 28.0.3 compiler to read + * 28.0.0 TASTy files (aka produced by Scala 3.0.0 final) + * @note this should be removed if we are able to test against + * Scala 3.0.0 before releasing Scala 2.13.6 + */ + private def scala3finalException( + fileMajor: Int, + fileMinor: Int, + fileExperimental: Int): Boolean = ( + MajorVersion == 28 && fileMajor == 28 + && MinorVersion == 0 && fileMinor == 0 + && ExperimentalVersion == 3 && fileExperimental == 0 + ) + private def toolingAddendum = ( if (ExperimentalVersion > 0) "\nNote that your tooling is currently using an unstable TASTy version." From 40e2ab5aa56ad07bd903b78ea1b2a547629a8523 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Mon, 10 May 2021 11:36:42 +0100 Subject: [PATCH 173/769] Handle Singleton types in patmat's outer prefix align testing Singleton abstract types are isStable, but they don't have a term symbol. But, at least in the given test case, they do have a type symbol (the type parameter symbol) so we can construct a fresh singleton type using that type symbol and use that to determine that no outer test is needed for the prefix. That hinges on the assumption that the machinery around Singleton is successfully enforcing that it all ends up deriving from a single value and two types are never from different prefixes. Alternatively we can just use `pre.typeSymbol == NoSymbol` as a guard and always emit an outer test on the prefix. That undermines that Singleton abstract types are stable, but I can't tell in the context of outer tests whether that's the right or the wrong choice... --- .../nsc/transform/patmat/MatchTreeMaking.scala | 3 ++- test/files/pos/t12392.scala | 14 ++++++++++++++ 2 files changed, 16 insertions(+), 1 deletion(-) create mode 100644 test/files/pos/t12392.scala diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala index 27749a6035d8..e6ac5f16d358 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala @@ -394,6 +394,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { false case TypeRef(pre, sym, args) => val testedBinderClass = testedBinder.info.upperBound.typeSymbol + // alternatively..... 
= testedBinder.info.baseClasses.find(_.isClass).getOrElse(NoSymbol) val testedBinderType = testedBinder.info.baseType(testedBinderClass) val testedPrefixIsExpectedTypePrefix = pre =:= testedBinderType.prefix @@ -402,7 +403,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { case ThisType(thissym) => ThisType(thissym.cloneSymbol(thissym.owner)) case _ => - val preSym = pre.termSymbol + val preSym = pre.termSymbol.orElse(pre.typeSymbol) val freshPreSym = preSym.cloneSymbol(preSym.owner).setInfo(preSym.info) singleType(pre.prefix, freshPreSym) } diff --git a/test/files/pos/t12392.scala b/test/files/pos/t12392.scala new file mode 100644 index 000000000000..78496e1aa392 --- /dev/null +++ b/test/files/pos/t12392.scala @@ -0,0 +1,14 @@ +import scala.reflect.api.Universe + +object Test { + type SingletonUniverse = Universe with Singleton + def deepIntersectionTypeMembers[U <: SingletonUniverse](targetType: U#Type): List[U#Type] = { + def go(tpe: U#Type): List[U#Type] = { + tpe match { + case r: U#RefinedTypeApi => r.parents.flatMap(t => deepIntersectionTypeMembers[U]((t.dealias): U#Type)) + case _ => List(tpe) + } + } + go(targetType).distinct + } +} From e8e7cca88ee1bd4571e0659219d233f55e562fc8 Mon Sep 17 00:00:00 2001 From: Diego Alonso Date: Sun, 16 Feb 2020 20:00:00 +0000 Subject: [PATCH 174/769] Mutable Symbol substitution to `cloneSymbols` and `copyRefinedType` We found allocation hotspots of `SubstSymMap` objects in the methods `cloneSymbols` and `copyRefinedType`. To cool them, we introduce reusable instances (`ReusableInstance`) holding a mutable symbol substitution map. To introduce this mutable symbol substitution, we use two intermediate abstract classes, `AbstractSubstMap` and `AbstractSubstSymMap`, that keep as much of the logic as possible in a shared template. The logic depends on a "find" method, which looks for a symbol (key) in the "map", which in the common case is implemented as two lists. --- .../scala/reflect/internal/Symbols.scala | 16 +++++- .../scala/reflect/internal/Types.scala | 12 +++-- .../scala/reflect/internal/tpe/TypeMaps.scala | 51 +++++++++++++++---- 3 files changed, 63 insertions(+), 16 deletions(-) diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 20f75fa7f14c..e5be8c03d471 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -20,7 +20,7 @@ package internal import scala.collection.immutable import scala.collection.mutable.ListBuffer -import util.{ Statistics, shortClassOfInstance } +import util.{ ReusableInstance, Statistics, shortClassOfInstance } import Flags._ import scala.annotation.tailrec import scala.reflect.io.{AbstractFile, NoAbstractFile} @@ -3760,7 +3760,19 @@ trait Symbols extends api.Symbols { self: SymbolTable => /** Convenience functions which derive symbols by cloning.
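The reuse pattern described in the commit message above is easier to see outside the diff. The following stand-alone Scala sketch is illustrative only and is not part of the patch: every name in it (`Reusable`, `MutableSubst`, `Sym`) is invented, and it deliberately ignores concerns such as nested or concurrent use that the real `ReusableInstance` utility has to handle. It shows the core idea: one cached mutable substitution object is reloaded with fresh from/to lists on each call instead of allocating a new map, and lookup walks the two lists in lockstep.

object SubstReuseSketch {
  type Sym = String // stand-in for compiler symbols, to keep the sketch self-contained

  // A single cached instance handed out to callers, instead of a fresh allocation per call.
  final class Reusable[T](make: () => T) {
    private[this] val cached = make()
    def using[R](action: T => R): R = action(cached)
  }

  // A substitution whose from/to lists can be swapped out between uses.
  final class MutableSubst {
    private[this] var from: List[Sym] = Nil
    private[this] var to: List[Sym] = Nil
    def reload(newFrom: List[Sym], newTo: List[Sym]): Unit = { from = newFrom; to = newTo }
    // the "find": walk both lists in lockstep until the key is located
    def lookup(sym: Sym): Option[Sym] = {
      @annotation.tailrec
      def loop(fs: List[Sym], ts: List[Sym]): Option[Sym] = (fs, ts) match {
        case (f :: fs1, t :: ts1) => if (f == sym) Some(t) else loop(fs1, ts1)
        case _                    => None
      }
      loop(from, to)
    }
  }

  private val substCache = new Reusable(() => new MutableSubst)

  def substitute(syms: List[Sym], from: List[Sym], to: List[Sym]): List[Sym] =
    substCache.using { subst =>
      subst.reload(from, to)
      syms.map(s => subst.lookup(s).getOrElse(s))
    }
}

For example, SubstReuseSketch.substitute(List("a", "b"), List("a"), List("x")) returns List("x", "b") while reusing the same cached MutableSubst on every call.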
*/ def cloneSymbols(syms: List[Symbol]): List[Symbol] = - deriveSymbols(syms, _.cloneSymbol) + if (syms.isEmpty) Nil + else { + val syms1 = mapList(syms)(_.cloneSymbol) + cloneSymbolsSubstSymMap.using { (msm: MutableSubstSymMap) => + msm.reset(syms, syms1) + syms1.foreach(_.modifyInfo(msm)) + } + syms1 + } + + private[this] val cloneSymbolsSubstSymMap: ReusableInstance[MutableSubstSymMap] = + ReusableInstance[MutableSubstSymMap]( new MutableSubstSymMap()) + def cloneSymbolsAtOwner(syms: List[Symbol], owner: Symbol): List[Symbol] = deriveSymbols(syms, _ cloneSymbol owner) diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index b96fe784a704..c354ab242089 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -22,7 +22,7 @@ import mutable.{ListBuffer, LinkedHashSet} import Flags._ import scala.util.control.ControlThrowable import scala.annotation.{tailrec, unused} -import util.Statistics +import util.{ReusableInstance, Statistics} import util.ThreeValues._ import Variance._ import Depth._ @@ -4041,6 +4041,9 @@ trait Types def refinedType(parents: List[Type], owner: Symbol): Type = refinedType(parents, owner, newScope, owner.pos) + private[this] val copyRefinedTypeSSM: ReusableInstance[MutableSubstSymMap] = + ReusableInstance[MutableSubstSymMap](new MutableSubstSymMap()) + def copyRefinedType(original: RefinedType, parents: List[Type], decls: Scope) = if ((parents eq original.parents) && (decls eq original.decls)) original else { @@ -4055,9 +4058,10 @@ trait Types val syms2 = result.decls.toList val resultThis = result.typeSymbol.thisType val substThisMap = new SubstThisMap(original.typeSymbol, resultThis) - val substMap = new SubstSymMap(syms1, syms2) - for (sym <- syms2) - sym.modifyInfo(info => substMap.apply(substThisMap.apply(info))) + copyRefinedTypeSSM.using { (msm: MutableSubstSymMap) => + msm.reset(syms1, syms2) + syms2.foreach(_.modifyInfo(info => msm.apply(substThisMap.apply(info)))) + } } result } diff --git a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala index 5604e7d88e86..4e25828a6a56 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala @@ -664,20 +664,31 @@ private[internal] trait TypeMaps { } /** A base class to compute all substitutions */ - abstract class SubstMap[T](from: List[Symbol], to: List[T]) extends TypeMap { - // OPT this check was 2-3% of some profiles, demoted to -Xdev - if (isDeveloper) assert(sameLength(from, to), "Unsound substitution from "+ from +" to "+ to) + abstract class AbstractSubstMap[T >: Null] extends TypeMap { + protected def from: List[Symbol] = Nil + protected def to: List[T] = Nil private[this] var fromHasTermSymbol = false private[this] var fromMin = Int.MaxValue private[this] var fromMax = Int.MinValue private[this] var fromSize = 0 - from.foreach { - sym => + + protected def reload(): Unit = { + // OPT this check was 2-3% of some profiles, demoted to -Xdev + if (isDeveloper) assert(sameLength(from, to), "Unsound substitution from "+ from +" to "+ to) + + fromHasTermSymbol = false + fromMin = Int.MaxValue + fromMax = Int.MinValue + fromSize = 0 + + from.foreach { + sym => fromMin = math.min(fromMin, sym.id) fromMax = math.max(fromMax, sym.id) fromSize += 1 if (sym.isTerm) fromHasTermSymbol = true + } } /** Are `sym` and `sym1` the same? Can be tuned by subclasses. 
*/ @@ -759,9 +770,7 @@ private[internal] trait TypeMaps { } } - /** A map to implement the `substSym` method. */ - class SubstSymMap(from: List[Symbol], to: List[Symbol]) extends SubstMap(from, to) { - def this(pairs: (Symbol, Symbol)*) = this(pairs.toList.map(_._1), pairs.toList.map(_._2)) + abstract class AbstractSubstSymMap extends AbstractSubstMap[Symbol] { protected def toType(fromTpe: Type, sym: Symbol) = fromTpe match { case TypeRef(pre, _, args) => copyTypeRef(fromTpe, pre, sym, args) @@ -821,9 +830,31 @@ private[internal] trait TypeMaps { mapTreeSymbols.transform(tree) } + /** A map to implement the `substSym` method. */ + class SubstSymMap(override val from: List[Symbol], override val to: List[Symbol]) extends AbstractSubstSymMap { + reload() + + def this(pairs: (Symbol, Symbol)*) = this(pairs.toList.map(_._1), pairs.toList.map(_._2)) + } + + class MutableSubstSymMap extends AbstractSubstSymMap { + private[this] var _from: List[Symbol] = Nil + private[this] var _to: List[Symbol] = Nil + + override def from: List[Symbol] = _from + override def to : List[Symbol] = _to + + def reset(nfrom: List[Symbol], nto: List[Symbol]): Unit = { + _from = nfrom + _to = nto + reload() + } + } + /** A map to implement the `subst` method. */ - class SubstTypeMap(val from: List[Symbol], val to: List[Type]) extends SubstMap(from, to) { - protected def toType(fromtp: Type, tp: Type) = tp + class SubstTypeMap(override val from: List[Symbol], override val to: List[Type]) extends AbstractSubstMap[Type] { + super.reload() + override protected def toType(fromtp: Type, tp: Type) = tp override def mapOver(tree: Tree, giveup: () => Nothing): Tree = { object trans extends TypeMapTransformer { From 48f75af555e2777e2c9d23c8899effe779de0d0c Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Mon, 10 May 2021 16:05:07 +0200 Subject: [PATCH 175/769] remove INTERNAL flag it was a no-op in dotty --- src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala | 1 - src/compiler/scala/tools/nsc/tasty/bridge/FlagOps.scala | 3 +-- src/compiler/scala/tools/tasty/TastyFlags.scala | 4 +--- src/compiler/scala/tools/tasty/TastyFormat.scala | 3 --- 4 files changed, 2 insertions(+), 9 deletions(-) diff --git a/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala b/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala index 257ac7aeb8f0..45ae91f1fc67 100644 --- a/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala +++ b/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala @@ -581,7 +581,6 @@ class TreeUnpickler[Tasty <: TastyUniverse]( } nextByte match { case PRIVATE => addFlag(Private) - case INTERNAL => addFlag(Internal) case PROTECTED => addFlag(Protected) case ABSTRACT => readByte() diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/FlagOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/FlagOps.scala index c732138681e5..cc49e5131a71 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/FlagOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/FlagOps.scala @@ -25,7 +25,7 @@ trait FlagOps { self: TastyUniverse => object FlagSets { val TastyOnlyFlags: TastyFlagSet = ( - Erased | Internal | Inline | InlineProxy | Opaque | Extension | Given | Exported | Transparent + Erased | Inline | InlineProxy | Opaque | Extension | Given | Exported | Transparent | Enum | Infix | Open | ParamAlias | Invisible ) @@ -108,7 +108,6 @@ trait FlagOps { self: TastyUniverse => else { val sb = collection.mutable.ArrayBuffer.empty[String] if (flags.is(Erased)) sb += "erased" - if (flags.is(Internal)) sb += "" if (flags.is(Inline)) sb += 
"inline" if (flags.is(InlineProxy)) sb += "" if (flags.is(Opaque)) sb += "opaque" diff --git a/src/compiler/scala/tools/tasty/TastyFlags.scala b/src/compiler/scala/tools/tasty/TastyFlags.scala index 62e71e614859..f4e66b066c50 100644 --- a/src/compiler/scala/tools/tasty/TastyFlags.scala +++ b/src/compiler/scala/tools/tasty/TastyFlags.scala @@ -47,8 +47,7 @@ object TastyFlags { final val Deferred = Param.next final val Method = Deferred.next final val Erased = Method.next - final val Internal = Erased.next - final val Inline = Internal.next + final val Inline = Erased.next final val InlineProxy = Inline.next final val Opaque = InlineProxy.next final val Extension = Opaque.next @@ -124,7 +123,6 @@ object TastyFlags { if (is(Deferred)) sb += "Deferred" if (is(Method)) sb += "Method" if (is(Erased)) sb += "Erased" - if (is(Internal)) sb += "Internal" if (is(Inline)) sb += "Inline" if (is(InlineProxy)) sb += "InlineProxy" if (is(Opaque)) sb += "Opaque" diff --git a/src/compiler/scala/tools/tasty/TastyFormat.scala b/src/compiler/scala/tools/tasty/TastyFormat.scala index cc5d320d1dce..8ca2ecd50203 100644 --- a/src/compiler/scala/tools/tasty/TastyFormat.scala +++ b/src/compiler/scala/tools/tasty/TastyFormat.scala @@ -187,7 +187,6 @@ object TastyFormat { final val TRUEconst = 4 final val NULLconst = 5 final val PRIVATE = 6 - final val INTERNAL = 7 final val PROTECTED = 8 final val ABSTRACT = 9 final val FINAL = 10 @@ -352,7 +351,6 @@ object TastyFormat { def isModifierTag(tag: Int): Boolean = tag match { case PRIVATE - | INTERNAL | PROTECTED | ABSTRACT | FINAL @@ -416,7 +414,6 @@ object TastyFormat { case TRUEconst => "TRUEconst" case NULLconst => "NULLconst" case PRIVATE => "PRIVATE" - case INTERNAL => "INTERNAL" case PROTECTED => "PROTECTED" case ABSTRACT => "ABSTRACT" case FINAL => "FINAL" From da96798ca8066220673932785fe6053dcd64b4ae Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Mon, 10 May 2021 14:14:20 +0100 Subject: [PATCH 176/769] Fix an infinite loop bug in ExplicitOuter --- .../tools/nsc/transform/ExplicitOuter.scala | 2 +- test/files/pos/t12312-hmm.scala | 45 +++++++++++++++++++ 2 files changed, 46 insertions(+), 1 deletion(-) create mode 100644 test/files/pos/t12312-hmm.scala diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala index a271dcbc57cf..3971302b1c98 100644 --- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala +++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala @@ -492,7 +492,7 @@ abstract class ExplicitOuter extends InfoTransform // D.this.$outer().a().X().isInstanceOf[D.this.$outer.a.X.type]() case TypeApply(fun, targs) => val rewriteTypeToExplicitOuter = new TypeMap { typeMap => - def apply(tp: Type) = tp map { + def apply(tp: Type) = tp match { case ThisType(sym) if sym != currentClass && !(sym.hasModuleFlag && sym.isStatic) => var cls = currentClass var tpe = cls.thisType diff --git a/test/files/pos/t12312-hmm.scala b/test/files/pos/t12312-hmm.scala new file mode 100644 index 000000000000..16decd4f9325 --- /dev/null +++ b/test/files/pos/t12312-hmm.scala @@ -0,0 +1,45 @@ +package hmm + +// Taken from https://github.com/typelevel/kind-projector/blob/7ad46d6ca995976ae2ff18215dbb32cd7ad0dd7a/src/test/scala/hmm.scala +// As a regression test for the issue spotted in https://github.com/scala/community-build/pull/1400 + +class TC[A] + +object TC { + def apply[A]: Unit = () +} + +object test { + + sealed trait HList extends Product with Serializable + case class ::[+H, +T <: 
HList](head : H, tail : T) extends HList + sealed trait HNil extends HList + case object HNil extends HNil + + TC[Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: HNil] + + TC[Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: HNil] + + TC[Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: + Int :: Int :: Int :: Int :: Int :: Int :: Int :: Int :: HNil] +} From b796296bb1f335cd682acc8a9529a1131cbadfe9 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Mon, 10 May 2021 20:41:11 -0700 Subject: [PATCH 177/769] partially revert scala/scala#9365 to preserve bincompat --- src/library/scala/collection/immutable/List.scala | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala index 4e139f1ee5a6..dc117a0bdb72 100644 --- a/src/library/scala/collection/immutable/List.scala +++ b/src/library/scala/collection/immutable/List.scala @@ -614,9 +614,11 @@ sealed abstract class List[+A] } } + // TODO: uncomment once bincompat allows (reference: scala/scala#9365) + /* // Override for performance: traverse only as much as needed // and share tail when nothing needs to be filtered out anymore - override def diff[B >: A](that: collection.Seq[B]): List[A] = { + override def diff[B >: A](that: collection.Seq[B]): AnyRef = { if (that.isEmpty || this.isEmpty) this else if (tail.isEmpty) if (that.contains(head)) Nil else this else { @@ -643,6 +645,7 @@ sealed abstract class List[+A] rec(this) } } + */ } From 477cb68453db097416fa015c54aa9e582dfc2155 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Mon, 10 May 2021 09:06:16 -0700 Subject: [PATCH 178/769] MiMa 0.9.0 (was 0.8.1) delicious dogfood! 
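The revert above keeps the new `List.diff` override commented out until binary compatibility allows it, and this commit bumps sbt-mima-plugin, the tool that enforces that constraint. For illustration only, here is roughly what recording such an exception looks like with sbt-mima-plugin's `ProblemFilters` API; the problem type chosen and the place where the scala/scala build actually keeps its filters are assumptions, not taken from this patch series.

// Hypothetical sbt snippet, not part of the scala/scala build.
import com.typesafe.tools.mima.core._

mimaBinaryIssueFilters ++= Seq(
  // A newly added override typically shows up as a missing method when the
  // released artifact is compared against the new one in the reverse direction.
  ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.immutable.List.diff")
)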
--- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 73ce8dc22df5..f049bdae5c79 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -17,7 +17,7 @@ buildInfoKeys := Seq[BuildInfoKey](buildClasspath) buildInfoPackage := "scalabuild" -addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.8.1") +addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.9.0") libraryDependencies ++= Seq( "org.eclipse.jgit" % "org.eclipse.jgit" % "4.6.0.201612231935-r", From 82086dbf0cbee8e0842bb67b6bfecad2638a3f9d Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Mon, 10 May 2021 23:31:42 +0200 Subject: [PATCH 179/769] Deprecate calling a type `?` without backticks https://github.com/scala/scala/pull/9560 introduced a new meaning for `?` under `-Xsource:3`, but to smooth out the migration it'd be nice if we could also enable this meaning by default. Before doing so, let's deprecate any current usage of `?` as a type that isn't wrapped in backticks. --- .../scala/tools/nsc/ast/parser/Parsers.scala | 24 +++++++++-- test/files/neg/qmark-deprecated.check | 42 +++++++++++++++++++ test/files/neg/qmark-deprecated.scala | 40 ++++++++++++++++++ 3 files changed, 102 insertions(+), 4 deletions(-) create mode 100644 test/files/neg/qmark-deprecated.check create mode 100644 test/files/neg/qmark-deprecated.scala diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 0a2c75cea805..602b5f1280a9 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -725,6 +725,14 @@ self => def isWildcardType = in.token == USCORE || isScala3WildcardType def isScala3WildcardType = settings.isScala3 && isRawIdent && in.name == raw.QMARK + def checkQMarkUsage() = + if (!settings.isScala3 && isRawIdent && in.name == raw.QMARK) + deprecationWarning(in.offset, + "`?` in a type will be interpreted as a wildcard in the future, wrap it in backticks to keep the current meaning.", "2.13.6") + def checkQMarkDefinition() = + if (isRawIdent && in.name == raw.QMARK) + deprecationWarning(in.offset, + "using `?` as a type name will require backticks in the future.", "2.13.6") def isIdent = in.token == IDENTIFIER || in.token == BACKQUOTED_IDENT def isMacro = in.token == IDENTIFIER && in.name == nme.MACROkw @@ -1148,11 +1156,13 @@ self => } else if (isWildcardType) { val scala3Wildcard = isScala3WildcardType wildcardType(in.skipToken(), scala3Wildcard) - } else + } else { + checkQMarkUsage() path(thisOK = false, typeOK = true) match { case r @ SingletonTypeTree(_) => r case r => convertToTypeId(r) } + } }) } } @@ -1296,8 +1306,11 @@ self => def rawIdent(): Name = try in.name finally in.nextToken() /** For when it's known already to be a type name. 
*/ - def identForType(): TypeName = ident().toTypeName - def identForType(skipIt: Boolean): TypeName = ident(skipIt).toTypeName + def identForType(): TypeName = identForType(skipIt = true) + def identForType(skipIt: Boolean): TypeName = { + checkQMarkDefinition() + ident(skipIt).toTypeName + } def identOrMacro(): Name = if (isMacro) rawIdent() else ident() @@ -2065,12 +2078,14 @@ self => in.nextToken() if (in.token == SUBTYPE || in.token == SUPERTYPE) wildcardType(start, scala3Wildcard) else atPos(start) { Bind(tpnme.WILDCARD, EmptyTree) } - } else + } else { + checkQMarkUsage() typ() match { case Ident(name: TypeName) if nme.isVariableName(name) => atPos(start) { Bind(name, EmptyTree) } case t => t } + } } /** {{{ @@ -2569,6 +2584,7 @@ self => } } val nameOffset = in.offset + checkQMarkDefinition() // TODO AM: freshTermName(o2p(in.skipToken()), "_$$"), will need to update test suite val pname: TypeName = wildcardOrIdent().toTypeName val param = atPos(start, nameOffset) { diff --git a/test/files/neg/qmark-deprecated.check b/test/files/neg/qmark-deprecated.check new file mode 100644 index 000000000000..f1b7f333478a --- /dev/null +++ b/test/files/neg/qmark-deprecated.check @@ -0,0 +1,42 @@ +qmark-deprecated.scala:4: warning: using `?` as a type name will require backticks in the future. +class Foo[?] // error + ^ +qmark-deprecated.scala:6: warning: using `?` as a type name will require backticks in the future. +class Bar[M[?] <: List[?]] // errors + ^ +qmark-deprecated.scala:6: warning: `?` in a type will be interpreted as a wildcard in the future, wrap it in backticks to keep the current meaning. +class Bar[M[?] <: List[?]] // errors + ^ +qmark-deprecated.scala:10: warning: using `?` as a type name will require backticks in the future. + class ? { val x = 1 } // error + ^ +qmark-deprecated.scala:16: warning: using `?` as a type name will require backticks in the future. + trait ? // error + ^ +qmark-deprecated.scala:22: warning: using `?` as a type name will require backticks in the future. + type ? = Int // error + ^ +qmark-deprecated.scala:27: warning: `?` in a type will be interpreted as a wildcard in the future, wrap it in backticks to keep the current meaning. + val x: Array[?] = new Array[?](0) // errors + ^ +qmark-deprecated.scala:27: warning: `?` in a type will be interpreted as a wildcard in the future, wrap it in backticks to keep the current meaning. + val x: Array[?] = new Array[?](0) // errors + ^ +qmark-deprecated.scala:30: warning: `?` in a type will be interpreted as a wildcard in the future, wrap it in backticks to keep the current meaning. + def foo1[T <: Array[?]](x: T): Array[?] = x // errors + ^ +qmark-deprecated.scala:30: warning: `?` in a type will be interpreted as a wildcard in the future, wrap it in backticks to keep the current meaning. + def foo1[T <: Array[?]](x: T): Array[?] = x // errors + ^ +qmark-deprecated.scala:33: warning: using `?` as a type name will require backticks in the future. + def bar1[?] = {} // error + ^ +qmark-deprecated.scala:35: warning: using `?` as a type name will require backticks in the future. + def bar3[M[?]] = {} // error + ^ +qmark-deprecated.scala:38: warning: using `?` as a type name will require backticks in the future. + type A[?] = Int // error + ^ +error: No warnings can be incurred under -Werror. 
+13 warnings +1 error diff --git a/test/files/neg/qmark-deprecated.scala b/test/files/neg/qmark-deprecated.scala new file mode 100644 index 000000000000..c370cfcb2673 --- /dev/null +++ b/test/files/neg/qmark-deprecated.scala @@ -0,0 +1,40 @@ +// scalac: -deprecation -Xfatal-warnings +// + +class Foo[?] // error +class Foo2[`?`] // ok +class Bar[M[?] <: List[?]] // errors +class Bar2[M[`?`] <: List[`?`]] // ok + +object G { + class ? { val x = 1 } // error +} +object G2 { + class `?` { val x = 1 } // ok +} +object H { + trait ? // error +} +object H2 { + trait `?` // ok +} +object I { + type ? = Int // error +} +object I2 { + type `?` = Int // ok + + val x: Array[?] = new Array[?](0) // errors + val y: Array[`?`] = new Array[`?`](0) // ok + + def foo1[T <: Array[?]](x: T): Array[?] = x // errors + def foo2[T <: Array[`?`]](x: T): Array[`?`] = x // ok + + def bar1[?] = {} // error + def bar2[`?`] = {} // ok + def bar3[M[?]] = {} // error + def bar4[M[`?`]] = {} // error + + type A[?] = Int // error + type B[`?`] = Int // ok +} From a1cddd61ac381bc33d896c8272ed2ff391428b59 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Mon, 10 May 2021 20:34:04 -0700 Subject: [PATCH 180/769] enable fatal warnings in manual subproject --- build.sbt | 1 + src/manual/scala/tools/docutil/EmitManPage.scala | 6 +++--- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/build.sbt b/build.sbt index 3a10e5a576e1..0f1ea88cfcdc 100644 --- a/build.sbt +++ b/build.sbt @@ -890,6 +890,7 @@ lazy val test = project lazy val manual = configureAsSubproject(project) .settings(disableDocs) .settings(publish / skip := true) + .settings(fatalWarningsSettings) .settings( libraryDependencies += "org.scala-lang" % "scala-library" % scalaVersion.value, Compile / classDirectory := (Compile / target).value / "classes" diff --git a/src/manual/scala/tools/docutil/EmitManPage.scala b/src/manual/scala/tools/docutil/EmitManPage.scala index 0c748377c987..441883e62395 100644 --- a/src/manual/scala/tools/docutil/EmitManPage.scala +++ b/src/manual/scala/tools/docutil/EmitManPage.scala @@ -93,7 +93,7 @@ object EmitManPage { case BlockQuote(text) => out println ".TP" emitText(text) - out.println + out.println() case CodeSample(text) => out println "\n.nf" @@ -104,7 +104,7 @@ object EmitManPage { for (item <- lst.items) { out println ".IP" emitText(item) - out.println + out.println() } case lst:NumberedList => @@ -114,7 +114,7 @@ object EmitManPage { val item = lst.items(idx) out.println(".IP \" " + (idx+1) + ".\"") emitText(item) - out.println + out.println() } case TitledPara(title, text) => From f91a09d263bb9b4f41b6f6d76a8716f329511d65 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Thu, 3 Dec 2020 17:43:04 -0800 Subject: [PATCH 181/769] Simplify class hierarchy for SubstSymMap --- .../tools/nsc/transform/SpecializeTypes.scala | 9 ++- .../scala/tools/nsc/typechecker/Namers.scala | 2 +- .../tools/nsc/typechecker/RefChecks.scala | 4 +- .../scala/reflect/internal/Symbols.scala | 10 +-- .../scala/reflect/internal/Trees.scala | 4 +- .../scala/reflect/internal/Types.scala | 10 +-- .../reflect/internal/tpe/TypeComparers.scala | 6 +- .../scala/reflect/internal/tpe/TypeMaps.scala | 71 ++++++++++--------- .../reflect/runtime/JavaUniverseForce.scala | 1 + 9 files changed, 61 insertions(+), 56 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index 0e68021ae7ca..89b1e4e73dfe 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala 
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -1439,11 +1439,10 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { to: List[Symbol], targetClass: Symbol, addressFields: Boolean) extends TreeSymSubstituter(from, to) { - override val symSubst = new SubstSymMap(from, to) { - override def matches(sym1: Symbol, sym2: Symbol) = - if (sym2.isTypeSkolem) sym2.deSkolemize eq sym1 - else sym1 eq sym2 - } + private def matcher(sym1: Symbol, sym2: Symbol) = + if (sym2.isTypeSkolem) sym2.deSkolemize eq sym1 + else sym1 eq sym2 + override val symSubst = SubstSymMap(from, to, matcher) private def isAccessible(sym: Symbol): Boolean = if (currentOwner.isAnonymousFunction) { diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 825bcd50b04f..bdda512b6dbd 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -632,7 +632,7 @@ trait Namers extends MethodSynthesis { def assignParamTypes(copyDef: DefDef, sym: Symbol): Unit = { val clazz = sym.owner val constructorType = clazz.primaryConstructor.tpe - val subst = new SubstSymMap(clazz.typeParams, copyDef.tparams map (_.symbol)) + val subst = SubstSymMap(clazz.typeParams, copyDef.tparams.map(_.symbol)) val classParamss = constructorType.paramss foreach2(copyDef.vparamss, classParamss)((copyParams, classParams) => diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index 08d24671876c..16bbbf6d98a6 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -53,8 +53,8 @@ abstract class RefChecks extends Transform { def newTransformer(unit: CompilationUnit): RefCheckTransformer = new RefCheckTransformer(unit) - val toJavaRepeatedParam = new SubstSymMap(RepeatedParamClass -> JavaRepeatedParamClass) - val toScalaRepeatedParam = new SubstSymMap(JavaRepeatedParamClass -> RepeatedParamClass) + val toJavaRepeatedParam = SubstSymMap(RepeatedParamClass -> JavaRepeatedParamClass) + val toScalaRepeatedParam = SubstSymMap(JavaRepeatedParamClass -> RepeatedParamClass) def accessFlagsToString(sym: Symbol) = flagsToString( sym getFlag (PRIVATE | PROTECTED), diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index cfb203a58cfe..3d711851548f 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -3696,7 +3696,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => if (syms.isEmpty) Nil else { val syms1 = mapList(syms)(symFn) - val map = new SubstSymMap(syms, syms1) + val map = SubstSymMap(syms, syms1) syms1.foreach(_.modifyInfo(map)) syms1 } @@ -3763,15 +3763,15 @@ trait Symbols extends api.Symbols { self: SymbolTable => if (syms.isEmpty) Nil else { val syms1 = mapList(syms)(_.cloneSymbol) - cloneSymbolsSubstSymMap.using { (msm: MutableSubstSymMap) => - msm.reset(syms, syms1) + cloneSymbolsSubstSymMap.using { (msm: SubstSymMap) => + msm.reload(syms, syms1) syms1.foreach(_.modifyInfo(msm)) } syms1 } - private[this] val cloneSymbolsSubstSymMap: ReusableInstance[MutableSubstSymMap] = - ReusableInstance[MutableSubstSymMap]( new MutableSubstSymMap()) + private[this] val cloneSymbolsSubstSymMap: ReusableInstance[SubstSymMap] = + ReusableInstance[SubstSymMap](SubstSymMap()) def cloneSymbolsAtOwner(syms: 
List[Symbol], owner: Symbol): List[Symbol] = deriveSymbols(syms, _ cloneSymbol owner) diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala index 4c76b3471354..821aebd7084b 100644 --- a/src/reflect/scala/reflect/internal/Trees.scala +++ b/src/reflect/scala/reflect/internal/Trees.scala @@ -1745,7 +1745,7 @@ trait Trees extends api.Trees { lazy val EmptyTreeTypeSubstituter = new TreeTypeSubstituter(List(), List()) - class TreeSymSubstTraverser(val from: List[Symbol], val to: List[Symbol]) extends TypeMapTreeSubstituter(new SubstSymMap(from, to)) { + class TreeSymSubstTraverser(val from: List[Symbol], val to: List[Symbol]) extends TypeMapTreeSubstituter(SubstSymMap(from, to)) { override def toString() = "TreeSymSubstTraverser/" + substituterString("Symbol", "Symbol", from, to) } @@ -1759,7 +1759,7 @@ trait Trees extends api.Trees { * a symbol in `from` will have a new type assigned. */ class TreeSymSubstituter(from: List[Symbol], to: List[Symbol]) extends InternalTransformer { - val symSubst = new SubstSymMap(from, to) + val symSubst = SubstSymMap(from, to) private[this] var mutatedSymbols: List[Symbol] = Nil override def transform(tree: Tree): Tree = { @tailrec diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index c354ab242089..732d139b0a3f 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -754,7 +754,7 @@ trait Types */ def substSym(from: List[Symbol], to: List[Symbol]): Type = if ((from eq to) || from.isEmpty) this - else new SubstSymMap(from, to) apply this + else SubstSymMap(from, to).apply(this) /** Substitute all occurrences of `ThisType(from)` in this type by `to`. 
* @@ -4041,8 +4041,8 @@ trait Types def refinedType(parents: List[Type], owner: Symbol): Type = refinedType(parents, owner, newScope, owner.pos) - private[this] val copyRefinedTypeSSM: ReusableInstance[MutableSubstSymMap] = - ReusableInstance[MutableSubstSymMap](new MutableSubstSymMap()) + private[this] val copyRefinedTypeSSM: ReusableInstance[SubstSymMap] = + ReusableInstance[SubstSymMap](SubstSymMap()) def copyRefinedType(original: RefinedType, parents: List[Type], decls: Scope) = if ((parents eq original.parents) && (decls eq original.decls)) original @@ -4058,8 +4058,8 @@ trait Types val syms2 = result.decls.toList val resultThis = result.typeSymbol.thisType val substThisMap = new SubstThisMap(original.typeSymbol, resultThis) - copyRefinedTypeSSM.using { (msm: MutableSubstSymMap) => - msm.reset(syms1, syms2) + copyRefinedTypeSSM.using { (msm: SubstSymMap) => + msm.reload(syms1, syms2) syms2.foreach(_.modifyInfo(info => msm.apply(substThisMap.apply(info)))) } } diff --git a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala index 77276fbbfa50..92357d0e0e19 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala @@ -178,7 +178,7 @@ trait TypeComparers { sameLength(tparams1, tparams2) && { // corresponds does not check length of two sequences before checking the predicate, // but SubstMap assumes it has been checked (scala/bug#2956) - val substMap = new SubstSymMap(tparams2, tparams1) + val substMap = SubstSymMap(tparams2, tparams1) ( (tparams1 corresponds tparams2)((p1, p2) => methodHigherOrderTypeParamsSameVariance(p1, p2) && p1.info =:= substMap(p2.info)) && (res1 =:= substMap(res2)) @@ -357,8 +357,8 @@ trait TypeComparers { //@M for an example of why we need to generate fresh symbols otherwise, see neg/tcpoly_ticket2101.scala val substitutes = if (isMethod) tparams1 else cloneSymbols(tparams1) - val sub1: Type => Type = if (isMethod) (tp => tp) else new SubstSymMap(tparams1, substitutes) - val sub2: Type => Type = new SubstSymMap(tparams2, substitutes) + val sub1: Type => Type = if (isMethod) (tp => tp) else SubstSymMap(tparams1, substitutes) + val sub2: Type => Type = SubstSymMap(tparams2, substitutes) def cmp(p1: Symbol, p2: Symbol) = sub2(p2.info) <:< sub1(p1.info) (tparams1 corresponds tparams2)(cmp) && (sub1(res1) <:< sub2(res2)) diff --git a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala index 4e25828a6a56..5d8e55f2c170 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala @@ -20,6 +20,7 @@ import Flags._ import scala.annotation.{nowarn, tailrec} import Variance._ import scala.collection.mutable.ListBuffer +import scala.util.chaining._ private[internal] trait TypeMaps { self: SymbolTable => @@ -663,32 +664,43 @@ private[internal] trait TypeMaps { override def toString = s"AsSeenFromMap($seenFromPrefix, $seenFromClass)" } - /** A base class to compute all substitutions */ - abstract class AbstractSubstMap[T >: Null] extends TypeMap { - protected def from: List[Symbol] = Nil - protected def to: List[T] = Nil + /** A base class to compute all substitutions. 
*/ + sealed abstract class SubstMap[T >: Null] extends TypeMap { + private[this] var _from: List[Symbol] = Nil + private[this] var _to: List[T] = Nil private[this] var fromHasTermSymbol = false private[this] var fromMin = Int.MaxValue private[this] var fromMax = Int.MinValue private[this] var fromSize = 0 - protected def reload(): Unit = { + final def from: List[Symbol] = _from + final def to: List[T] = _to + + def reload(from0: List[Symbol], to0: List[T]): this.type = { // OPT this check was 2-3% of some profiles, demoted to -Xdev if (isDeveloper) assert(sameLength(from, to), "Unsound substitution from "+ from +" to "+ to) + _from = from0 + _to = to0 + fromHasTermSymbol = false fromMin = Int.MaxValue fromMax = Int.MinValue fromSize = 0 - from.foreach { - sym => - fromMin = math.min(fromMin, sym.id) - fromMax = math.max(fromMax, sym.id) - fromSize += 1 - if (sym.isTerm) fromHasTermSymbol = true - } + def scanFrom(ss: List[Symbol]): Unit = + ss match { + case sym :: rest => + fromMin = math.min(fromMin, sym.id) + fromMax = math.max(fromMax, sym.id) + fromSize += 1 + if (sym.isTerm) fromHasTermSymbol = true + scanFrom(rest) + case _ => () + } + scanFrom(from) + this } /** Are `sym` and `sym1` the same? Can be tuned by subclasses. */ @@ -770,7 +782,8 @@ private[internal] trait TypeMaps { } } - abstract class AbstractSubstSymMap extends AbstractSubstMap[Symbol] { + /** A map to implement the `substSym` method. */ + sealed class SubstSymMap private () extends SubstMap[Symbol] { protected def toType(fromTpe: Type, sym: Symbol) = fromTpe match { case TypeRef(pre, _, args) => copyTypeRef(fromTpe, pre, sym, args) @@ -830,30 +843,22 @@ private[internal] trait TypeMaps { mapTreeSymbols.transform(tree) } - /** A map to implement the `substSym` method. */ - class SubstSymMap(override val from: List[Symbol], override val to: List[Symbol]) extends AbstractSubstSymMap { - reload() - - def this(pairs: (Symbol, Symbol)*) = this(pairs.toList.map(_._1), pairs.toList.map(_._2)) - } - - class MutableSubstSymMap extends AbstractSubstSymMap { - private[this] var _from: List[Symbol] = Nil - private[this] var _to: List[Symbol] = Nil - - override def from: List[Symbol] = _from - override def to : List[Symbol] = _to - - def reset(nfrom: List[Symbol], nto: List[Symbol]): Unit = { - _from = nfrom - _to = nto - reload() + object SubstSymMap { + def apply(): SubstSymMap = new SubstSymMap() + def apply(from: List[Symbol], to: List[Symbol]): SubstSymMap = new SubstSymMap().tap(_.reload(from, to)) + def apply(from: List[Symbol], to: List[Symbol], cmp: (Symbol, Symbol) => Boolean): SubstSymMap = { + val ssm = new SubstSymMap() { + override protected def matches(sym: Symbol, sym1: Symbol): Boolean = cmp(sym, sym1) + } + ssm.tap(_.reload(from, to)) } + def apply(fromto: (Symbol, Symbol)): SubstSymMap = apply(List(fromto._1), List(fromto._2)) } /** A map to implement the `subst` method. 
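The shape of this change is easier to see outside the compiler cake: a single mutable instance that can be re-pointed at new from/to lists, plus a companion that builds a fresh one when reuse is not wanted. A simplified, standalone sketch of that reload-for-reuse pattern (illustrative only; Renamer is a made-up stand-in, not a compiler type):

  final class Renamer private () extends (String => String) {
    private var from: List[String] = Nil
    private var to: List[String] = Nil
    def reload(from0: List[String], to0: List[String]): this.type = {
      require(from0.sizeCompare(to0) == 0, "unsound substitution")
      from = from0; to = to0; this
    }
    def apply(s: String): String = {
      val i = from.indexOf(s)
      if (i < 0) s else to(i)
    }
  }
  object Renamer {
    def apply(): Renamer = new Renamer()
    def apply(from: List[String], to: List[String]): Renamer = new Renamer().reload(from, to)
  }
  // Renamer(List("a"), List("b"))("a") == "b"; a cached instance can be reload-ed between uses.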
*/ - class SubstTypeMap(override val from: List[Symbol], override val to: List[Type]) extends AbstractSubstMap[Type] { - super.reload() + class SubstTypeMap(from0: List[Symbol], to0: List[Type]) extends SubstMap[Type] { + super.reload(from0, to0) + override protected def toType(fromtp: Type, tp: Type) = tp override def mapOver(tree: Tree, giveup: () => Nothing): Tree = { diff --git a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala index be33ed5a6651..c11ae7f9ad74 100644 --- a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala +++ b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala @@ -204,6 +204,7 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => this.abstractTypesToBounds this.dropIllegalStarTypes this.wildcardExtrapolation + this.SubstSymMap this.IsDependentCollector this.ApproximateDependentMap this.identityTypeMap From f46a4a33d79a744d2759a1d278d5035e90a0aa51 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Fri, 4 Dec 2020 18:49:31 -0800 Subject: [PATCH 182/769] Reusable is not thread-safe --- src/reflect/scala/reflect/internal/Symbols.scala | 2 +- src/reflect/scala/reflect/internal/Types.scala | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 3d711851548f..19f9b36ad640 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -3771,7 +3771,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => } private[this] val cloneSymbolsSubstSymMap: ReusableInstance[SubstSymMap] = - ReusableInstance[SubstSymMap](SubstSymMap()) + ReusableInstance[SubstSymMap](SubstSymMap(), enabled = isCompilerUniverse) def cloneSymbolsAtOwner(syms: List[Symbol], owner: Symbol): List[Symbol] = deriveSymbols(syms, _ cloneSymbol owner) diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 732d139b0a3f..7b3dc375f2a3 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -4042,7 +4042,7 @@ trait Types refinedType(parents, owner, newScope, owner.pos) private[this] val copyRefinedTypeSSM: ReusableInstance[SubstSymMap] = - ReusableInstance[SubstSymMap](SubstSymMap()) + ReusableInstance[SubstSymMap](SubstSymMap(), enabled = isCompilerUniverse) def copyRefinedType(original: RefinedType, parents: List[Type], decls: Scope) = if ((parents eq original.parents) && (decls eq original.decls)) original From bb2585dede1527f4849271877391c5c31bdd51b1 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Fri, 4 Dec 2020 19:37:13 -0800 Subject: [PATCH 183/769] Tweak ReusableInstance initialSize --- .../tools/nsc/symtab/SymbolLoaders.scala | 2 +- .../internal/util/ReusableInstance.scala | 21 ++++++++++++------- 2 files changed, 15 insertions(+), 8 deletions(-) diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala index 8836a1d80885..fa53c37a9263 100644 --- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala +++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala @@ -312,7 +312,7 @@ abstract class SymbolLoaders { } } } - private lazy val classFileDataReader: ReusableInstance[ReusableDataReader] = ReusableInstance[ReusableDataReader](new ReusableDataReader(), enabled = isCompilerUniverse) + private lazy val classFileDataReader: 
ReusableInstance[ReusableDataReader] = ReusableInstance[ReusableDataReader](new ReusableDataReader(), initialSize = 1, enabled = isCompilerUniverse) class ClassfileLoader(val classfile: AbstractFile, clazz: ClassSymbol, module: ModuleSymbol) extends SymbolLoader with FlagAssigningCompleter { private object classfileParser extends { val symbolTable: SymbolLoaders.this.symbolTable.type = SymbolLoaders.this.symbolTable diff --git a/src/reflect/scala/reflect/internal/util/ReusableInstance.scala b/src/reflect/scala/reflect/internal/util/ReusableInstance.scala index 5dea888f6d2e..8853e7d72242 100644 --- a/src/reflect/scala/reflect/internal/util/ReusableInstance.scala +++ b/src/reflect/scala/reflect/internal/util/ReusableInstance.scala @@ -19,17 +19,18 @@ import scala.util.chaining._ * The wrapper is recursion-reentrant: several instances are kept, so * at each depth of reentrance we are reusing the instance for that. * - * An instance is created upon creating this object, and more instances - * are allocated dynamically, on demand, when reentrance occurs. + * An instance is created eagerly, then more instances + * are allocated as needed on re-entry. Once allocated, + * cached instances are not reclaimed for the life of this ReusableInstance. * * Not thread safe. */ -final class ReusableInstance[T <: AnyRef] private (make: => T, enabled: Boolean) { - private[this] val cache = if (enabled) new ArrayBuffer[T](ReusableInstance.InitialSize).tap(_.addOne(make)) else null +final class ReusableInstance[T <: AnyRef] private (make: => T, initialSize: Int) { + private[this] val cache = if (initialSize > 0) new ArrayBuffer[T](initialSize).tap(_.addOne(make)) else null private[this] var taken = 0 @inline def using[R](action: T => R): R = - if (!enabled) + if (cache == null) action(make) else { if (taken == cache.size) @@ -42,6 +43,12 @@ final class ReusableInstance[T <: AnyRef] private (make: => T, enabled: Boolean) object ReusableInstance { private final val InitialSize = 4 - def apply[T <: AnyRef](make: => T): ReusableInstance[T] = new ReusableInstance[T](make, enabled = true) - def apply[T <: AnyRef](make: => T, enabled: Boolean): ReusableInstance[T] = new ReusableInstance[T](make, enabled = enabled) + def apply[T <: AnyRef](make: => T, initialSize: Int): ReusableInstance[T] = new ReusableInstance[T](make, initialSize) + + def apply[T <: AnyRef](make: => T): ReusableInstance[T] = + apply(make, InitialSize) + def apply[T <: AnyRef](make: => T, enabled: Boolean): ReusableInstance[T] = + if (enabled) apply(make) else apply(make, -1) + def apply[T <: AnyRef](make: => T, initialSize: Int, enabled: Boolean): ReusableInstance[T] = + if (enabled) apply(make, initialSize) else apply(make, -1) } From a378d83f95098908a7ecbebf0a877e02f4b81047 Mon Sep 17 00:00:00 2001 From: Denis Rosset Date: Tue, 11 May 2021 17:58:39 +0200 Subject: [PATCH 184/769] Adding BigInt benchmarks --- .../math/BigIntEulerProblem15Benchmark.scala | 29 +++++++++++++++++ .../scala/math/BigIntFactorialBenchmark.scala | 30 +++++++++++++++++ .../scala/scala/math/BigIntRSABenchmark.scala | 32 +++++++++++++++++++ 3 files changed, 91 insertions(+) create mode 100644 test/benchmarks/src/main/scala/scala/math/BigIntEulerProblem15Benchmark.scala create mode 100644 test/benchmarks/src/main/scala/scala/math/BigIntFactorialBenchmark.scala create mode 100644 test/benchmarks/src/main/scala/scala/math/BigIntRSABenchmark.scala diff --git a/test/benchmarks/src/main/scala/scala/math/BigIntEulerProblem15Benchmark.scala 
b/test/benchmarks/src/main/scala/scala/math/BigIntEulerProblem15Benchmark.scala new file mode 100644 index 000000000000..690c078ec2f7 --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/math/BigIntEulerProblem15Benchmark.scala @@ -0,0 +1,29 @@ +package scala.math + +import java.util.concurrent.TimeUnit + +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra.Blackhole + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 10) +@Measurement(iterations = 10) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class BigIntEulerProblem15Benchmark { + + @Param(Array("5", "10", "15", "20", "25", "30", "35", "40", "45", "50", "55", + "60", "65", "70", "75", "80", "85", "90", "95", "100")) + var size: Int = _ + + @Benchmark + def eulerProblem15(bh: Blackhole): Unit = { + def f(row: Array[BigInt], c: Int): BigInt = + if (c == 0) row.last else f(row.scan(BigInt(0))(_ + _), c - 1) + def computeAnswer(n: Int): BigInt = f(Array.fill(n + 1)(BigInt(1)), n) + bh.consume(computeAnswer(size)) + } + +} diff --git a/test/benchmarks/src/main/scala/scala/math/BigIntFactorialBenchmark.scala b/test/benchmarks/src/main/scala/scala/math/BigIntFactorialBenchmark.scala new file mode 100644 index 000000000000..0aaa18c029e1 --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/math/BigIntFactorialBenchmark.scala @@ -0,0 +1,30 @@ +package scala.math + +import java.util.concurrent.TimeUnit + +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra.Blackhole + +import scala.annotation.tailrec + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 10) +@Measurement(iterations = 10) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class BigIntFactorialBenchmark { + + @Param(Array("5", "10", "15", "20", "25", "30", "35", "40", "45", "50", "55", + "60", "65", "70", "75", "80", "85", "90", "95", "100")) + var size: Int = _ + + @Benchmark + def factorial(bh: Blackhole): Unit = { + @tailrec def fact(i: Int, n: Int, prev: BigInt): BigInt = + if (i > n) prev else fact(i + 1, n, prev * i) + bh.consume(fact(1, size, BigInt(1))) + } + +} diff --git a/test/benchmarks/src/main/scala/scala/math/BigIntRSABenchmark.scala b/test/benchmarks/src/main/scala/scala/math/BigIntRSABenchmark.scala new file mode 100644 index 000000000000..4c93f324e0bd --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/math/BigIntRSABenchmark.scala @@ -0,0 +1,32 @@ +package scala.math + +import java.util.concurrent.TimeUnit + +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra._ + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 10) +@Measurement(iterations = 10) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class BigIntRSABenchmark { + + @Benchmark + def encodeDecode(bh: Blackhole): Unit = { + // private key + val d = BigInt("5617843187844953170308463622230283376298685") + // public key + val n = BigInt("9516311845790656153499716760847001433441357") + val e = 65537 + + // concatenation of "Scala is great" + val plaintext = BigInt("83099097108097032105115032103114101097116") + val ciphertext = plaintext.modPow(e, n) + val recoveredtext = ciphertext.modPow(d, n) + bh.consume(plaintext == recoveredtext) + } + +} From 0ae2ff9d5af2f307f4528f5d0f36d70e458a7892 Mon Sep 17 00:00:00 2001 From: Denis Rosset Date: Tue, 11 May 2021 18:01:50 +0200 Subject: [PATCH 185/769] Add BigInt constructor named argument syntax test --- test/scalacheck/scala/math/BigIntProperties.scala | 
1 + 1 file changed, 1 insertion(+) diff --git a/test/scalacheck/scala/math/BigIntProperties.scala b/test/scalacheck/scala/math/BigIntProperties.scala index c4c0295dc50a..d036719b368f 100644 --- a/test/scalacheck/scala/math/BigIntProperties.scala +++ b/test/scalacheck/scala/math/BigIntProperties.scala @@ -61,6 +61,7 @@ object BigIntProperties extends Properties("BigInt") { property("longValue") = forAll { (l: Long) => BigInt(l).longValue ?= l } property("toLong") = forAll { (l: Long) => BigInt(l).toLong ?= l } + property("new BigInt(bigInteger = BigInteger.ZERO)") = (new BigInt(bigInteger = BigInteger.ZERO)) == 0 property("BigInt.apply(i: Int)") = forAll { (i: Int) => BigInt(i) ?= BigInt(BigInteger.valueOf(i)) } property("BigInt.apply(l: Long)") = forAll { (l: Long) => BigInt(l) ?= BigInt(BigInteger.valueOf(l)) } property("BigInt.apply(x: Array[Byte])") = forAll(bigInteger) { bi => BigInt(bi) ?= BigInt(bi.toByteArray) } From d931366c83632f37a7fa874a7e28740bdf9a02df Mon Sep 17 00:00:00 2001 From: Denis Rosset Date: Tue, 11 May 2021 18:15:48 +0200 Subject: [PATCH 186/769] Force every BigInt construction through the companion BigInt.apply method --- src/library/scala/math/BigInt.scala | 86 +++++++++++++++-------------- 1 file changed, 46 insertions(+), 40 deletions(-) diff --git a/src/library/scala/math/BigInt.scala b/src/library/scala/math/BigInt.scala index 20cec9742ed2..6f76c6b055e9 100644 --- a/src/library/scala/math/BigInt.scala +++ b/src/library/scala/math/BigInt.scala @@ -24,6 +24,17 @@ object BigInt { private[this] val minCached = -1024 private[this] val maxCached = 1024 private[this] val cache = new Array[BigInt](maxCached - minCached + 1) + + private[this] def getCached(i: Int): BigInt = { + val offset = i - minCached + var n = cache(offset) + if (n eq null) { + n = new BigInt(BigInteger.valueOf(i.toLong)) + cache(offset) = n + } + n + } + private val minusOne = BigInteger.valueOf(-1) /** Constructs a `BigInt` whose value is equal to that of the @@ -33,12 +44,7 @@ object BigInt { * @return the constructed `BigInt` */ def apply(i: Int): BigInt = - if (minCached <= i && i <= maxCached) { - val offset = i - minCached - var n = cache(offset) - if (n eq null) { n = new BigInt(BigInteger.valueOf(i.toLong)); cache(offset) = n } - n - } else new BigInt(BigInteger.valueOf(i.toLong)) + if (minCached <= i && i <= maxCached) getCached(i) else new BigInt(BigInteger.valueOf(i.toLong)) /** Constructs a `BigInt` whose value is equal to that of the * specified long value. @@ -47,14 +53,14 @@ object BigInt { * @return the constructed `BigInt` */ def apply(l: Long): BigInt = - if (minCached <= l && l <= maxCached) apply(l.toInt) - else new BigInt(BigInteger.valueOf(l)) + if (minCached <= l && l <= maxCached) getCached(l.toInt) + else new BigInt(BigInteger.valueOf(l)) /** Translates a byte array containing the two's-complement binary * representation of a BigInt into a BigInt. */ def apply(x: Array[Byte]): BigInt = - new BigInt(new BigInteger(x)) + apply(new BigInteger(x)) /** Translates the sign-magnitude representation of a BigInt into a BigInt. * @@ -64,30 +70,30 @@ object BigInt { * the number. */ def apply(signum: Int, magnitude: Array[Byte]): BigInt = - new BigInt(new BigInteger(signum, magnitude)) + apply(new BigInteger(signum, magnitude)) /** Constructs a randomly generated positive BigInt that is probably prime, * with the specified bitLength. 
*/ def apply(bitlength: Int, certainty: Int, rnd: scala.util.Random): BigInt = - new BigInt(new BigInteger(bitlength, certainty, rnd.self)) + apply(new BigInteger(bitlength, certainty, rnd.self)) /** Constructs a randomly generated BigInt, uniformly distributed over the * range `0` to `(2 ^ numBits - 1)`, inclusive. */ def apply(numbits: Int, rnd: scala.util.Random): BigInt = - new BigInt(new BigInteger(numbits, rnd.self)) + apply(new BigInteger(numbits, rnd.self)) /** Translates the decimal String representation of a BigInt into a BigInt. */ def apply(x: String): BigInt = - new BigInt(new BigInteger(x)) + apply(new BigInteger(x)) /** Translates the string representation of a `BigInt` in the * specified `radix` into a BigInt. */ def apply(x: String, radix: Int): BigInt = - new BigInt(new BigInteger(x, radix)) + apply(new BigInteger(x, radix)) /** Translates a `java.math.BigInteger` into a BigInt. */ @@ -97,7 +103,7 @@ object BigInt { /** Returns a positive BigInt that is probably prime, with the specified bitLength. */ def probablePrime(bitLength: Int, rnd: scala.util.Random): BigInt = - new BigInt(BigInteger.probablePrime(bitLength, rnd.self)) + apply(BigInteger.probablePrime(bitLength, rnd.self)) /** Implicit conversion from `Int` to `BigInt`. */ @@ -186,94 +192,94 @@ final class BigInt(val bigInteger: BigInteger) /** Addition of BigInts */ - def + (that: BigInt): BigInt = new BigInt(this.bigInteger.add(that.bigInteger)) + def + (that: BigInt): BigInt = BigInt(this.bigInteger.add(that.bigInteger)) /** Subtraction of BigInts */ - def - (that: BigInt): BigInt = new BigInt(this.bigInteger.subtract(that.bigInteger)) + def - (that: BigInt): BigInt = BigInt(this.bigInteger.subtract(that.bigInteger)) /** Multiplication of BigInts */ - def * (that: BigInt): BigInt = new BigInt(this.bigInteger.multiply(that.bigInteger)) + def * (that: BigInt): BigInt = BigInt(this.bigInteger.multiply(that.bigInteger)) /** Division of BigInts */ - def / (that: BigInt): BigInt = new BigInt(this.bigInteger.divide(that.bigInteger)) + def / (that: BigInt): BigInt = BigInt(this.bigInteger.divide(that.bigInteger)) /** Remainder of BigInts */ - def % (that: BigInt): BigInt = new BigInt(this.bigInteger.remainder(that.bigInteger)) + def % (that: BigInt): BigInt = BigInt(this.bigInteger.remainder(that.bigInteger)) /** Returns a pair of two BigInts containing (this / that) and (this % that). */ def /% (that: BigInt): (BigInt, BigInt) = { val dr = this.bigInteger.divideAndRemainder(that.bigInteger) - (new BigInt(dr(0)), new BigInt(dr(1))) + (BigInt(dr(0)), BigInt(dr(1))) } /** Leftshift of BigInt */ - def << (n: Int): BigInt = new BigInt(this.bigInteger.shiftLeft(n)) + def << (n: Int): BigInt = BigInt(this.bigInteger.shiftLeft(n)) /** (Signed) rightshift of BigInt */ - def >> (n: Int): BigInt = new BigInt(this.bigInteger.shiftRight(n)) + def >> (n: Int): BigInt = BigInt(this.bigInteger.shiftRight(n)) /** Bitwise and of BigInts */ - def & (that: BigInt): BigInt = new BigInt(this.bigInteger.and(that.bigInteger)) + def & (that: BigInt): BigInt = BigInt(this.bigInteger.and(that.bigInteger)) /** Bitwise or of BigInts */ - def | (that: BigInt): BigInt = new BigInt(this.bigInteger.or (that.bigInteger)) + def | (that: BigInt): BigInt = BigInt(this.bigInteger.or (that.bigInteger)) /** Bitwise exclusive-or of BigInts */ - def ^ (that: BigInt): BigInt = new BigInt(this.bigInteger.xor(that.bigInteger)) + def ^ (that: BigInt): BigInt = BigInt(this.bigInteger.xor(that.bigInteger)) /** Bitwise and-not of BigInts. 
Returns a BigInt whose value is (this & ~that). */ - def &~ (that: BigInt): BigInt = new BigInt(this.bigInteger.andNot(that.bigInteger)) + def &~ (that: BigInt): BigInt = BigInt(this.bigInteger.andNot(that.bigInteger)) /** Returns the greatest common divisor of abs(this) and abs(that) */ - def gcd (that: BigInt): BigInt = new BigInt(this.bigInteger.gcd(that.bigInteger)) + def gcd (that: BigInt): BigInt = BigInt(this.bigInteger.gcd(that.bigInteger)) /** Returns a BigInt whose value is (this mod that). * This method differs from `%` in that it always returns a non-negative BigInt. * @param that A positive number */ - def mod (that: BigInt): BigInt = new BigInt(this.bigInteger.mod(that.bigInteger)) + def mod (that: BigInt): BigInt = BigInt(this.bigInteger.mod(that.bigInteger)) /** Returns the minimum of this and that */ - def min (that: BigInt): BigInt = new BigInt(this.bigInteger.min(that.bigInteger)) + def min (that: BigInt): BigInt = BigInt(this.bigInteger.min(that.bigInteger)) /** Returns the maximum of this and that */ - def max (that: BigInt): BigInt = new BigInt(this.bigInteger.max(that.bigInteger)) + def max (that: BigInt): BigInt = BigInt(this.bigInteger.max(that.bigInteger)) /** Returns a BigInt whose value is (this raised to the power of exp). */ - def pow (exp: Int): BigInt = new BigInt(this.bigInteger.pow(exp)) + def pow (exp: Int): BigInt = BigInt(this.bigInteger.pow(exp)) /** Returns a BigInt whose value is * (this raised to the power of exp modulo m). */ def modPow (exp: BigInt, m: BigInt): BigInt = - new BigInt(this.bigInteger.modPow(exp.bigInteger, m.bigInteger)) + BigInt(this.bigInteger.modPow(exp.bigInteger, m.bigInteger)) /** Returns a BigInt whose value is (the inverse of this modulo m). */ - def modInverse (m: BigInt): BigInt = new BigInt(this.bigInteger.modInverse(m.bigInteger)) + def modInverse (m: BigInt): BigInt = BigInt(this.bigInteger.modInverse(m.bigInteger)) /** Returns a BigInt whose value is the negation of this BigInt */ - def unary_- : BigInt = new BigInt(this.bigInteger.negate()) + def unary_- : BigInt = BigInt(this.bigInteger.negate()) /** Returns the absolute value of this BigInt */ - def abs: BigInt = new BigInt(this.bigInteger.abs()) + def abs: BigInt = BigInt(this.bigInteger.abs()) /** Returns the sign of this BigInt; * -1 if it is less than 0, @@ -291,7 +297,7 @@ final class BigInt(val bigInteger: BigInteger) /** Returns the bitwise complement of this BigInt */ - def unary_~ : BigInt = new BigInt(this.bigInteger.not()) + def unary_~ : BigInt = BigInt(this.bigInteger.not()) /** Returns true if and only if the designated bit is set. */ @@ -299,15 +305,15 @@ final class BigInt(val bigInteger: BigInteger) /** Returns a BigInt whose value is equivalent to this BigInt with the designated bit set. */ - def setBit (n: Int): BigInt = new BigInt(this.bigInteger.setBit(n)) + def setBit (n: Int): BigInt = BigInt(this.bigInteger.setBit(n)) /** Returns a BigInt whose value is equivalent to this BigInt with the designated bit cleared. */ - def clearBit(n: Int): BigInt = new BigInt(this.bigInteger.clearBit(n)) + def clearBit(n: Int): BigInt = BigInt(this.bigInteger.clearBit(n)) /** Returns a BigInt whose value is equivalent to this BigInt with the designated bit flipped. */ - def flipBit (n: Int): BigInt = new BigInt(this.bigInteger.flipBit(n)) + def flipBit (n: Int): BigInt = BigInt(this.bigInteger.flipBit(n)) /** Returns the index of the rightmost (lowest-order) one bit in this BigInt * (the number of zero bits to the right of the rightmost one bit). 
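Taken on its own, this patch is behaviour-preserving; the point of funnelling every construction through the companion is that `apply` becomes the single place where the following patches can intercept the value (small-value cache, Long-backed encoding). A rough illustration of the intended end state, assuming the +/-1024 cache shown above:

  val two = BigInt(1) + BigInt(1)   // the result is built via BigInt.apply, not `new BigInt`
  assert(two == BigInt(2))          // value semantics are unchanged
  // once apply(x: BigInteger) also consults the cache (next patch), `two` and
  // BigInt(2) can even end up being the same cached instance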
From adaef2ba213518d9421af2ee64af8fdb89f4ce0b Mon Sep 17 00:00:00 2001 From: Denis Rosset Date: Tue, 11 May 2021 18:23:22 +0200 Subject: [PATCH 187/769] Introducing BigInt new storage scheme --- project/MimaFilters.scala | 5 +++ src/library/scala/math/BigInt.scala | 61 +++++++++++++++++++++++++---- 2 files changed, 59 insertions(+), 7 deletions(-) diff --git a/project/MimaFilters.scala b/project/MimaFilters.scala index 0b35213fffec..0cde580c4f63 100644 --- a/project/MimaFilters.scala +++ b/project/MimaFilters.scala @@ -25,6 +25,11 @@ object MimaFilters extends AutoPlugin { // don't publish the artifact built with JDK 11 anyways ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.convert.JavaCollectionWrappers#IteratorWrapper.asIterator"), + // for the method this(Long)Unit in class scala.math.BigInt does not have a correspondent in other versions + // this new constructor is nevertheless private, and can only be called from the BigInt class and its companion + // object + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.math.BigInt.this"), + // PR: https://github.com/scala/scala/pull/9336; remove after re-STARR ProblemFilters.exclude[MissingTypesProblem]("scala.deprecatedOverriding"), ProblemFilters.exclude[MissingTypesProblem]("scala.deprecatedInheritance"), diff --git a/src/library/scala/math/BigInt.scala b/src/library/scala/math/BigInt.scala index 6f76c6b055e9..a88b1371ccc6 100644 --- a/src/library/scala/math/BigInt.scala +++ b/src/library/scala/math/BigInt.scala @@ -21,6 +21,9 @@ import scala.collection.immutable.NumericRange object BigInt { + private val longMinValueBigInteger = BigInteger.valueOf(Long.MinValue) + private val longMinValue = new BigInt(longMinValueBigInteger, Long.MinValue) + private[this] val minCached = -1024 private[this] val maxCached = 1024 private[this] val cache = new Array[BigInt](maxCached - minCached + 1) @@ -29,7 +32,7 @@ object BigInt { val offset = i - minCached var n = cache(offset) if (n eq null) { - n = new BigInt(BigInteger.valueOf(i.toLong)) + n = new BigInt(null, i.toLong) cache(offset) = n } n @@ -44,7 +47,7 @@ object BigInt { * @return the constructed `BigInt` */ def apply(i: Int): BigInt = - if (minCached <= i && i <= maxCached) getCached(i) else new BigInt(BigInteger.valueOf(i.toLong)) + if (minCached <= i && i <= maxCached) getCached(i) else apply(i: Long) /** Constructs a `BigInt` whose value is equal to that of the * specified long value. @@ -53,8 +56,9 @@ object BigInt { * @return the constructed `BigInt` */ def apply(l: Long): BigInt = - if (minCached <= l && l <= maxCached) getCached(l.toInt) - else new BigInt(BigInteger.valueOf(l)) + if (minCached <= l && l <= maxCached) getCached(l.toInt) else { + if (l == Long.MinValue) longMinValue else new BigInt(null, l) + } /** Translates a byte array containing the two's-complement binary * representation of a BigInt into a BigInt. @@ -97,8 +101,12 @@ object BigInt { /** Translates a `java.math.BigInteger` into a BigInt. */ - def apply(x: BigInteger): BigInt = - new BigInt(x) + def apply(x: BigInteger): BigInt = { + if (x.bitLength <= 63) { + val l = x.longValue + if (minCached <= l && l <= maxCached) getCached(l.toInt) else new BigInt(x, l) + } else new BigInt(x, Long.MinValue) + } /** Returns a positive BigInt that is probably prime, with the specified bitLength. 
*/ @@ -118,12 +126,51 @@ object BigInt { implicit def javaBigInteger2bigInt(x: BigInteger): BigInt = apply(x) } -final class BigInt(val bigInteger: BigInteger) +/** A type with efficient encoding of arbitrary integers. + * + * It wraps `java.math.BigInteger`, with optimization for small values that can be encoded in a `Long`. + */ +final class BigInt private (private var _bigInteger: BigInteger, private val _long: Long) extends ScalaNumber with ScalaNumericConversions with Serializable with Ordered[BigInt] { + // The class has a special encoding for integer that fit in a Long *and* are not equal to Long.MinValue. + // + // The Long value Long.MinValue is a tag specifying that the integer is encoded in the BigInteger field. + // + // There are three possible states for the class fields (_bigInteger, _long) + // 1. (null, l) where l != Long.MinValue, encodes the integer "l" + // 2. (b, l) where l != Long.MinValue; then b is a BigInteger with value l, encodes "l" == "b" + // 3a. (b, Long.MinValue) where b == Long.MinValue, encodes Long.MinValue + // 3b. (b, Long.MinValue) where b does not fit in a Long, encodes "b" + // + // There is only one possible transition 1. -> 2., when the method .bigInteger is called, then the field + // _bigInteger caches the result. + // + // The case 3a. is the only one where the BigInteger could actually fit in a Long, but as its value is used as a + // tag, we'll take the slow path instead. + // + // Additionally, we know that if this.isValidLong is true, then _long is the encoded value. + + /** Public constructor present for compatibility. Use the BigInt.apply companion object method instead. */ + def this(bigInteger: BigInteger) = this( + bigInteger, // even if it is a short BigInteger, we cache the instance + if (bigInteger.bitLength <= 63) + bigInteger.longValue // if _bigInteger is actually equal to Long.MinValue, no big deal, its value acts as a tag + else Long.MinValue + ) + + def bigInteger: BigInteger = { + val read = _bigInteger + if (read ne null) read else { + val write = BigInteger.valueOf(_long) + _bigInteger = write // reference assignment is atomic; this is multi-thread safe (if possibly wasteful) + write + } + } + /** Returns the hash code for this BigInt. */ override def hashCode(): Int = if (isValidLong) unifiedPrimitiveHashcode From 2e67ec13f044b14f3b7e76834772224dbd23f107 Mon Sep 17 00:00:00 2001 From: Denis Rosset Date: Tue, 11 May 2021 23:15:42 +0200 Subject: [PATCH 188/769] Optimized BigInt operations --- src/library/scala/math/BigInt.scala | 257 +++++++++++++++++++++++----- 1 file changed, 210 insertions(+), 47 deletions(-) diff --git a/src/library/scala/math/BigInt.scala b/src/library/scala/math/BigInt.scala index a88b1371ccc6..d0018fc8e970 100644 --- a/src/library/scala/math/BigInt.scala +++ b/src/library/scala/math/BigInt.scala @@ -124,6 +124,46 @@ object BigInt { /** Implicit conversion from `java.math.BigInteger` to `scala.BigInt`. */ implicit def javaBigInteger2bigInt(x: BigInteger): BigInt = apply(x) + + /** + * Returns the greatest common divisor of a and b. Returns 0 if a == 0 && b == 0. + */ + private def longGcd(a: Long, b: Long): Long = { + // code adapted from Google Guava LongMath.java / gcd + if (a == 0) { // 0 % b == 0, so b divides a, but the converse doesn't hold. + // BigInteger.gcd is consistent with this decision. + return b + } + else if (b == 0) return a // similar logic + /* + * Uses the binary GCD algorithm; see http://en.wikipedia.org/wiki/Binary_GCD_algorithm. 
This is + * >60% faster than the Euclidean algorithm in benchmarks. + */ + val aTwos = java.lang.Long.numberOfTrailingZeros(a) + var a1 = a >> aTwos // divide out all 2s + + val bTwos = java.lang.Long.numberOfTrailingZeros(b) + var b1 = b >> bTwos + while (a1 != b1) { // both a, b are odd + // The key to the binary GCD algorithm is as follows: + // Both a1 and b1 are odd. Assume a1 > b1; then gcd(a1 - b1, b1) = gcd(a1, b1). + // But in gcd(a1 - b1, b1), a1 - b1 is even and b1 is odd, so we can divide out powers of two. + // We bend over backwards to avoid branching, adapting a technique from + // http://graphics.stanford.edu/~seander/bithacks.html#IntegerMinOrMax + val delta = a1 - b1 // can't overflow, since a1 and b1 are nonnegative + val minDeltaOrZero = delta & (delta >> (java.lang.Long.SIZE - 1)) + // equivalent to Math.min(delta, 0) + a1 = delta - minDeltaOrZero - minDeltaOrZero // sets a to Math.abs(a - b) + + // a is now nonnegative and even + b1 += minDeltaOrZero // sets b to min(old a, b) + + a1 >>= java.lang.Long.numberOfTrailingZeros(a1) // divide out all 2s, since 2 doesn't divide b + + } + a1 << scala.math.min(aTwos, bTwos) + } + } /** A type with efficient encoding of arbitrary integers. @@ -162,6 +202,10 @@ final class BigInt private (private var _bigInteger: BigInteger, private val _lo else Long.MinValue ) + /** Returns whether the integer is encoded in the Long. Returns true for all values fitting in a Long except + * Long.MinValue. */ + private def longEncoding: Boolean = _long != Long.MinValue + def bigInteger: BigInteger = { val read = _bigInteger if (read ne null) read else { @@ -185,11 +229,13 @@ final class BigInt private (private var _bigInteger: BigInteger, private val _lo case that: Float => isValidFloat && toFloat == that case x => isValidLong && unifiedPrimitiveEquals(x) } - override def isValidByte: Boolean = this >= Byte.MinValue && this <= Byte.MaxValue - override def isValidShort: Boolean = this >= Short.MinValue && this <= Short.MaxValue - override def isValidChar: Boolean = this >= Char.MinValue && this <= Char.MaxValue - override def isValidInt: Boolean = this >= Int.MinValue && this <= Int.MaxValue - def isValidLong: Boolean = this >= Long.MinValue && this <= Long.MaxValue + + override def isValidByte: Boolean = _long >= Byte.MinValue && _long <= Byte.MaxValue /* && longEncoding */ + override def isValidShort: Boolean = _long >= Short.MinValue && _long <= Short.MaxValue /* && longEncoding */ + override def isValidChar: Boolean = _long >= Char.MinValue && _long <= Char.MaxValue /* && longEncoding */ + override def isValidInt: Boolean = _long >= Int.MinValue && _long <= Int.MaxValue /* && longEncoding */ + def isValidLong: Boolean = longEncoding || _bigInteger == BigInt.longMinValueBigInteger // rhs of || tests == Long.MinValue + /** Returns `true` iff this can be represented exactly by [[scala.Float]]; otherwise returns `false`. */ def isValidFloat: Boolean = { @@ -231,151 +277,266 @@ final class BigInt private (private var _bigInteger: BigInteger, private val _lo /** Compares this BigInt with the specified BigInt for equality. 
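Why Long.MinValue specifically is sacrificed as the "not encoded in the Long" tag: it is the one Long whose magnitude has no positive counterpart, so fast paths such as negation could not stay within a Long for it anyway. A small illustration (not part of the patch):

  assert(-Long.MinValue == Long.MinValue)                           // primitive negation wraps around
  assert(-BigInt(Long.MinValue) == BigInt("9223372036854775808"))   // the BigInteger-backed path stays correct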
*/ - def equals (that: BigInt): Boolean = compare(that) == 0 + def equals(that: BigInt): Boolean = + if (this.longEncoding) + that.longEncoding && (this._long == that._long) + else + !that.longEncoding && (this._bigInteger == that._bigInteger) /** Compares this BigInt with the specified BigInt */ - def compare (that: BigInt): Int = this.bigInteger.compareTo(that.bigInteger) + def compare(that: BigInt): Int = + if (this.longEncoding) { + if (that.longEncoding) java.lang.Long.compare(this._long, that._long) else -that._bigInteger.signum() + } else { + if (that.longEncoding) _bigInteger.signum() else this._bigInteger.compareTo(that._bigInteger) + } /** Addition of BigInts */ - def + (that: BigInt): BigInt = BigInt(this.bigInteger.add(that.bigInteger)) + def +(that: BigInt): BigInt = { + if (this.longEncoding && that.longEncoding) { // fast path + val x = this._long + val y = that._long + val z = x + y + if ((~(x ^ y) & (x ^ z)) >= 0L) return BigInt(z) + } + BigInt(this.bigInteger.add(that.bigInteger)) + } /** Subtraction of BigInts */ - def - (that: BigInt): BigInt = BigInt(this.bigInteger.subtract(that.bigInteger)) + def -(that: BigInt): BigInt = { + if (this.longEncoding && that.longEncoding) { // fast path + val x = this._long + val y = that._long + val z = x - y + if (((x ^ y) & (x ^ z)) >= 0L) return BigInt(z) + } + BigInt(this.bigInteger.subtract(that.bigInteger)) + } /** Multiplication of BigInts */ - def * (that: BigInt): BigInt = BigInt(this.bigInteger.multiply(that.bigInteger)) + def *(that: BigInt): BigInt = { + if (this.longEncoding && that.longEncoding) { // fast path + val x = this._long + val y = that._long + val z = x * y + // original code checks the y != Long.MinValue, but when longEncoding is true, that is never the case + // if (x == 0 || (y == z / x && !(x == -1 && y == Long.MinValue))) return BigInt(z) + if (x == 0 || y == z / x) return BigInt(z) + } + BigInt(this.bigInteger.multiply(that.bigInteger)) + } /** Division of BigInts */ - def / (that: BigInt): BigInt = BigInt(this.bigInteger.divide(that.bigInteger)) + def /(that: BigInt): BigInt = + // in the fast path, note that the original code avoided storing -Long.MinValue in a long: + // if (this._long != Long.MinValue || that._long != -1) return BigInt(this._long / that._long) + // but we know this._long cannot be Long.MinValue, because Long.MinValue is the tag for bigger integers + if (this.longEncoding && that.longEncoding) BigInt(this._long / that._long) + else BigInt(this.bigInteger.divide(that.bigInteger)) /** Remainder of BigInts */ - def % (that: BigInt): BigInt = BigInt(this.bigInteger.remainder(that.bigInteger)) + def %(that: BigInt): BigInt = + // see / for the original logic regarding Long.MinValue + if (this.longEncoding && that.longEncoding) BigInt(this._long % that._long) + else BigInt(this.bigInteger.remainder(that.bigInteger)) /** Returns a pair of two BigInts containing (this / that) and (this % that). 
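The guard on the `+` fast path is the usual sign-bit test: overflow can only occur when both operands have the same sign and the result's sign differs, which is exactly when `~(x ^ y) & (x ^ z)` is negative (and symmetrically `(x ^ y) & (x ^ z)` for subtraction). A standalone illustration of the addition case (addExact is not compiler code):

  def addExact(x: Long, y: Long): Option[Long] = {
    val z = x + y
    if ((~(x ^ y) & (x ^ z)) >= 0L) Some(z)  // signs compatible: the Long result is exact
    else None                                // overflow: the real code falls back to BigInteger
  }
  assert(addExact(1L, 2L).contains(3L))
  assert(addExact(Long.MaxValue, 1L).isEmpty)
  assert(addExact(Long.MinValue, -1L).isEmpty)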
*/ - def /% (that: BigInt): (BigInt, BigInt) = { - val dr = this.bigInteger.divideAndRemainder(that.bigInteger) - (BigInt(dr(0)), BigInt(dr(1))) - } + def /%(that: BigInt): (BigInt, BigInt) = + if (this.longEncoding && that.longEncoding) { + val x = this._long + val y = that._long + // original line: if (x != Long.MinValue || y != -1) return (BigInt(x / y), BigInt(x % y)) + (BigInt(x / y), BigInt(x % y)) + } else { + val dr = this.bigInteger.divideAndRemainder(that.bigInteger) + (BigInt(dr(0)), BigInt(dr(1))) + } /** Leftshift of BigInt */ - def << (n: Int): BigInt = BigInt(this.bigInteger.shiftLeft(n)) + def <<(n: Int): BigInt = + if (longEncoding && n <= 0) (this >> (-n)) else BigInt(this.bigInteger.shiftLeft(n)) /** (Signed) rightshift of BigInt */ - def >> (n: Int): BigInt = BigInt(this.bigInteger.shiftRight(n)) - + def >>(n: Int): BigInt = + if (longEncoding && n >= 0) { + if (n < 64) BigInt(_long >> n) + else if (_long < 0) BigInt(-1) + else BigInt(0) // for _long >= 0 + } else BigInt(this.bigInteger.shiftRight(n)) + /** Bitwise and of BigInts */ - def & (that: BigInt): BigInt = BigInt(this.bigInteger.and(that.bigInteger)) + def &(that: BigInt): BigInt = + if (this.longEncoding && that.longEncoding) + BigInt(this._long & that._long) + else BigInt(this.bigInteger.and(that.bigInteger)) /** Bitwise or of BigInts */ - def | (that: BigInt): BigInt = BigInt(this.bigInteger.or (that.bigInteger)) + def |(that: BigInt): BigInt = + if (this.longEncoding && that.longEncoding) + BigInt(this._long | that._long) + else BigInt(this.bigInteger.or(that.bigInteger)) /** Bitwise exclusive-or of BigInts */ - def ^ (that: BigInt): BigInt = BigInt(this.bigInteger.xor(that.bigInteger)) + def ^(that: BigInt): BigInt = + if (this.longEncoding && that.longEncoding) + BigInt(this._long ^ that._long) + else BigInt(this.bigInteger.xor(that.bigInteger)) /** Bitwise and-not of BigInts. Returns a BigInt whose value is (this & ~that). */ - def &~ (that: BigInt): BigInt = BigInt(this.bigInteger.andNot(that.bigInteger)) + def &~(that: BigInt): BigInt = + if (this.longEncoding && that.longEncoding) + BigInt(this._long & ~that._long) + else BigInt(this.bigInteger.andNot(that.bigInteger)) /** Returns the greatest common divisor of abs(this) and abs(that) */ - def gcd (that: BigInt): BigInt = BigInt(this.bigInteger.gcd(that.bigInteger)) + def gcd(that: BigInt): BigInt = + if (this.longEncoding) { + if (this._long == 0) return that.abs + // if (this._long == Long.MinValue) return (-this) gcd that + // this != 0 && this != Long.MinValue + if (that.longEncoding) { + if (that._long == 0) return this.abs + // if (that._long == Long.MinValue) return this gcd (-that) + BigInt(BigInt.longGcd(this._long.abs, that._long.abs)) + } else that gcd this // force the BigInteger on the left + } else { + // this is not a valid long + if (that.longEncoding) { + if (that._long == 0) return this.abs + // if (that._long == Long.MinValue) return this gcd (-that) + val red = (this._bigInteger mod BigInteger.valueOf(that._long.abs)).longValue() + if (red == 0) return that.abs + BigInt(BigInt.longGcd(that._long.abs, red)) + } else BigInt(this.bigInteger.gcd(that.bigInteger)) + } + /** Returns a BigInt whose value is (this mod that). * This method differs from `%` in that it always returns a non-negative BigInt. 
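A few sanity checks around these operations (illustrative only): `gcd` works on absolute values and treats `gcd(0, 0)` as 0, while `mod`, unlike `%`, never yields a negative result.

  assert(BigInt(12).gcd(BigInt(-18)) == BigInt(6))
  assert(BigInt(0).gcd(BigInt(0)) == BigInt(0))
  assert((BigInt(-7) % BigInt(3)) == BigInt(-1))    // remainder keeps the dividend's sign
  assert((BigInt(-7) mod BigInt(3)) == BigInt(2))   // mod shifts it into [0, that)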
* @param that A positive number */ - def mod (that: BigInt): BigInt = BigInt(this.bigInteger.mod(that.bigInteger)) + def mod(that: BigInt): BigInt = + if (this.longEncoding && that.longEncoding) { + val res = this._long % that._long + if (res >= 0) BigInt(res) else BigInt(res + that._long) + } else BigInt(this.bigInteger.mod(that.bigInteger)) /** Returns the minimum of this and that */ - def min (that: BigInt): BigInt = BigInt(this.bigInteger.min(that.bigInteger)) + def min(that: BigInt): BigInt = + if (this <= that) this else that /** Returns the maximum of this and that */ - def max (that: BigInt): BigInt = BigInt(this.bigInteger.max(that.bigInteger)) + def max(that: BigInt): BigInt = + if (this >= that) this else that /** Returns a BigInt whose value is (this raised to the power of exp). */ - def pow (exp: Int): BigInt = BigInt(this.bigInteger.pow(exp)) + def pow(exp: Int): BigInt = BigInt(this.bigInteger.pow(exp)) /** Returns a BigInt whose value is * (this raised to the power of exp modulo m). */ - def modPow (exp: BigInt, m: BigInt): BigInt = - BigInt(this.bigInteger.modPow(exp.bigInteger, m.bigInteger)) + def modPow(exp: BigInt, m: BigInt): BigInt = BigInt(this.bigInteger.modPow(exp.bigInteger, m.bigInteger)) /** Returns a BigInt whose value is (the inverse of this modulo m). */ - def modInverse (m: BigInt): BigInt = BigInt(this.bigInteger.modInverse(m.bigInteger)) + def modInverse(m: BigInt): BigInt = BigInt(this.bigInteger.modInverse(m.bigInteger)) /** Returns a BigInt whose value is the negation of this BigInt */ - def unary_- : BigInt = BigInt(this.bigInteger.negate()) + def unary_- : BigInt = if (longEncoding) BigInt(-_long) else BigInt(this.bigInteger.negate()) /** Returns the absolute value of this BigInt */ - def abs: BigInt = BigInt(this.bigInteger.abs()) + def abs: BigInt = if (signum < 0) -this else this /** Returns the sign of this BigInt; * -1 if it is less than 0, * +1 if it is greater than 0, * 0 if it is equal to 0. */ - def signum: Int = this.bigInteger.signum() + def signum: Int = if (longEncoding) java.lang.Long.signum(_long) else _bigInteger.signum() /** Returns the sign of this BigInt; * -1 if it is less than 0, * +1 if it is greater than 0, * 0 if it is equal to 0. */ - def sign: BigInt = signum + def sign: BigInt = BigInt(signum) /** Returns the bitwise complement of this BigInt */ - def unary_~ : BigInt = BigInt(this.bigInteger.not()) + def unary_~ : BigInt = + // it is equal to -(this + 1) + if (longEncoding && _long != Long.MaxValue) BigInt(-(_long + 1)) else BigInt(this.bigInteger.not()) /** Returns true if and only if the designated bit is set. */ - def testBit (n: Int): Boolean = this.bigInteger.testBit(n) + def testBit(n: Int): Boolean = + if (longEncoding) { + if (n <= 63) + (_long & (1L << n)) != 0 + else + _long < 0 // give the sign bit + } else _bigInteger.testBit(n) /** Returns a BigInt whose value is equivalent to this BigInt with the designated bit set. */ - def setBit (n: Int): BigInt = BigInt(this.bigInteger.setBit(n)) + def setBit(n: Int): BigInt = // note that we do not operate on the Long sign bit #63 + if (longEncoding && n <= 62) BigInt(_long | (1L << n)) else BigInt(this.bigInteger.setBit(n)) /** Returns a BigInt whose value is equivalent to this BigInt with the designated bit cleared. 
*/ - def clearBit(n: Int): BigInt = BigInt(this.bigInteger.clearBit(n)) + def clearBit(n: Int): BigInt = // note that we do not operate on the Long sign bit #63 + if (longEncoding && n <= 62) BigInt(_long & ~(1L << n)) else BigInt(this.bigInteger.clearBit(n)) /** Returns a BigInt whose value is equivalent to this BigInt with the designated bit flipped. */ - def flipBit (n: Int): BigInt = BigInt(this.bigInteger.flipBit(n)) + def flipBit(n: Int): BigInt = // note that we do not operate on the Long sign bit #63 + if (longEncoding && n <= 62) BigInt(_long ^ (1L << n)) else BigInt(this.bigInteger.flipBit(n)) /** Returns the index of the rightmost (lowest-order) one bit in this BigInt * (the number of zero bits to the right of the rightmost one bit). */ - def lowestSetBit: Int = this.bigInteger.getLowestSetBit() + def lowestSetBit: Int = + if (longEncoding) { + if (_long == 0) -1 else java.lang.Long.numberOfTrailingZeros(_long) + } else this.bigInteger.getLowestSetBit() /** Returns the number of bits in the minimal two's-complement representation of this BigInt, * excluding a sign bit. */ - def bitLength: Int = this.bigInteger.bitLength() + def bitLength: Int = + // bitLength is defined as ceil(log2(this < 0 ? -this : this + 1))) + // where ceil(log2(x)) = 64 - numberOfLeadingZeros(x - 1) + if (longEncoding) { + if (_long < 0) 64 - java.lang.Long.numberOfLeadingZeros(-(_long + 1)) // takes care of Long.MinValue + else 64 - java.lang.Long.numberOfLeadingZeros(_long) + } else _bigInteger.bitLength() /** Returns the number of bits in the two's complement representation of this BigInt * that differ from its sign bit. */ - def bitCount: Int = this.bigInteger.bitCount() + def bitCount: Int = + if (longEncoding) { + if (_long < 0) java.lang.Long.bitCount(-(_long + 1)) else java.lang.Long.bitCount(_long) + } else this.bigInteger.bitCount() /** Returns true if this BigInt is probably prime, false if it's definitely composite. * @param certainty a measure of the uncertainty that the caller is willing to tolerate: @@ -413,7 +574,7 @@ final class BigInt private (private var _bigInteger: BigInteger, private val _lo * overall magnitude of the BigInt value as well as return a result with * the opposite sign. */ - def intValue: Int = this.bigInteger.intValue + def intValue: Int = if (longEncoding) _long.toInt else this.bigInteger.intValue /** Converts this BigInt to a long. * If the BigInt is too big to fit in a long, only the low-order 64 bits @@ -421,7 +582,7 @@ final class BigInt private (private var _bigInteger: BigInteger, private val _lo * overall magnitude of the BigInt value as well as return a result with * the opposite sign. */ - def longValue: Long = this.bigInteger.longValue + def longValue: Long = if (longEncoding) _long else _bigInteger.longValue /** Converts this `BigInt` to a `float`. * If this `BigInt` has too great a magnitude to represent as a float, @@ -435,7 +596,9 @@ final class BigInt private (private var _bigInteger: BigInteger, private val _lo * it will be converted to `Double.NEGATIVE_INFINITY` or * `Double.POSITIVE_INFINITY` as appropriate. */ - def doubleValue: Double = this.bigInteger.doubleValue + def doubleValue: Double = + if (isValidLong && (-(1L << 53) <= _long && _long <= (1L << 53))) _long.toDouble + else this.bigInteger.doubleValue /** Create a `NumericRange[BigInt]` in range `[start;end)` * with the specified step, where start is the target BigInt. 
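The bit-level fast paths deliberately stop at bit 62: bit 63 is the sign bit of the Long encoding, so anything touching it, or any index beyond it, is delegated to BigInteger. Illustrative checks:

  assert(BigInt(-1).testBit(100))                                 // high bits replicate the sign (two's complement)
  assert(BigInt(0).setBit(63) == BigInt("9223372036854775808"))   // bit 63 forces the BigInteger path
  assert(BigInt(255).bitLength == 8)
  assert(BigInt(-1).bitLength == 0)                               // bitLength excludes the sign bit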
@@ -452,7 +615,7 @@ final class BigInt private (private var _bigInteger: BigInteger, private val _lo /** Returns the decimal String representation of this BigInt. */ - override def toString(): String = this.bigInteger.toString() + override def toString(): String = if (longEncoding) _long.toString() else _bigInteger.toString() /** Returns the String representation in the specified radix of this BigInt. */ From 7cc2a07df5617ffe09fd8230249f8487a97ee5b3 Mon Sep 17 00:00:00 2001 From: Denis Rosset Date: Tue, 11 May 2021 23:24:31 +0200 Subject: [PATCH 189/769] Added comment about nonnegativity on BigInt.longGcd --- src/library/scala/math/BigInt.scala | 1 + 1 file changed, 1 insertion(+) diff --git a/src/library/scala/math/BigInt.scala b/src/library/scala/math/BigInt.scala index d0018fc8e970..ba00778bd049 100644 --- a/src/library/scala/math/BigInt.scala +++ b/src/library/scala/math/BigInt.scala @@ -130,6 +130,7 @@ object BigInt { */ private def longGcd(a: Long, b: Long): Long = { // code adapted from Google Guava LongMath.java / gcd + // both a and b must be >= 0 if (a == 0) { // 0 % b == 0, so b divides a, but the converse doesn't hold. // BigInteger.gcd is consistent with this decision. return b From 141167111e2e75e443663e113b997308d3e69f90 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Fri, 9 Apr 2021 03:39:20 -0700 Subject: [PATCH 190/769] Test status quo for leading infix --- .../scala/tools/nsc/ast/parser/Scanners.scala | 4 +- test/files/neg/multiLineOps.check | 2 +- test/files/neg/multiLineOps.scala | 4 +- test/files/neg/t12071.check | 37 +++++++++++++++++++ test/files/neg/t12071.scala | 36 ++++++++++++++++++ test/files/run/t12071.scala | 28 ++++++++++++++ 6 files changed, 106 insertions(+), 5 deletions(-) create mode 100644 test/files/neg/t12071.check create mode 100644 test/files/neg/t12071.scala create mode 100644 test/files/run/t12071.scala diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala index 9d1f7b55a91e..aca8096852bd 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala @@ -469,8 +469,8 @@ trait Scanners extends ScannersCommon { val msg = """|Line starts with an operator that in future |will be taken as an infix expression continued from the previous line. |To force the previous interpretation as a separate statement, - |add an explicit `;`, add an empty line, or remove spaces after the operator.""".stripMargin - deprecationWarning(msg, "2.13.2") + |add an explicit `;`, add an empty line, or remove spaces after the operator.""" + deprecationWarning(msg.stripMargin, "2.13.2") insertNL(NEWLINE) } } diff --git a/test/files/neg/multiLineOps.check b/test/files/neg/multiLineOps.check index c9882d57e1c2..32b8a5366e17 100644 --- a/test/files/neg/multiLineOps.check +++ b/test/files/neg/multiLineOps.check @@ -1,5 +1,5 @@ multiLineOps.scala:6: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses - +3 // error: Expected a toplevel definition + +3 // error: Expected a toplevel definition (or pure expr warning, here) ^ error: No warnings can be incurred under -Werror. 
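In user terms, the behaviour being tested: under the future rule a line that starts with an operator followed by an operand continues the previous expression, whereas the current parser still treats it as a new (discarded) statement, hence the pure-expression warning above. A hypothetical snippet, not taken from the test files:

  val x = 1
    + 2   // today: a separate pure-expression statement, so x == 1 (plus the warning above)
          // future / Scala 3 reading: infix continuation of the previous line, so x == 3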
1 warning diff --git a/test/files/neg/multiLineOps.scala b/test/files/neg/multiLineOps.scala index 792528620773..e1c2bfee6304 100644 --- a/test/files/neg/multiLineOps.scala +++ b/test/files/neg/multiLineOps.scala @@ -1,7 +1,7 @@ -// scalac: -Werror -Xsource:3 +// scalac: -Werror -Xlint -Xsource:3 class Test { val x = 1 + 2 - +3 // error: Expected a toplevel definition + +3 // error: Expected a toplevel definition (or pure expr warning, here) } diff --git a/test/files/neg/t12071.check b/test/files/neg/t12071.check new file mode 100644 index 000000000000..6c8167faa8ec --- /dev/null +++ b/test/files/neg/t12071.check @@ -0,0 +1,37 @@ +t12071.scala:15: error: not found: value c c + `c c` i + ^ +t12071.scala:15: error: postfix operator i needs to be enabled +by making the implicit value scala.language.postfixOps visible. +This can be achieved by adding the import clause 'import scala.language.postfixOps' +or by setting the compiler option -language:postfixOps. +See the Scaladoc for value scala.language.postfixOps for a discussion +why the feature needs to be explicitly enabled. + `c c` i + ^ +t12071.scala:20: warning: Line starts with an operator that in future +will be taken as an infix expression continued from the previous line. +To force the previous interpretation as a separate statement, +add an explicit `;`, add an empty line, or remove spaces after the operator. + + 2 + ^ +t12071.scala:25: warning: Line starts with an operator that in future +will be taken as an infix expression continued from the previous line. +To force the previous interpretation as a separate statement, +add an explicit `;`, add an empty line, or remove spaces after the operator. + + 1 + ^ +t12071.scala:28: warning: Line starts with an operator that in future +will be taken as an infix expression continued from the previous line. +To force the previous interpretation as a separate statement, +add an explicit `;`, add an empty line, or remove spaces after the operator. + `test-1` + `test-2` + ^ +t12071.scala:31: warning: Line starts with an operator that in future +will be taken as an infix expression continued from the previous line. +To force the previous interpretation as a separate statement, +add an explicit `;`, add an empty line, or remove spaces after the operator. + `compareTo` (2 - 1) + ^ +4 warnings +2 errors diff --git a/test/files/neg/t12071.scala b/test/files/neg/t12071.scala new file mode 100644 index 000000000000..28dc895c03c6 --- /dev/null +++ b/test/files/neg/t12071.scala @@ -0,0 +1,36 @@ +// scalac: -Werror -Xlint + +class C { + def `c c`(n: Int): Int = n + 1 +} + +// backticked operator is candidate for multiline infix, +// but backticked value is an innocent bystander. +// +class t12071 { + def c: C = ??? + def i: Int = 42 + def `n n`: Int = 17 + def f = c + `c c` i + def g = i + + `n n` + def basic = + 1 + + 2 +} + +object C { + def x = 42 + + 1 + + def y = 1 + + `test-1` + `test-2` + + def z = 2 + `compareTo` (2 - 1) + + def `test-1`: Int = 23 + def `test-2`: Int = 42 + def compareTo(x: Int) = println("lol") +} diff --git a/test/files/run/t12071.scala b/test/files/run/t12071.scala new file mode 100644 index 000000000000..5950647a1526 --- /dev/null +++ b/test/files/run/t12071.scala @@ -0,0 +1,28 @@ +// scalac: -Werror -Xlint -Xsource:3 + +class C { + def `c c`(n: Int): Int = n + 1 +} + +// backticked operator is candidate for multiline infix, +// but backticked value is an innocent bystander. 
+// +class t12071 { + def c: C = new C + def i: Int = 42 + def `n n`: Int = 27 + def f = c + `c c` i + def g = i + + `n n` + def basic = + 1 + + 2 +} + +object Test extends App { + val t = new t12071 + assert(t.f == 43) + assert(t.g == 69) + assert(t.basic == 3) +} From 1924d2dc4a3d66163387cb5eae652a6071168dc7 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Fri, 9 Apr 2021 04:58:13 -0700 Subject: [PATCH 191/769] Postfix error doesn't suppress warnings --- src/compiler/scala/tools/nsc/Reporting.scala | 8 ++++++-- test/files/neg/t12071.check | 4 ++++ 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/src/compiler/scala/tools/nsc/Reporting.scala b/src/compiler/scala/tools/nsc/Reporting.scala index c69a60f3f8be..cd26e72a7cfd 100644 --- a/src/compiler/scala/tools/nsc/Reporting.scala +++ b/src/compiler/scala/tools/nsc/Reporting.scala @@ -246,8 +246,12 @@ trait Reporting extends internal.Reporting { self: ast.Positions with Compilatio && parentFileName(pos.source).getOrElse("") == "xsbt" && Thread.currentThread.getStackTrace.exists(_.getClassName.startsWith("sbt.")) ) - if (required && !isSbtCompat) reporter.error(pos, msg) - else warning(pos, msg, featureCategory(featureTrait.nameString), site) + // on postfix error, include interesting infix warning + def isXfix = featureName == "postfixOps" && suspendedMessages.get(pos.source).map(_.exists(w => pos.includes(w.pos))).getOrElse(false) + if (required && !isSbtCompat) { + val amended = if (isXfix) s"$msg\n${suspendedMessages(pos.source).filter(pos includes _.pos).map(_.msg).mkString("\n")}" else msg + reporter.error(pos, amended) + } else warning(pos, msg, featureCategory(featureTrait.nameString), site) } // Used in the optimizer where we don't have no symbols, the site string is created from the class internal name and method name. diff --git a/test/files/neg/t12071.check b/test/files/neg/t12071.check index 6c8167faa8ec..88198baf3274 100644 --- a/test/files/neg/t12071.check +++ b/test/files/neg/t12071.check @@ -7,6 +7,10 @@ This can be achieved by adding the import clause 'import scala.language.postfixO or by setting the compiler option -language:postfixOps. See the Scaladoc for value scala.language.postfixOps for a discussion why the feature needs to be explicitly enabled. +Line starts with an operator that in future +will be taken as an infix expression continued from the previous line. +To force the previous interpretation as a separate statement, +add an explicit `;`, add an empty line, or remove spaces after the operator. 
`c c` i ^ t12071.scala:20: warning: Line starts with an operator that in future From 838092d2b8668bb57a662020d6653cfe4b0701d1 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Sat, 10 Apr 2021 14:08:49 -0700 Subject: [PATCH 192/769] Backport Allow infix operators on their own line --- .../scala/tools/nsc/ast/parser/Scanners.scala | 8 ++++---- test/files/pos/leading-infix-op.scala | 19 +++++++++++++++++++ test/files/run/multiLineOps.scala | 5 +++-- 3 files changed, 26 insertions(+), 6 deletions(-) create mode 100644 test/files/pos/leading-infix-op.scala diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala index aca8096852bd..d727806e9c0b 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala @@ -448,11 +448,11 @@ trait Scanners extends ScannersCommon { */ def isLeadingInfixOperator = allowLeadingInfixOperators && - (token == BACKQUOTED_IDENT || - token == IDENTIFIER && isOperatorPart(name.charAt(name.length - 1))) && - (ch == ' ') && lookingAhead { + (token == BACKQUOTED_IDENT || token == IDENTIFIER && isOperatorPart(name.charAt(name.length - 1))) && + ch <= ' ' && lookingAhead { // force a NEWLINE after current token if it is on its own line - isSimpleExprIntroToken(token) + isSimpleExprIntroToken(token) || + token == NEWLINE && { nextToken() ; isSimpleExprIntroToken(token) } } /* Insert NEWLINE or NEWLINES if diff --git a/test/files/pos/leading-infix-op.scala b/test/files/pos/leading-infix-op.scala new file mode 100644 index 000000000000..4b60aa67b8c1 --- /dev/null +++ b/test/files/pos/leading-infix-op.scala @@ -0,0 +1,19 @@ + +// scalac: -Xsource:3 + +trait T { + def f(x: Int): Boolean = + x < 0 + || + x > 0 + && + x != 3 + + def g(x: Option[Int]) = x match { + case Some(err) => + println("hi") + ??? + case None => + ??? + } +} diff --git a/test/files/run/multiLineOps.scala b/test/files/run/multiLineOps.scala index 0bba854027fc..793a8a49eebb 100644 --- a/test/files/run/multiLineOps.scala +++ b/test/files/run/multiLineOps.scala @@ -1,6 +1,7 @@ // scalac: -Xsource:3 // -// without backticks, "not found: value +" +// was: without backticks, "not found: value +" (but parsed here as +a * 6, where backticks fool the lexer) +// now: + is taken as "solo" infix op // object Test extends App { val a = 7 @@ -8,5 +9,5 @@ object Test extends App { + // `a` * 6 - assert(x == 1) + assert(x == 1 + 42, x) // was: 1 } From fc47f58f161c9e1c0e8c0d62e3c6656d27529c3d Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Sun, 11 Apr 2021 00:02:53 -0700 Subject: [PATCH 193/769] Backport Refine condition of leading infix operator --- .../scala/tools/nsc/ast/parser/Scanners.scala | 23 +++++++++++++++---- test/files/neg/t12071.scala | 6 +++++ test/files/pos/i11371.scala | 21 +++++++++++++++++ 3 files changed, 45 insertions(+), 5 deletions(-) create mode 100644 test/files/pos/i11371.scala diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala index d727806e9c0b..683614c76451 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala @@ -442,6 +442,23 @@ trait Scanners extends ScannersCommon { token = nl } + /* A leading infix operator must be followed by a lexically suitable expression. + * Usually any simple expr will do. 
However, if the op is backtick style, make + * sure it is not followed by a binary op, which suggests the backticked identifier + * is a reference. + */ + def followedByInfixRHS: Boolean = { + val current = token + def isOp: Boolean = token == IDENTIFIER && isOperatorPart(name.charAt(name.length - 1)) + def isCandidateInfixRHS: Boolean = + isSimpleExprIntroToken(token) && + (current != BACKQUOTED_IDENT || !isOp || nme.raw.isUnary(name)) + lookingAhead { + isCandidateInfixRHS || + token == NEWLINE && { nextToken() ; isCandidateInfixRHS } + } + } + /* A leading symbolic or backquoted identifier is treated as an infix operator * if it is followed by at least one ' ' and a token on the same line * that can start an expression. @@ -449,11 +466,7 @@ trait Scanners extends ScannersCommon { def isLeadingInfixOperator = allowLeadingInfixOperators && (token == BACKQUOTED_IDENT || token == IDENTIFIER && isOperatorPart(name.charAt(name.length - 1))) && - ch <= ' ' && lookingAhead { - // force a NEWLINE after current token if it is on its own line - isSimpleExprIntroToken(token) || - token == NEWLINE && { nextToken() ; isSimpleExprIntroToken(token) } - } + ch <= ' ' && followedByInfixRHS /* Insert NEWLINE or NEWLINES if * - we are after a newline diff --git a/test/files/neg/t12071.scala b/test/files/neg/t12071.scala index 28dc895c03c6..e08dc0815ec1 100644 --- a/test/files/neg/t12071.scala +++ b/test/files/neg/t12071.scala @@ -33,4 +33,10 @@ object C { def `test-1`: Int = 23 def `test-2`: Int = 42 def compareTo(x: Int) = println("lol") + + var `test-3`: List[Int] = Nil + + // since ++ is not unary, test-3 is not taken as an operator; this test doesn't fix y above. + def yy = List.empty[Int] ++ + `test-3` ++ `test-3` } diff --git a/test/files/pos/i11371.scala b/test/files/pos/i11371.scala new file mode 100644 index 000000000000..74156b777c9f --- /dev/null +++ b/test/files/pos/i11371.scala @@ -0,0 +1,21 @@ +// scalac: -Xsource:3 +// +object HelloWorld { + def whileLoop: Int = { + var i = 0 + var acc = 0 + while (i < 3) { + var `i'` = 0 + while (`i'` < 4) { + acc += (i * `i'`) + `i'` += 1 + } + i += 1 + } + acc + } + + def main(args: Array[String]): Unit = { + println(s"hello world: ${whileLoop}") + } +} From 2efc7ed50678088473ec7414e78a7911f6611d88 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Thu, 15 Apr 2021 07:49:11 -0700 Subject: [PATCH 194/769] Backport Generalize isOperator and test for assignment op --- .../scala/tools/nsc/ast/parser/Scanners.scala | 28 +++++++++++-------- test/files/neg/t12071.scala | 17 ++++++++--- test/files/run/multiLineOps.scala | 8 ++++-- 3 files changed, 34 insertions(+), 19 deletions(-) diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala index 683614c76451..04e648100f6e 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala @@ -442,20 +442,23 @@ trait Scanners extends ScannersCommon { token = nl } + def isOperator: Boolean = token == BACKQUOTED_IDENT || token == IDENTIFIER && isOperatorPart(name.charAt(name.length - 1)) + /* A leading infix operator must be followed by a lexically suitable expression. - * Usually any simple expr will do. However, if the op is backtick style, make - * sure it is not followed by a binary op, which suggests the backticked identifier - * is a reference. + * Usually any simple expr will do. However, a backquoted identifier may serve as + * either an op or a reference. 
So the additional constraint is that the following + * token can't be an assignment operator. (Dotty disallows binary ops, hence the + * test for unary.) See run/multiLineOps.scala for 42 + `x` on 3 lines, where + + * is not leading infix because backquoted x is non-unary op. */ def followedByInfixRHS: Boolean = { - val current = token - def isOp: Boolean = token == IDENTIFIER && isOperatorPart(name.charAt(name.length - 1)) - def isCandidateInfixRHS: Boolean = - isSimpleExprIntroToken(token) && - (current != BACKQUOTED_IDENT || !isOp || nme.raw.isUnary(name)) + //def isCandidateInfixRHS: Boolean = isSimpleExprIntroToken(token) && (!isOperator || nme.raw.isUnary(name) || token == BACKQUOTED_IDENT) + def isAssignmentOperator: Boolean = + name.endsWith('=') && !name.startsWith('=') && isOperatorPart(name.startChar) && + (name.length != 2 || (name.startChar match { case '!' | '<' | '>' => false case _ => true })) + def isCandidateInfixRHS: Boolean = isSimpleExprIntroToken(token) && (!isOperator || token == BACKQUOTED_IDENT || !isAssignmentOperator) lookingAhead { - isCandidateInfixRHS || - token == NEWLINE && { nextToken() ; isCandidateInfixRHS } + isCandidateInfixRHS || token == NEWLINE && { nextToken() ; isCandidateInfixRHS } } } @@ -465,8 +468,9 @@ trait Scanners extends ScannersCommon { */ def isLeadingInfixOperator = allowLeadingInfixOperators && - (token == BACKQUOTED_IDENT || token == IDENTIFIER && isOperatorPart(name.charAt(name.length - 1))) && - ch <= ' ' && followedByInfixRHS + isOperator && + (isWhitespace(ch) || ch == LF) && + followedByInfixRHS /* Insert NEWLINE or NEWLINES if * - we are after a newline diff --git a/test/files/neg/t12071.scala b/test/files/neg/t12071.scala index e08dc0815ec1..e9eb18b9d89a 100644 --- a/test/files/neg/t12071.scala +++ b/test/files/neg/t12071.scala @@ -34,9 +34,18 @@ object C { def `test-2`: Int = 42 def compareTo(x: Int) = println("lol") - var `test-3`: List[Int] = Nil + def yy = 1 + /* fails in scala 3 + + + `test-1` + + + `test-2` + */ +} - // since ++ is not unary, test-3 is not taken as an operator; this test doesn't fix y above. 
- def yy = List.empty[Int] ++ - `test-3` ++ `test-3` +object Test extends App { + println(C.x) + println(C.y) + println(C.z) + println(C.yy) } diff --git a/test/files/run/multiLineOps.scala b/test/files/run/multiLineOps.scala index 793a8a49eebb..ef319d9210dc 100644 --- a/test/files/run/multiLineOps.scala +++ b/test/files/run/multiLineOps.scala @@ -6,8 +6,10 @@ object Test extends App { val a = 7 val x = 1 - + // - `a` * 6 + + + `a` + * + 6 - assert(x == 1 + 42, x) // was: 1 + assert(x == 1 + 7 * 6, x) // was: 1, now: successor(42) } From d5cb78cc1337592f4e968647e714618cf49a1a26 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Tue, 11 May 2021 16:32:26 -0700 Subject: [PATCH 195/769] Infix warn on migration --- .../scala/tools/nsc/ast/parser/Scanners.scala | 6 +++++- test/files/neg/multiLineOps.check | 2 +- test/files/neg/multiLineOps.scala | 2 +- test/files/neg/stmt-expr-discard.check | 14 +------------- test/files/neg/t12071.scala | 2 +- test/files/neg/t9847.check | 14 +------------- 6 files changed, 10 insertions(+), 30 deletions(-) diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala index 04e648100f6e..17b46da9191c 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala @@ -13,6 +13,7 @@ package scala.tools.nsc package ast.parser +import scala.tools.nsc.settings.ScalaVersion import scala.tools.nsc.util.{CharArrayReader, CharArrayReaderData} import scala.reflect.internal.util._ import scala.reflect.internal.Chars._ @@ -403,6 +404,9 @@ trait Scanners extends ScannersCommon { sepRegions = sepRegions.tail } + /** True to warn about migration change in infix syntax. */ + private val infixMigration = settings.Xmigration.value <= ScalaVersion("2.13.2") + /** Produce next token, filling TokenData fields of Scanner. */ def nextToken(): Unit = { @@ -487,7 +491,7 @@ trait Scanners extends ScannersCommon { |will be taken as an infix expression continued from the previous line. |To force the previous interpretation as a separate statement, |add an explicit `;`, add an empty line, or remove spaces after the operator.""" - deprecationWarning(msg.stripMargin, "2.13.2") + if (infixMigration) deprecationWarning(msg.stripMargin, "2.13.2") insertNL(NEWLINE) } } diff --git a/test/files/neg/multiLineOps.check b/test/files/neg/multiLineOps.check index 32b8a5366e17..e3d865c984d4 100644 --- a/test/files/neg/multiLineOps.check +++ b/test/files/neg/multiLineOps.check @@ -1,5 +1,5 @@ multiLineOps.scala:6: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses - +3 // error: Expected a toplevel definition (or pure expr warning, here) + +3 // warning: a pure expression does nothing in statement position ^ error: No warnings can be incurred under -Werror. 
1 warning diff --git a/test/files/neg/multiLineOps.scala b/test/files/neg/multiLineOps.scala index e1c2bfee6304..4a92fd9f2c0c 100644 --- a/test/files/neg/multiLineOps.scala +++ b/test/files/neg/multiLineOps.scala @@ -3,5 +3,5 @@ class Test { val x = 1 + 2 - +3 // error: Expected a toplevel definition (or pure expr warning, here) + +3 // warning: a pure expression does nothing in statement position } diff --git a/test/files/neg/stmt-expr-discard.check b/test/files/neg/stmt-expr-discard.check index 250de20f98d2..cc22eb1d843b 100644 --- a/test/files/neg/stmt-expr-discard.check +++ b/test/files/neg/stmt-expr-discard.check @@ -1,15 +1,3 @@ -stmt-expr-discard.scala:5: warning: Line starts with an operator that in future -will be taken as an infix expression continued from the previous line. -To force the previous interpretation as a separate statement, -add an explicit `;`, add an empty line, or remove spaces after the operator. - + 2 - ^ -stmt-expr-discard.scala:6: warning: Line starts with an operator that in future -will be taken as an infix expression continued from the previous line. -To force the previous interpretation as a separate statement, -add an explicit `;`, add an empty line, or remove spaces after the operator. - - 4 - ^ stmt-expr-discard.scala:5: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses + 2 ^ @@ -17,5 +5,5 @@ stmt-expr-discard.scala:6: warning: a pure expression does nothing in statement - 4 ^ error: No warnings can be incurred under -Werror. -4 warnings +2 warnings 1 error diff --git a/test/files/neg/t12071.scala b/test/files/neg/t12071.scala index e9eb18b9d89a..f3f9529c147b 100644 --- a/test/files/neg/t12071.scala +++ b/test/files/neg/t12071.scala @@ -1,4 +1,4 @@ -// scalac: -Werror -Xlint +// scalac: -Werror -Xlint -Xmigration:2.13 class C { def `c c`(n: Int): Int = n + 1 diff --git a/test/files/neg/t9847.check b/test/files/neg/t9847.check index 27899eb467be..d3c6c485f72c 100644 --- a/test/files/neg/t9847.check +++ b/test/files/neg/t9847.check @@ -1,15 +1,3 @@ -t9847.scala:10: warning: Line starts with an operator that in future -will be taken as an infix expression continued from the previous line. -To force the previous interpretation as a separate statement, -add an explicit `;`, add an empty line, or remove spaces after the operator. - + 1 - ^ -t9847.scala:14: warning: Line starts with an operator that in future -will be taken as an infix expression continued from the previous line. -To force the previous interpretation as a separate statement, -add an explicit `;`, add an empty line, or remove spaces after the operator. - + 1 - ^ t9847.scala:6: warning: discarded non-Unit value def f(): Unit = 42 ^ @@ -47,5 +35,5 @@ t9847.scala:24: warning: a pure expression does nothing in statement position; m class D { 42 ; 17 } ^ error: No warnings can be incurred under -Werror. 
-14 warnings +12 warnings 1 error From f774ae5ef340b11f1ce6936c1710b35f22626730 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Wed, 12 May 2021 11:58:52 +0200 Subject: [PATCH 196/769] single sym for specialErasure --- .../scala/tools/nsc/transform/Erasure.scala | 6 ++--- .../scala/reflect/internal/SymbolPairs.scala | 4 ++-- .../reflect/internal/transform/Erasure.scala | 22 ++++++++----------- 3 files changed, 14 insertions(+), 18 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index 8eec39c7de05..c950d89fd258 100644 --- a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -575,7 +575,7 @@ abstract class Erasure extends InfoTransform debuglog("generating bridge from %s (%s): %s%s to %s: %s%s".format( other, flagsToString(newFlags), otpe, other.locationString, member, - specialErasure(root)(member.tpe, root), member.locationString) + specialErasure(root)(member.tpe), member.locationString) ) // the parameter symbols need to have the new owner @@ -1120,7 +1120,7 @@ abstract class Erasure extends InfoTransform gen.mkMethodCall( qual1(), fun.symbol, - List(specialErasure(fun.symbol)(arg.tpe, fun.symbol)), + List(specialErasure(fun.symbol)(arg.tpe)), Nil ), isArrayTest(qual1()) @@ -1355,7 +1355,7 @@ abstract class Erasure extends InfoTransform fields.dropFieldAnnotationsFromGetter(tree.symbol) try super.transform(tree1).clearType() - finally tpt setType specialErasure(tree1.symbol)(tree1.symbol.tpe, tree1.symbol).resultType + finally tpt setType specialErasure(tree1.symbol)(tree1.symbol.tpe).resultType case ApplyDynamic(qual, Literal(Constant(bootstrapMethodRef: Symbol)) :: _) => tree case _: Apply if tree1 ne tree => diff --git a/src/reflect/scala/reflect/internal/SymbolPairs.scala b/src/reflect/scala/reflect/internal/SymbolPairs.scala index 495b3c4e18a6..7d2f1f895550 100644 --- a/src/reflect/scala/reflect/internal/SymbolPairs.scala +++ b/src/reflect/scala/reflect/internal/SymbolPairs.scala @@ -43,12 +43,12 @@ abstract class SymbolPairs { def rootType: Type = self def lowType: Type = self memberType low - def lowErased: Type = erasure.specialErasure(base)(low.tpe, low) + def lowErased: Type = erasure.specialErasure(low)(low.tpe) def lowClassBound: Type = classBoundAsSeen(low.tpe.typeSymbol) def highType: Type = self memberType high def highInfo: Type = self memberInfo high - def highErased: Type = erasure.specialErasure(base)(high.tpe, high) + def highErased: Type = erasure.specialErasure(high)(high.tpe) def highClassBound: Type = classBoundAsSeen(high.tpe.typeSymbol) def isErroneous = low.tpe.isErroneous || high.tpe.isErroneous diff --git a/src/reflect/scala/reflect/internal/transform/Erasure.scala b/src/reflect/scala/reflect/internal/transform/Erasure.scala index f02bed550f03..ba1a683d076f 100644 --- a/src/reflect/scala/reflect/internal/transform/Erasure.scala +++ b/src/reflect/scala/reflect/internal/transform/Erasure.scala @@ -256,18 +256,14 @@ trait Erasure { /** This is used as the Scala erasure during the erasure phase itself * It differs from normal erasure in that value classes are erased to ErasedValueTypes which * are then later converted to the underlying parameter type in phase posterasure. - * - * @param symOfTp used to determine the erasure mode for the type, - * e.g. in `SymbolPair#highErased`, `sym` may be an anonymous class for a SAM type, - * but `symOfTp` may be the a bridge method for the SAM method being erased. 
*/ - def specialErasure(sym: Symbol)(tp: Type, symOfTp: Symbol): Type = + def specialErasure(sym: Symbol)(tp: Type): Type = if (sym != NoSymbol && sym.enclClass.isJavaDefined) erasure(sym)(tp) else if (sym.isClassConstructor) - specialConstructorErasure(sym.owner, symOfTp, tp) + specialConstructorErasure(sym.owner, sym, tp) else { - specialScalaErasureFor(symOfTp)(tp) + specialScalaErasureFor(sym)(tp) } def specialConstructorErasure(clazz: Symbol, ctor: Symbol, tpe: Type): Type = { @@ -654,15 +650,15 @@ trait Erasure { if (sym == Object_asInstanceOf || synchronizedPrimitive(sym)) sym.info else if (sym == Object_isInstanceOf || sym == ArrayClass) - PolyType(sym.info.typeParams, specialErasure(sym)(sym.info.resultType, sym)) + PolyType(sym.info.typeParams, specialErasure(sym)(sym.info.resultType)) else if (sym.isAbstractType) TypeBounds(WildcardType, WildcardType) // TODO why not use the erasure of the type's bounds, as stated in the doc? else if (sym.isTerm && sym.owner == ArrayClass) { if (sym.isClassConstructor) // TODO: switch on name for all branches -- this one is sym.name == nme.CONSTRUCTOR tp match { case MethodType(params, TypeRef(pre, sym1, args)) => - MethodType(cloneSymbolsAndModify(params, tp => specialErasure(sym)(tp, sym)), - typeRef(specialErasure(sym)(pre, sym), sym1, args)) + MethodType(cloneSymbolsAndModify(params, tp => specialErasure(sym)(tp)), + typeRef(specialErasure(sym)(pre), sym1, args)) case x => throw new MatchError(x) } else if (sym.name == nme.apply) @@ -670,9 +666,9 @@ trait Erasure { else if (sym.name == nme.update) (tp: @unchecked) match { case MethodType(List(index, tvar), restpe) => - MethodType(List(index.cloneSymbol.setInfo(specialErasure(sym)(index.tpe, sym)), tvar), UnitTpe) + MethodType(List(index.cloneSymbol.setInfo(specialErasure(sym)(index.tpe)), tvar), UnitTpe) } - else specialErasure(sym)(tp, sym) + else specialErasure(sym)(tp) } else if ( sym.owner != NoSymbol && sym.owner.owner == ArrayClass && @@ -684,7 +680,7 @@ trait Erasure { } else { // TODO OPT: altogether, there are 9 symbols that we special-case. // Could we get to the common case more quickly by looking them up in a set? - specialErasure(sym)(tp, sym) + specialErasure(sym)(tp) } } } From a234798ff65dc15da7e0915da7a524c1b648bc61 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Thu, 8 Apr 2021 11:30:11 +0200 Subject: [PATCH 197/769] Clarify the semantics of `Map.equals` and `Set.equals` in Scaladoc Also change the overrides of `SortedMap.equals` and `SortedSet.equals` to check for key equivalence according to the ordering, instead of key equality. --- src/library/scala/collection/Map.scala | 31 ++++++++++ src/library/scala/collection/Set.scala | 31 ++++++++++ src/library/scala/collection/SortedMap.scala | 9 ++- src/library/scala/collection/SortedSet.scala | 4 +- .../scala/collection/immutable/HashMap.scala | 2 +- .../scala/collection/immutable/HashSet.scala | 2 +- .../scala/collection/immutable/LongMap.scala | 6 +- .../scala/collection/immutable/TreeMap.scala | 2 +- .../collection/SortedSetMapEqualsTest.scala | 59 ++++++++++++++++++- 9 files changed, 134 insertions(+), 12 deletions(-) diff --git a/src/library/scala/collection/Map.scala b/src/library/scala/collection/Map.scala index 5f929fe82e1f..81cbfd65497e 100644 --- a/src/library/scala/collection/Map.scala +++ b/src/library/scala/collection/Map.scala @@ -29,6 +29,37 @@ trait Map[K, +V] def canEqual(that: Any): Boolean = true + /** + * Equality of maps is implemented using the lookup method [[get]]. 
This method returns `true` if + * - the argument `o` is a `Map`, + * - the two maps have the same [[size]], and + * - for every `(key, value)` pair in this map, `other.get(key) == Some(value)`. + * + * The implementation of `equals` checks the [[canEqual]] method, so subclasses of `Map` can narrow down the equality + * to specific map types. The `Map` implementations in the standard library can all be compared, their `canEqual` + * methods return `true`. + * + * Note: The `equals` method only respects the equality laws (symmetry, transitivity) if the two maps use the same + * key equivalence function in their lookup operation. For example, the key equivalence operation in a + * [[scala.collection.immutable.TreeMap]] is defined by its ordering. Comparing a `TreeMap` with a `HashMap` leads + * to unexpected results if `ordering.equiv(k1, k2)` (used for lookup in `TreeMap`) is different from `k1 == k2` + * (used for lookup in `HashMap`). + * + * {{{ + * scala> import scala.collection.immutable._ + * scala> val ord: Ordering[String] = _ compareToIgnoreCase _ + * + * scala> TreeMap("A" -> 1)(ord) == HashMap("a" -> 1) + * val res0: Boolean = false + * + * scala> HashMap("a" -> 1) == TreeMap("A" -> 1)(ord) + * val res1: Boolean = true + * }}} + * + * + * @param o The map to which this map is compared + * @return `true` if the two maps are equal according to the description + */ override def equals(o: Any): Boolean = (this eq o.asInstanceOf[AnyRef]) || (o match { case map: Map[K, _] if map.canEqual(this) => diff --git a/src/library/scala/collection/Set.scala b/src/library/scala/collection/Set.scala index 188a96e78326..b2b93114d2e1 100644 --- a/src/library/scala/collection/Set.scala +++ b/src/library/scala/collection/Set.scala @@ -28,6 +28,37 @@ trait Set[A] def canEqual(that: Any) = true + /** + * Equality of sets is implemented using the lookup method [[contains]]. This method returns `true` if + * - the argument `that` is a `Set`, + * - the two sets have the same [[size]], and + * - for every `element` this set, `other.contains(element) == true`. + * + * The implementation of `equals` checks the [[canEqual]] method, so subclasses of `Set` can narrow down the equality + * to specific set types. The `Set` implementations in the standard library can all be compared, their `canEqual` + * methods return `true`. + * + * Note: The `equals` method only respects the equality laws (symmetry, transitivity) if the two sets use the same + * element equivalence function in their lookup operation. For example, the element equivalence operation in a + * [[scala.collection.immutable.TreeSet]] is defined by its ordering. Comparing a `TreeSet` with a `HashSet` leads + * to unexpected results if `ordering.equiv(e1, e2)` (used for lookup in `TreeSet`) is different from `e1 == e2` + * (used for lookup in `HashSet`). 
+ * + * {{{ + * scala> import scala.collection.immutable._ + * scala> val ord: Ordering[String] = _ compareToIgnoreCase _ + * + * scala> TreeSet("A")(ord) == HashSet("a") + * val res0: Boolean = false + * + * scala> HashSet("a") == TreeSet("A")(ord) + * val res1: Boolean = true + * }}} + * + * + * @param that The set to which this set is compared + * @return `true` if the two sets are equal according to the description + */ override def equals(that: Any): Boolean = (this eq that.asInstanceOf[AnyRef]) || (that match { case set: Set[A] if set.canEqual(this) => diff --git a/src/library/scala/collection/SortedMap.scala b/src/library/scala/collection/SortedMap.scala index 570b09a12b3c..29ebc304678c 100644 --- a/src/library/scala/collection/SortedMap.scala +++ b/src/library/scala/collection/SortedMap.scala @@ -30,14 +30,17 @@ trait SortedMap[K, +V] override def equals(that: Any): Boolean = that match { case _ if this eq that.asInstanceOf[AnyRef] => true - case sm: SortedMap[k, v] if sm.ordering == this.ordering => + case sm: SortedMap[K, _] if sm.ordering == this.ordering => (sm canEqual this) && (this.size == sm.size) && { val i1 = this.iterator val i2 = sm.iterator var allEqual = true - while (allEqual && i1.hasNext) - allEqual = i1.next() == i2.next() + while (allEqual && i1.hasNext) { + val kv1 = i1.next() + val kv2 = i2.next() + allEqual = ordering.equiv(kv1._1, kv2._1) && kv1._2 == kv2._2 + } allEqual } case _ => super.equals(that) diff --git a/src/library/scala/collection/SortedSet.scala b/src/library/scala/collection/SortedSet.scala index 77f62dc15e98..6dc3ed6242e6 100644 --- a/src/library/scala/collection/SortedSet.scala +++ b/src/library/scala/collection/SortedSet.scala @@ -29,14 +29,14 @@ trait SortedSet[A] extends Set[A] override def equals(that: Any): Boolean = that match { case _ if this eq that.asInstanceOf[AnyRef] => true - case ss: SortedSet[_] if ss.ordering == this.ordering => + case ss: SortedSet[A] if ss.ordering == this.ordering => (ss canEqual this) && (this.size == ss.size) && { val i1 = this.iterator val i2 = ss.iterator var allEqual = true while (allEqual && i1.hasNext) - allEqual = i1.next() == i2.next() + allEqual = ordering.equiv(i1.next(), i2.next()) allEqual } case _ => diff --git a/src/library/scala/collection/immutable/HashMap.scala b/src/library/scala/collection/immutable/HashMap.scala index c6fb4abe6e03..d59841853476 100644 --- a/src/library/scala/collection/immutable/HashMap.scala +++ b/src/library/scala/collection/immutable/HashMap.scala @@ -254,7 +254,7 @@ final class HashMap[K, +V] private[immutable] (private[immutable] val rootNode: override def equals(that: Any): Boolean = that match { - case map: HashMap[K, V] => (this eq map) || (this.rootNode == map.rootNode) + case map: HashMap[_, _] => (this eq map) || (this.rootNode == map.rootNode) case _ => super.equals(that) } diff --git a/src/library/scala/collection/immutable/HashSet.scala b/src/library/scala/collection/immutable/HashSet.scala index 67bcb2924fda..1c08da18023b 100644 --- a/src/library/scala/collection/immutable/HashSet.scala +++ b/src/library/scala/collection/immutable/HashSet.scala @@ -176,7 +176,7 @@ final class HashSet[A] private[immutable](private[immutable] val rootNode: Bitma override def equals(that: Any): Boolean = that match { - case set: HashSet[A] => (this eq set) || (this.rootNode == set.rootNode) + case set: HashSet[_] => (this eq set) || (this.rootNode == set.rootNode) case _ => super.equals(that) } diff --git a/src/library/scala/collection/immutable/LongMap.scala 
b/src/library/scala/collection/immutable/LongMap.scala index aed44f57a966..c418dc7616ac 100644 --- a/src/library/scala/collection/immutable/LongMap.scala +++ b/src/library/scala/collection/immutable/LongMap.scala @@ -63,9 +63,9 @@ object LongMap { private[immutable] case object Nil extends LongMap[Nothing] { // Important, don't remove this! See IntMap for explanation. override def equals(that : Any) = that match { - case (that: AnyRef) if (this eq that) => true - case (that: LongMap[_]) => false // The only empty LongMaps are eq Nil - case that => super.equals(that) + case _: this.type => true + case _: LongMap[_] => false // The only empty LongMaps are eq Nil + case _ => super.equals(that) } } diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala index 81165b798580..90441e867052 100644 --- a/src/library/scala/collection/immutable/TreeMap.scala +++ b/src/library/scala/collection/immutable/TreeMap.scala @@ -283,7 +283,7 @@ final class TreeMap[K, +V] private (private val tree: RB.Tree[K, V])(implicit va } } override def equals(obj: Any): Boolean = obj match { - case that: TreeMap[K, V] if ordering == that.ordering => RB.entriesEqual(tree, that.tree) + case that: TreeMap[K, _] if ordering == that.ordering => RB.entriesEqual(tree, that.tree) case _ => super.equals(obj) } diff --git a/test/junit/scala/collection/SortedSetMapEqualsTest.scala b/test/junit/scala/collection/SortedSetMapEqualsTest.scala index 44653696c74f..804a6989f8e7 100644 --- a/test/junit/scala/collection/SortedSetMapEqualsTest.scala +++ b/test/junit/scala/collection/SortedSetMapEqualsTest.scala @@ -1,6 +1,7 @@ package scala.collection -import org.junit.{Assert, Test}, Assert.assertEquals +import org.junit.{Assert, Test} +import Assert.{assertEquals, assertNotEquals} class SortedSetMapEqualsTest { @Test @@ -68,4 +69,60 @@ class SortedSetMapEqualsTest { } assertEquals(m1, m2) } + + @Test + def compareSortedMapKeysByOrdering(): Unit = { + val ord: Ordering[String] = _ compareToIgnoreCase _ + + val itm1 = scala.collection.immutable.TreeMap("A" -> "2")(ord) + val itm2 = scala.collection.immutable.TreeMap("a" -> "2")(ord) + val mtm1 = scala.collection.mutable.TreeMap("A" -> "2")(ord) + val mtm2 = scala.collection.mutable.TreeMap("a" -> "2")(ord) + + assertEquals(itm1, itm2) + assertEquals(mtm1, mtm2) + + assertEquals(itm1, mtm2) + assertEquals(mtm1, itm2) + + val m1 = Map("A" -> "2") + val m2 = Map("a" -> "2") + + for (m <- List(m1, m2); tm <- List[Map[String, String]](itm1, itm2, mtm1, mtm2)) + assertEquals(m, tm) // uses keys in `m` to look up values in `tm`, which always succeeds + + assertEquals(itm1, m1) + assertEquals(mtm1, m1) + + assertNotEquals(itm2, m1) // uses key in `itm2` ("a") to look up in `m1`, which fails + assertNotEquals(mtm2, m1) + } + + @Test + def compareSortedSetsByOrdering(): Unit = { + val ord: Ordering[String] = _ compareToIgnoreCase _ + + val its1 = scala.collection.immutable.TreeSet("A")(ord) + val its2 = scala.collection.immutable.TreeSet("a")(ord) + val mts1 = scala.collection.mutable.TreeSet("A")(ord) + val mts2 = scala.collection.mutable.TreeSet("a")(ord) + + assertEquals(its1, its2) + assertEquals(mts1, mts2) + + assertEquals(its1, mts2) + assertEquals(mts1, its2) + + val s1 = Set("A") + val s2 = Set("a") + + for (m <- List(s1, s2); tm <- List[Set[String]](its1, its2, mts1, mts2)) + assertEquals(m, tm) // uses keys in `m` to look up values in `tm`, which always succeeds + + assertEquals(its1, s1) + assertEquals(mts1, s1) + + 
assertNotEquals(its2, s1) // uses key in `its2` ("a") to look up in `s1`, which fails + assertNotEquals(mts2, s1) + } } From 24476bc15e8bc85cb2e45b409a6e385d08f31ff9 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Wed, 12 May 2021 14:23:30 +0200 Subject: [PATCH 198/769] Catch ClassCastException in Map.equals / Set.equals `Map.equals` calls `otherMap.getOrElse(keyFromThisMap, ...)` which fails with a `ClassCastException` if `otherMap` assumes a certain key type. Similarly for `Set.equals`. This is not a good solution, but nothing better is known as of now given the binary compatibility constraints, see discussion in PR 9565. --- src/library/scala/collection/Map.scala | 6 ++++-- src/library/scala/collection/Set.scala | 5 ++++- test/junit/scala/collection/MapTest.scala | 5 +++++ 3 files changed, 13 insertions(+), 3 deletions(-) diff --git a/src/library/scala/collection/Map.scala b/src/library/scala/collection/Map.scala index 81cbfd65497e..59e1b5db0651 100644 --- a/src/library/scala/collection/Map.scala +++ b/src/library/scala/collection/Map.scala @@ -63,8 +63,10 @@ trait Map[K, +V] override def equals(o: Any): Boolean = (this eq o.asInstanceOf[AnyRef]) || (o match { case map: Map[K, _] if map.canEqual(this) => - (this.size == map.size) && - this.forall(kv => map.getOrElse(kv._1, Map.DefaultSentinelFn()) == kv._2) + (this.size == map.size) && { + try this.forall(kv => map.getOrElse(kv._1, Map.DefaultSentinelFn()) == kv._2) + catch { case _: ClassCastException => false } // PR #9565 / scala/bug#12228 + } case _ => false }) diff --git a/src/library/scala/collection/Set.scala b/src/library/scala/collection/Set.scala index b2b93114d2e1..d35494cd1eb5 100644 --- a/src/library/scala/collection/Set.scala +++ b/src/library/scala/collection/Set.scala @@ -62,7 +62,10 @@ trait Set[A] override def equals(that: Any): Boolean = (this eq that.asInstanceOf[AnyRef]) || (that match { case set: Set[A] if set.canEqual(this) => - (this.size == set.size) && this.subsetOf(set) + (this.size == set.size) && { + try this.subsetOf(set) + catch { case _: ClassCastException => false } // PR #9565 / scala/bug#12228 + } case _ => false }) diff --git a/test/junit/scala/collection/MapTest.scala b/test/junit/scala/collection/MapTest.scala index 6bfa66955e54..90900619f472 100644 --- a/test/junit/scala/collection/MapTest.scala +++ b/test/junit/scala/collection/MapTest.scala @@ -123,4 +123,9 @@ class MapTest { check(mutable.CollisionProofHashMap(1 -> 1)) } + @Test + def t12228(): Unit = { + assertFalse(Set("") == immutable.BitSet(1)) + assertFalse(Map("" -> 2) == scala.collection.immutable.LongMap(1L -> 2)) + } } From 945c2a59725c974ec46e4366bebcc075f8e4ce66 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Wed, 12 May 2021 13:34:32 +0200 Subject: [PATCH 199/769] Override checks between Java-defined members only for mixins Override checking between Java-defined members can and should be skipped: javac will do the checks anyway, and applying Scala's rules can lead to false errors. However, when mixing in a Java interface into a Scala class, default methods in the interface have to be checked according to Scala's rules, because Scala linearization applies in this case. 
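For illustration, a sketch of the two situations (mirroring the new t12394 test files added below, where the Java class `p.A.C` declares a final `m()` and the Java interface `p.A.J` provides a default `m()`):

```
class S1 extends p.A.D            // OK: D is Java-defined, javac has already checked its overrides
class S2 extends p.A.C with p.A.J // error: cannot override final member C.m with the default J.m
```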
--- .../tools/nsc/transform/OverridingPairs.scala | 15 +++++++++++++++ .../scala/tools/nsc/typechecker/RefChecks.scala | 2 +- test/files/neg/t12394.check | 11 +++++++++++ test/files/neg/t12394/A.java | 17 +++++++++++++++++ test/files/neg/t12394/Test.scala | 4 ++++ 5 files changed, 48 insertions(+), 1 deletion(-) create mode 100644 test/files/neg/t12394.check create mode 100644 test/files/neg/t12394/A.java create mode 100644 test/files/neg/t12394/Test.scala diff --git a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala index ef7479a52e60..1eeb283560f3 100644 --- a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala +++ b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala @@ -53,6 +53,21 @@ abstract class OverridingPairs extends SymbolPairs { && !exclude(low) // this admits private, as one can't have a private member that matches a less-private member. && (lowMemberType matches (self memberType high)) ) // TODO we don't call exclude(high), should we? + + override protected def skipOwnerPair(lowClass: Symbol, highClass: Symbol): Boolean = { + // Two Java-defined methods can be skipped in most cases, as javac will check the overrides; skipping is + // actually necessary to avoid false errors, as Java doesn't have the Scala's linearization rules. However, when + // a Java interface is mixed into a Scala class, mixed-in default methods need to go through override checking + // (neg/t12394). Checking is also required if the "mixed-in" Java interface method is abstract (neg/t12380). + lowClass.isJavaDefined && highClass.isJavaDefined && { + !lowClass.isJavaInterface && !highClass.isJavaInterface || { + !base.info.parents.tail.exists(p => { + val psym = p.typeSymbol + psym.isNonBottomSubClass(lowClass) || psym.isNonBottomSubClass(highClass) + }) + } + } + } } private def bothJavaOwnedAndEitherIsField(low: Symbol, high: Symbol): Boolean = { diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index 16bbbf6d98a6..eaffb019aee6 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -383,7 +383,7 @@ abstract class RefChecks extends Transform { def isOverrideAccessOK = member.isPublic || { // member is public, definitely same or relaxed access (!other.isProtected || member.isProtected) && // if o is protected, so is m ((!isRootOrNone(ob) && ob.hasTransOwner(mb)) || // m relaxes o's access boundary - (other.isJavaDefined && (member.isJavaDefined || other.isProtected))) // overriding a protected java member, see #3946 #12349 + (other.isJavaDefined && other.isProtected)) // overriding a protected java member, see #3946 #12349 } if (!isOverrideAccessOK) { overrideAccessError() diff --git a/test/files/neg/t12394.check b/test/files/neg/t12394.check new file mode 100644 index 000000000000..7dbf4d49d9e5 --- /dev/null +++ b/test/files/neg/t12394.check @@ -0,0 +1,11 @@ +Test.scala:2: error: cannot override final member: +final def m(): Int (defined in class C) + with def m(): Int (defined in trait J) +class S2 extends p.A.C with p.A.J + ^ +Test.scala:4: error: cannot override final member: +final def m(): Int (defined in class C) + with def m(): Int (defined in trait J) +class S3 extends p.A.C with K + ^ +2 errors diff --git a/test/files/neg/t12394/A.java b/test/files/neg/t12394/A.java new file mode 100644 index 000000000000..cf3188018d93 --- /dev/null +++ 
b/test/files/neg/t12394/A.java @@ -0,0 +1,17 @@ +package p; + +public class A { + public static interface I { + default int m() { return 1; } + } + + public static interface J extends I { + @Override default int m() { return 2; } + } + + public static class C implements I { + @Override public final int m() { return 3; } + } + + public static class D extends C implements J { } +} diff --git a/test/files/neg/t12394/Test.scala b/test/files/neg/t12394/Test.scala new file mode 100644 index 000000000000..8a272c5127cd --- /dev/null +++ b/test/files/neg/t12394/Test.scala @@ -0,0 +1,4 @@ +class S1 extends p.A.D +class S2 extends p.A.C with p.A.J +trait K extends p.A.J +class S3 extends p.A.C with K From 194b1c0977a63a162b67906a9c6e762fddd05d3e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rafa=C5=82=20Sumis=C5=82awski?= Date: Wed, 12 May 2021 17:09:09 +0200 Subject: [PATCH 200/769] optimise immutable.Queue.last in case `in` is nonEmpty --- src/library/scala/collection/immutable/Queue.scala | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/library/scala/collection/immutable/Queue.scala b/src/library/scala/collection/immutable/Queue.scala index ae90826cd2bf..9c8a32d95a3e 100644 --- a/src/library/scala/collection/immutable/Queue.scala +++ b/src/library/scala/collection/immutable/Queue.scala @@ -97,6 +97,11 @@ sealed class Queue[+A] protected(protected val in: List[A], protected val out: L else if (in.nonEmpty) new Queue(Nil, in.reverse.tail) else throw new NoSuchElementException("tail on empty queue") + override def last: A = + if (in.nonEmpty) in.head + else if (out.nonEmpty) out.last + else throw new NoSuchElementException("last on empty queue") + /* This is made to avoid inefficient implementation of iterator. */ override def forall(p: A => Boolean): Boolean = in.forall(p) && out.forall(p) From 7c1d9df574b16f81344ef157b9554712f3e4e877 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Wed, 12 May 2021 14:06:08 -0700 Subject: [PATCH 201/769] add copyright notice for BigInt#longGcd --- src/library/scala/math/BigInt.scala | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/library/scala/math/BigInt.scala b/src/library/scala/math/BigInt.scala index ba00778bd049..6ea371328d9e 100644 --- a/src/library/scala/math/BigInt.scala +++ b/src/library/scala/math/BigInt.scala @@ -125,11 +125,16 @@ object BigInt { */ implicit def javaBigInteger2bigInt(x: BigInteger): BigInt = apply(x) + // this method is adapted from Google Guava's version at + // https://github.com/google/guava/blob/master/guava/src/com/google/common/math/LongMath.java + // that code carries the following notice: + // * Copyright (C) 2011 The Guava Authors + // * + // * Licensed under the Apache License, Version 2.0 (the "License") /** * Returns the greatest common divisor of a and b. Returns 0 if a == 0 && b == 0. */ private def longGcd(a: Long, b: Long): Long = { - // code adapted from Google Guava LongMath.java / gcd // both a and b must be >= 0 if (a == 0) { // 0 % b == 0, so b divides a, but the converse doesn't hold. // BigInteger.gcd is consistent with this decision. 
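As an aside on the `Queue.last` override above (an illustrative sketch, not part of any patch): `immutable.Queue` keeps its elements in two lists, `out` holding the front in order and `in` holding the back in reverse, so whenever `in` is nonEmpty the most recently enqueued element is simply `in.head` and `last` no longer needs to traverse the queue:

```
import scala.collection.immutable.Queue

val q = Queue(1, 2).enqueue(3).enqueue(4) // enqueue conses onto `in`, most recent element first
assert(q.last == 4)                       // answered from `in.head`, no traversal needed
assert(Queue(1, 2, 3).last == 3)          // correct regardless of which internal list holds the elements
```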
From b2fc73c2ddc251bfb8b4161f8bab919561feebaa Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 13 May 2021 12:14:47 +1000 Subject: [PATCH 202/769] Handle Singleton types in patmat's outer prefix align testing --- .../transform/patmat/MatchTreeMaking.scala | 22 ++++++++++++------- 1 file changed, 14 insertions(+), 8 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala index e6ac5f16d358..6896c16fb36a 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala @@ -399,17 +399,23 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { val testedPrefixIsExpectedTypePrefix = pre =:= testedBinderType.prefix val testedPrefixAndExpectedPrefixAreStaticallyIdentical: Boolean = { - val freshPrefix = pre match { + def check(freshPrefix: Type): Boolean = { + val expectedTpFromFreshPrefix = TypeRef(freshPrefix, sym, args) + val baseTypeFromFreshPrefix = expectedTpFromFreshPrefix.baseType(testedBinderClass) + freshPrefix eq baseTypeFromFreshPrefix.prefix + } + pre match { case ThisType(thissym) => - ThisType(thissym.cloneSymbol(thissym.owner)) + check(ThisType(thissym.cloneSymbol(thissym.owner))) case _ => - val preSym = pre.termSymbol.orElse(pre.typeSymbol) - val freshPreSym = preSym.cloneSymbol(preSym.owner).setInfo(preSym.info) - singleType(pre.prefix, freshPreSym) + pre.termSymbol match { + case NoSymbol => false + case preSym => + val freshPreSym = preSym.cloneSymbol(preSym.owner).setInfo(preSym.info) + check(singleType(pre.prefix, freshPreSym)) + } } - val expectedTpFromFreshPrefix = TypeRef(freshPrefix, sym, args) - val baseTypeFromFreshPrefix = expectedTpFromFreshPrefix.baseType(testedBinderClass) - freshPrefix eq baseTypeFromFreshPrefix.prefix + } testedPrefixAndExpectedPrefixAreStaticallyIdentical && testedPrefixIsExpectedTypePrefix case _ => From cbb9c5ef7c6083b7af7ef7d5a55a7fd7fc2b8217 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 13 May 2021 14:46:01 +1000 Subject: [PATCH 203/769] Remove obsolete build config from benchmarks Should have been removed in b18bdddde0357158796dfbfd77581f7cb98e20e8 --- test/benchmarks/project/build.properties | 1 - test/benchmarks/project/plugins.sbt | 1 - 2 files changed, 2 deletions(-) delete mode 100644 test/benchmarks/project/build.properties delete mode 100644 test/benchmarks/project/plugins.sbt diff --git a/test/benchmarks/project/build.properties b/test/benchmarks/project/build.properties deleted file mode 100644 index 0837f7a132de..000000000000 --- a/test/benchmarks/project/build.properties +++ /dev/null @@ -1 +0,0 @@ -sbt.version=1.3.13 diff --git a/test/benchmarks/project/plugins.sbt b/test/benchmarks/project/plugins.sbt deleted file mode 100644 index b57429f738ec..000000000000 --- a/test/benchmarks/project/plugins.sbt +++ /dev/null @@ -1 +0,0 @@ -addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.2.27") From 1b69c135192ae1c712bea039ced71ca6904a34c5 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 13 May 2021 15:33:03 +1000 Subject: [PATCH 204/769] Upgrade to sbt-jmh 1.4.2 --- project/plugins.sbt | 2 +- src/intellij/scala.ipr.SAMPLE | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index b032af93a8c7..17b1a733e101 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -32,6 +32,6 @@ concurrentRestrictions in Global := Seq( addSbtPlugin("de.heikoseeberger" % 
"sbt-header" % "5.0.0") -addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.2.27") +addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.2") addSbtPlugin("com.lightbend" % "sbt-whitesource" % "0.1.16") diff --git a/src/intellij/scala.ipr.SAMPLE b/src/intellij/scala.ipr.SAMPLE index d96540ba59a8..9fafee581e74 100644 --- a/src/intellij/scala.ipr.SAMPLE +++ b/src/intellij/scala.ipr.SAMPLE @@ -241,7 +241,7 @@ - + @@ -489,7 +489,7 @@ - + From 681cb9d0d6f099cb0bbf9de18c0bcbaf485da627 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 1 Oct 2018 15:09:57 +1000 Subject: [PATCH 205/769] [backport] Documentation and automation for using JITWatch to analyse JMH benchmarks Cherry picked from a2de76f71308ededd47455bafd6d05f45182b45e --- build.sbt | 2 +- project/JitWatch.scala | 88 +++++++++++++++++++++++++++++++++++++++ test/benchmarks/README.md | 61 +++++++++++++++++++++++++++ 3 files changed, 150 insertions(+), 1 deletion(-) create mode 100644 project/JitWatch.scala diff --git a/build.sbt b/build.sbt index 40a5311aff18..b951b11ca682 100644 --- a/build.sbt +++ b/build.sbt @@ -661,7 +661,7 @@ lazy val bench = project.in(file("test") / "benchmarks") libraryDependencies += "org.openjdk.jol" % "jol-core" % "0.6", compileOrder := CompileOrder.JavaThenScala, // to allow inlining from Java ("... is defined in a Java source (mixed compilation), no bytecode is available") scalacOptions ++= Seq("-feature", "-opt:l:inline", "-opt-inline-from:scala/**", "-opt-warnings"), - ) + ).settings(inConfig(JmhPlugin.JmhKeys.Jmh)(scalabuild.JitWatchFilePlugin.jitwatchSettings)) lazy val junit = project.in(file("test") / "junit") .dependsOn(library, reflect, compiler, partest, scaladoc) diff --git a/project/JitWatch.scala b/project/JitWatch.scala new file mode 100644 index 000000000000..7fdfb55813f9 --- /dev/null +++ b/project/JitWatch.scala @@ -0,0 +1,88 @@ +package scalabuild + +import java.io.FileWriter +import java.util.Properties + +import sbt._ +import Keys._ + +object JitWatchFilePlugin extends AutoPlugin { + override def trigger = allRequirements + override def requires = sbt.plugins.JvmPlugin + val jitwatchConfigFileContents = taskKey[Properties]("Contents of file suitable for `jitwatch/launchUI -Djitwatch.config.file=jitwatch.properties`") + val jitwatchConfigFile = taskKey[Unit]("file suitable for `jitwatch/launchUI.sh -Djitwatch.config.file=jitwatch.properties`") + + override lazy val projectSettings = List(Compile, Test).flatMap(c => inConfig(c)(jitwatchSettings)) + + def jitwatchSettings: Seq[Setting[_]] = Seq( + jitwatchConfigFileContents := { + val sourcesValue = sources.value + val depdependencyClasspathValue = dependencyClasspath.value ++ internalDependencyClasspath.value + val props = new java.util.Properties + val classpathString = (classDirectory.value +: depdependencyClasspathValue.map(_.data.toString)).mkString(",") + val artifacts: Seq[Artifact] = depdependencyClasspathValue.flatMap(_.get(Keys.artifact.key)) + val dependencyModuleIds: Set[ModuleID] = depdependencyClasspathValue.flatMap(_.get(Keys.moduleID.key)).toSet + props.put("Classes", classpathString) + + // JDK sources from $JAVA_HOME/src.zip + val javaHomeSrc = { + val javaDir = javaHome.value.getOrElse(new File(System.getProperty("java.home"))) + val src1 = javaDir / "src.zip" + val src2 = javaDir.getParentFile / "src.zip" + if (src1.exists()) src1 else src2 + } + + // Transitive sources from the projects that contribute to this classpath. 
+ val projects: Seq[ProjectRef] = buildDependencies.value.classpathTransitiveRefs(thisProjectRef.value) :+ thisProjectRef.value + val projectArtifacts: Map[ProjectRef, Seq[Artifact]] = projects.map(project => (project -> (Keys.artifacts in project get settingsData.value).getOrElse(Nil))).toMap + val artifactNameToProject: Map[String, Seq[ProjectRef]] = projects.groupBy(project => (Keys.name in project get settingsData.value).getOrElse("")) + val transitiveSourceDirectories = projects.flatMap { project => + val projectArtifacts: Seq[Artifact] = (Keys.artifacts in project get settingsData.value).getOrElse(Nil) + val matching = projectArtifacts.filter(artifacts.contains(_)) + val configs = matching.flatMap(artifact => artifact.configurations).distinct + val sourceDirectories: Seq[File] = configs.flatMap { configRef => + (Keys.sourceDirectories in project in sbt.Configuration.of(configRef.name.capitalize, configRef.name)).get(settingsData.value).toList.flatten + } + sourceDirectories + }.distinct + val transitiveSourceDirectories2 = artifacts.flatMap { artifact => + val projects = artifactNameToProject.getOrElse(artifact.name, Nil) + projects.flatMap { project: ProjectRef => + val configs = artifact.configurations + val sourceDirectories: Seq[File] = configs.toList.flatMap { configRef => + (Keys.sourceDirectories in project in sbt.Configuration.of(configRef.name.capitalize, configRef.name)).get(settingsData.value).toList.flatten + } + sourceDirectories + } + } + + // Download and add transitive sources from the classpath + val classiferArtifacts: Seq[(ModuleID, Artifact, File)] = updateClassifiers.value.configurations.flatMap(_.details.flatMap(_.modules.flatMap(report => report.artifacts.map(x => (report.module, x._1, x._2))))) + val sourceClassiferArtifacts = classiferArtifacts.filter(tuple => tuple._2.classifier == Some("sources") && dependencyModuleIds.contains(tuple._1)) + + val externalSources = sourceClassiferArtifacts.map(_._3) + val internalAndExternalSources = (sourceDirectory.value +: javaHomeSrc +: (transitiveSourceDirectories ++ transitiveSourceDirectories2).distinct) ++ externalSources + props.put("Sources", internalAndExternalSources.map(_.getAbsolutePath).mkString(",")) + val baseDir = baseDirectory.value + val lastLogDir = Keys.forkOptions.value.workingDirectory match { + case Some(dir) => dir + case _=> baseDir + } + props.put("LastLogDir", lastLogDir.getAbsolutePath) + props + }, + + jitwatchConfigFile := { + val f = target.value / ("jitwatch-" + configuration.value.name + ".properties") + val contents = jitwatchConfigFileContents.value + val log = streams.value.log + val fw = new FileWriter(f) + try { + jitwatchConfigFileContents.value.store(fw, null) + log.info(s"./launchUI.sh -Djitwatch.config.file=" + f.getAbsolutePath) + } finally { + fw.close() + } + } + ) +} diff --git a/test/benchmarks/README.md b/test/benchmarks/README.md index 994297110f20..f1815a24daa8 100644 --- a/test/benchmarks/README.md +++ b/test/benchmarks/README.md @@ -72,6 +72,9 @@ For an Oracle (or other compatible) JVM not set up by your distribution, you may also need to copy or link the disassembler library to the `jre/lib/`_`architecture`_ directory inside your JVM installation directory. +The JITWatch project has [hsdis build instructions](https://github.com/AdoptOpenJDK/jitwatch/wiki/Building-hsdis). 
+One way to obtain HSDIS is to use [the binaries](https://lafo.ssw.uni-linz.ac.at/pub/graal-external-deps/hsdis/intel/) which are used in the [Graal build](https://github.com/oracle/graal/blob/master/compiler/mx.compiler/mx_graal_tools.py#L94-L119). + To show the assembly code corresponding to the code generated by the JIT compiler for specific methods, add `-XX:CompileCommand=print,scala.collection.mutable.OpenHashMap::*`, for example, to show all of the methods in the `scala.collection.mutable.OpenHashMap` class. @@ -79,6 +82,64 @@ for example, to show all of the methods in the `scala.collection.mutable.OpenHas To show it for _all_ methods, add `-XX:+PrintAssembly`. (This is usually excessive.) +### Using JITWatch + +[JITWatch](https://github.com/AdoptOpenJDK/jitwatch) is useful to understand how the JVM has JIT compiled +code. + +If you install `hsdis`, as described above, machine code disassembly is also created. + +You can generate the `hotspot.log` file for a benchmark run by adding the [required JVM options](https://github.com/AdoptOpenJDK/jitwatch/wiki/Building-hsdis) +to JMH benchmark execution: + +``` +sbt:root> bench/jmh:run scala.collection.mutable.ArrayOpsBenchmark.insertInteger -psize=1000 -f1 -jvmArgs -XX:+UnlockDiagnosticVMOptions -jvmArgs -XX:+TraceClassLoading -jvmArgs -XX:+LogCompilation -jvmArgs -XX:LogFile=hotspot.log -jvmArgs -XX:+PrintAssembly +... +[info] Loaded disassembler from /Users/jz/.jabba/jdk/1.8.172/Contents/Home/jre/lib/hsdis-amd64.dylib +[info] Decoding compiled method 0x0000000113f60bd0: +[info] Code: +[info] [Disassembling for mach='i386:x86-64'] +[info] [Entry Point] +[info] [Constants] +[info] # {method} {0x000000010ffa0000} 'hashCode' '()I' in 'java/lang/String' +[info] # [sp+0x40] (sp of caller) +[info] 0x0000000113f60d40: mov r10d,DWORD PTR [rsi+0x8] +[info] 0x0000000113f60d44: shl r10,0x3 +... +[info] # Run complete. Total time: 00:00:30 +[info] Benchmark (size) Mode Cnt Score Error Units +[info] ArrayOpsBenchmark.insertInteger 1000 avgt 10 188199.582 ± 5930.520 ns/op +``` + +JITWatch requires configuration of the class and source path. We can generate that with a custom +task in our build: + +``` +sbt> bench/jmh:jitwatchConfigFile +[info] Resolving jline#jline;2.14.6 ... +jmh +[info] ./launchUI.sh -Djitwatch.config.file=/Users/jz/code/scala/test/benchmarks/target/jitwatch-jmh.properties + +sbt> ^C +``` + +Build jitwatch. + +``` +$ git clone https://github.com/AdoptOpenJDK/jitwatch +$ cd jitwatch +$ mvn install +``` + +Launch with the generated config file. +``` +$ ./launchUI.sh -Djitwatch.config.file=/Users/jz/code/scala/test/benchmarks/target/jitwatch-jmh.properties +``` + + + +Select the generated `hotspot.log`, `start`, and then browse the the benchmark to start gleaning insights! + ## Useful reading * [OpenJDK advice on microbenchmarks](https://wiki.openjdk.java.net/display/HotSpot/MicroBenchmarks) * Brian Goetz's "Java theory and practice" articles: From f98247a17f9a4ea853f7ffeaca4dbd6d8fb9f75f Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 26 Apr 2019 09:44:08 +1000 Subject: [PATCH 206/769] [backport] Update instructions an automation for using JITWatch for benchmark analysis - Fix source directories of the benchmarks themselves in the generated jitwatch.properties - Tell JITWatch where hotspot.log is located (see https://github.com/AdoptOpenJDK/jitwatch/issues/302) - Advise use of `mvn:exec java` rather than `launchUI.sh`. This should be cross-platform and handles compilation and execution in one command. 
- Move some instructions out of README.md and into the output of the jitwatchConfig task. - git ignore temp file generated in the working directory when following these instructions. Cherry picked from 8e4ddcc3f9c292be01e2498a8e4193f9eb799b33 --- .gitignore | 1 + project/JitWatch.scala | 16 ++++++++++------ test/benchmarks/README.md | 14 ++++++++------ 3 files changed, 19 insertions(+), 12 deletions(-) diff --git a/.gitignore b/.gitignore index 6bc73ba1261a..bda4862d1afe 100644 --- a/.gitignore +++ b/.gitignore @@ -57,3 +57,4 @@ /project/project/project/target/ /build-sbt/ local.sbt +jitwatch.out \ No newline at end of file diff --git a/project/JitWatch.scala b/project/JitWatch.scala index 7fdfb55813f9..8bd483cc618f 100644 --- a/project/JitWatch.scala +++ b/project/JitWatch.scala @@ -61,7 +61,7 @@ object JitWatchFilePlugin extends AutoPlugin { val sourceClassiferArtifacts = classiferArtifacts.filter(tuple => tuple._2.classifier == Some("sources") && dependencyModuleIds.contains(tuple._1)) val externalSources = sourceClassiferArtifacts.map(_._3) - val internalAndExternalSources = (sourceDirectory.value +: javaHomeSrc +: (transitiveSourceDirectories ++ transitiveSourceDirectories2).distinct) ++ externalSources + val internalAndExternalSources = sourceDirectories.value ++ (javaHomeSrc +: (transitiveSourceDirectories ++ transitiveSourceDirectories2).distinct) ++ externalSources props.put("Sources", internalAndExternalSources.map(_.getAbsolutePath).mkString(",")) val baseDir = baseDirectory.value val lastLogDir = Keys.forkOptions.value.workingDirectory match { @@ -73,16 +73,20 @@ object JitWatchFilePlugin extends AutoPlugin { }, jitwatchConfigFile := { - val f = target.value / ("jitwatch-" + configuration.value.name + ".properties") - val contents = jitwatchConfigFileContents.value + val jitwatchProps = target.value / ("jitwatch-" + configuration.value.name + ".properties") + val hotSpotLog = target.value / "hotspot.log" val log = streams.value.log - val fw = new FileWriter(f) + val fw = new FileWriter(jitwatchProps) try { jitwatchConfigFileContents.value.store(fw, null) - log.info(s"./launchUI.sh -Djitwatch.config.file=" + f.getAbsolutePath) + // TODO figure out the last benchmark that was run and focus the UI on that member with: -Djitwatch.focus.member="scala/collection/mutable/ArrayOpsBenchmark insertInteger (Lorg/openjdk/jmh/infra/Blackhole;)V" + log.info(s"^-- UNRESOLVED DEPENDENCIES warnings above are normal, please ignore") + log.info("After cloning https://github.com/AdoptOpenJDK/jitwatch to $JITWATCH_HOME, compile and launch with:") + log.info(s"mvn -f $$JITWATCH_HOME clean compile exec:java -Djitwatch.config.file=${jitwatchProps.getAbsolutePath} -Djitwatch.logfile=${hotSpotLog.getAbsolutePath}") + log.info("Note: Add, for example, `-Djitwatch.focus.member=\"scala/collection/mutable/ArrayOpsBenchmark insertInteger (Lorg/openjdk/jmh/infra/Blackhole;)V\"` to focus UI on a method of interest on startup.") } finally { fw.close() } - } + } ) } diff --git a/test/benchmarks/README.md b/test/benchmarks/README.md index f1815a24daa8..1c3cbee79f9d 100644 --- a/test/benchmarks/README.md +++ b/test/benchmarks/README.md @@ -93,7 +93,7 @@ You can generate the `hotspot.log` file for a benchmark run by adding the [requi to JMH benchmark execution: ``` -sbt:root> bench/jmh:run scala.collection.mutable.ArrayOpsBenchmark.insertInteger -psize=1000 -f1 -jvmArgs -XX:+UnlockDiagnosticVMOptions -jvmArgs -XX:+TraceClassLoading -jvmArgs -XX:+LogCompilation -jvmArgs -XX:LogFile=hotspot.log -jvmArgs 
-XX:+PrintAssembly +sbt:root> bench/jmh:run scala.collection.mutable.ArrayOpsBenchmark.insertInteger -psize=1000 -f1 -jvmArgs -XX:+UnlockDiagnosticVMOptions -jvmArgs -XX:+TraceClassLoading -jvmArgs -XX:+LogCompilation -jvmArgs -XX:LogFile=target/hotspot.log -jvmArgs -XX:+PrintAssembly ... [info] Loaded disassembler from /Users/jz/.jabba/jdk/1.8.172/Contents/Home/jre/lib/hsdis-amd64.dylib [info] Decoding compiled method 0x0000000113f60bd0: @@ -111,15 +111,17 @@ sbt:root> bench/jmh:run scala.collection.mutable.ArrayOpsBenchmark.insertInteger [info] ArrayOpsBenchmark.insertInteger 1000 avgt 10 188199.582 ± 5930.520 ns/op ``` -JITWatch requires configuration of the class and source path. We can generate that with a custom -task in our build: +JITWatch requires configuration of the class and source path. We generate that with a custom task in our build: ``` sbt> bench/jmh:jitwatchConfigFile [info] Resolving jline#jline;2.14.6 ... jmh -[info] ./launchUI.sh -Djitwatch.config.file=/Users/jz/code/scala/test/benchmarks/target/jitwatch-jmh.properties - +... +[info] ^-- UNRESOLVED DEPENDENCIES warnings above are normal, please ignore +[info] After cloning https://github.com/AdoptOpenJDK/jitwatch to $JITWATCH_HOME, compile and launch with: +[info] mvn -f $JITWATCH_HOME clean compile exec:java -Djitwatch.config.file=/Users/jz/code/scala/test/benchmarks/target/jitwatch-compile.properties -Djitwatch.logfile=/Users/jz/code/scala/test/benchmarks/target/hotspot.log +[info] Note: Add, for example, `-Djitwatch.focus.member="scala/collection/mutable/ArrayOpsBenchmark insertInteger (Lorg/openjdk/jmh/infra/Blackhole;)V"` to focus UI on a method of interest. sbt> ^C ``` @@ -138,7 +140,7 @@ $ ./launchUI.sh -Djitwatch.config.file=/Users/jz/code/scala/test/benchmarks/targ -Select the generated `hotspot.log`, `start`, and then browse the the benchmark to start gleaning insights! +Select the generated `hotspot.log`, `start`, and then browse the benchmark to start gleaning insights! ## Useful reading * [OpenJDK advice on microbenchmarks](https://wiki.openjdk.java.net/display/HotSpot/MicroBenchmarks) From a554fc0a6195aaf84aaada72b70e8a9c058542dd Mon Sep 17 00:00:00 2001 From: Anatolii Kmetiuk Date: Thu, 13 May 2021 17:31:42 +0200 Subject: [PATCH 207/769] Upgrade Dotty to 3.0.0 --- project/DottySupport.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/DottySupport.scala b/project/DottySupport.scala index 94c29eed0701..cd503c780b82 100644 --- a/project/DottySupport.scala +++ b/project/DottySupport.scala @@ -22,7 +22,7 @@ object TastySupport { * Dotty in .travis.yml. 
*/ object DottySupport { - val dottyVersion = "3.0.0-RC3" + val dottyVersion = "3.0.0" val compileWithDotty: Boolean = Option(System.getProperty("scala.build.compileWithDotty")).map(_.toBoolean).getOrElse(false) lazy val commonSettings = Seq( From 855b47f4eab628816ac1b83581aa2b33a6de9735 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Thu, 13 May 2021 17:42:33 +0100 Subject: [PATCH 208/769] Add a benchmark on pattern matching vs alternatives MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [info] Benchmark (numCases) Mode Cnt Score Error Units [info] ClassMatchBenchmark.justClassValueLookup 4 avgt 30 5.806 ± 0.021 ns/op [info] ClassMatchBenchmark.patmatShow 4 avgt 30 9.581 ± 0.085 ns/op [info] ClassMatchBenchmark.virtualShow 4 avgt 30 12.128 ± 0.115 ns/op [info] ClassMatchBenchmark.intSwitchShow 4 avgt 30 12.306 ± 0.301 ns/op [info] ClassMatchBenchmark.classNameHashSwitchShow 4 avgt 30 14.725 ± 0.259 ns/op [info] ClassMatchBenchmark.classValueShow 4 avgt 30 20.391 ± 0.099 ns/op [info] ClassMatchBenchmark.justClassValueLookup 8 avgt 30 5.804 ± 0.026 ns/op [info] ClassMatchBenchmark.patmatShow 8 avgt 30 11.553 ± 0.230 ns/op [info] ClassMatchBenchmark.virtualShow 8 avgt 30 13.454 ± 0.129 ns/op [info] ClassMatchBenchmark.intSwitchShow 8 avgt 30 13.600 ± 0.366 ns/op [info] ClassMatchBenchmark.classNameHashSwitchShow 8 avgt 30 17.978 ± 0.143 ns/op [info] ClassMatchBenchmark.classValueShow 8 avgt 30 22.896 ± 0.070 ns/op [info] ClassMatchBenchmark.justClassValueLookup 16 avgt 30 5.894 ± 0.035 ns/op [info] ClassMatchBenchmark.patmatShow 16 avgt 30 13.679 ± 0.349 ns/op [info] ClassMatchBenchmark.virtualShow 16 avgt 30 14.063 ± 0.076 ns/op [info] ClassMatchBenchmark.intSwitchShow 16 avgt 30 14.263 ± 0.393 ns/op [info] ClassMatchBenchmark.classNameHashSwitchShow 16 avgt 30 21.181 ± 0.113 ns/op [info] ClassMatchBenchmark.classValueShow 16 avgt 30 24.096 ± 0.081 ns/op [info] ClassMatchBenchmark.justClassValueLookup 32 avgt 30 6.057 ± 0.032 ns/op [info] ClassMatchBenchmark.intSwitchShow 32 avgt 30 14.539 ± 0.392 ns/op [info] ClassMatchBenchmark.virtualShow 32 avgt 30 15.321 ± 0.081 ns/op [info] ClassMatchBenchmark.patmatShow 32 avgt 30 16.044 ± 0.373 ns/op [info] ClassMatchBenchmark.classNameHashSwitchShow 32 avgt 30 21.954 ± 0.105 ns/op [info] ClassMatchBenchmark.classValueShow 32 avgt 30 24.795 ± 0.096 ns/op [info] ClassMatchBenchmark.justClassValueLookup 64 avgt 30 6.913 ± 0.033 ns/op [info] ClassMatchBenchmark.intSwitchShow 64 avgt 30 14.969 ± 0.377 ns/op [info] ClassMatchBenchmark.virtualShow 64 avgt 30 17.153 ± 0.094 ns/op [info] ClassMatchBenchmark.patmatShow 64 avgt 30 20.411 ± 0.071 ns/op [info] ClassMatchBenchmark.classNameHashSwitchShow 64 avgt 30 23.116 ± 0.094 ns/op [info] ClassMatchBenchmark.classValueShow 64 avgt 30 25.899 ± 0.181 ns/op [info] ClassMatchBenchmark.justClassValueLookup 128 avgt 30 9.092 ± 0.033 ns/op [info] ClassMatchBenchmark.intSwitchShow 128 avgt 30 15.814 ± 0.354 ns/op [info] ClassMatchBenchmark.virtualShow 128 avgt 30 18.603 ± 0.104 ns/op [info] ClassMatchBenchmark.classValueShow 128 avgt 30 27.443 ± 0.095 ns/op [info] ClassMatchBenchmark.classNameHashSwitchShow 128 avgt 30 27.656 ± 0.111 ns/op [info] ClassMatchBenchmark.patmatShow 128 avgt 30 31.792 ± 0.095 ns/op [info] ClassMatchBenchmark.justClassValueLookup 256 avgt 30 11.961 ± 0.342 ns/op [info] ClassMatchBenchmark.intSwitchShow 256 avgt 30 16.278 ± 0.271 ns/op [info] ClassMatchBenchmark.virtualShow 256 avgt 30 20.247 ± 0.126 ns/op [info] ClassMatchBenchmark.classValueShow 
256 avgt 30 30.162 ± 0.136 ns/op [info] ClassMatchBenchmark.classNameHashSwitchShow 256 avgt 30 31.498 ± 0.127 ns/op [info] ClassMatchBenchmark.patmatShow 256 avgt 30 56.949 ± 0.206 ns/op --- .../patmat/ClassMatchBenchmark.scala | 1127 +++++++++++++++++ 1 file changed, 1127 insertions(+) create mode 100644 test/benchmarks/src/main/scala/scala/tools/nsc/transform/patmat/ClassMatchBenchmark.scala diff --git a/test/benchmarks/src/main/scala/scala/tools/nsc/transform/patmat/ClassMatchBenchmark.scala b/test/benchmarks/src/main/scala/scala/tools/nsc/transform/patmat/ClassMatchBenchmark.scala new file mode 100644 index 000000000000..fd1f2c681239 --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/tools/nsc/transform/patmat/ClassMatchBenchmark.scala @@ -0,0 +1,1127 @@ +package scala.tools.nsc.transform.patmat + +import java.util.concurrent.TimeUnit +import org.openjdk.jmh.annotations.CompilerControl.Mode.DONT_INLINE +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra.Blackhole + +import scala.annotation.switch +import scala.util.Random + +@Warmup(iterations = 10, time = 1, timeUnit = TimeUnit.SECONDS) +@Measurement(iterations = 10, time = 1, timeUnit = TimeUnit.SECONDS) +@Fork(3) +@BenchmarkMode(Array(Mode.AverageTime)) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class ClassMatchBenchmark { + private final val count = 10000 + @Param(Array("4", "8", "16", "32", "64", "128", "256")) private var numCases = 0 + + private var names: Array[Name] = null + private var classValue: ClassValue[Int] = null + + @Setup def setup(): Unit = { + val r = new Random(12345) + val names = Array[Name]( + Name0(), Name1(), Name2(), Name3(), Name4(), Name5(), Name6(), Name7(), Name8(), Name9(), + Name10(), Name11(), Name12(), Name13(), Name14(), Name15(), Name16(), Name17(), Name18(), Name19(), + Name20(), Name21(), Name22(), Name23(), Name24(), Name25(), Name26(), Name27(), Name28(), Name29(), + Name30(), Name31(), Name32(), Name33(), Name34(), Name35(), Name36(), Name37(), Name38(), Name39(), + Name40(), Name41(), Name42(), Name43(), Name44(), Name45(), Name46(), Name47(), Name48(), Name49(), + Name50(), Name51(), Name52(), Name53(), Name54(), Name55(), Name56(), Name57(), Name58(), Name59(), + Name60(), Name61(), Name62(), Name63(), Name64(), Name65(), Name66(), Name67(), Name68(), Name69(), + Name70(), Name71(), Name72(), Name73(), Name74(), Name75(), Name76(), Name77(), Name78(), Name79(), + Name80(), Name81(), Name82(), Name83(), Name84(), Name85(), Name86(), Name87(), Name88(), Name89(), + Name90(), Name91(), Name92(), Name93(), Name94(), Name95(), Name96(), Name97(), Name98(), Name99(), + Name100(), Name101(), Name102(), Name103(), Name104(), Name105(), Name106(), Name107(), Name108(), Name109(), + Name110(), Name111(), Name112(), Name113(), Name114(), Name115(), Name116(), Name117(), Name118(), Name119(), + Name120(), Name121(), Name122(), Name123(), Name124(), Name125(), Name126(), Name127(), Name128(), Name129(), + Name130(), Name131(), Name132(), Name133(), Name134(), Name135(), Name136(), Name137(), Name138(), Name139(), + Name140(), Name141(), Name142(), Name143(), Name144(), Name145(), Name146(), Name147(), Name148(), Name149(), + Name150(), Name151(), Name152(), Name153(), Name154(), Name155(), Name156(), Name157(), Name158(), Name159(), + Name160(), Name161(), Name162(), Name163(), Name164(), Name165(), Name166(), Name167(), Name168(), Name169(), + Name170(), Name171(), Name172(), Name173(), Name174(), Name175(), Name176(), Name177(), Name178(), Name179(), + 
Name180(), Name181(), Name182(), Name183(), Name184(), Name185(), Name186(), Name187(), Name188(), Name189(), + Name190(), Name191(), Name192(), Name193(), Name194(), Name195(), Name196(), Name197(), Name198(), Name199(), + Name200(), Name201(), Name202(), Name203(), Name204(), Name205(), Name206(), Name207(), Name208(), Name209(), + Name210(), Name211(), Name212(), Name213(), Name214(), Name215(), Name216(), Name217(), Name218(), Name219(), + Name220(), Name221(), Name222(), Name223(), Name224(), Name225(), Name226(), Name227(), Name228(), Name229(), + Name230(), Name231(), Name232(), Name233(), Name234(), Name235(), Name236(), Name237(), Name238(), Name239(), + Name240(), Name241(), Name242(), Name243(), Name244(), Name245(), Name246(), Name247(), Name248(), Name249(), + Name250(), Name251(), Name252(), Name253(), Name254(), Name255(), + ) + this.names = Array.fill(count)(names(r.nextInt(numCases))) + this.classValue = new NameClassValue + } + + @Benchmark @OperationsPerInvocation(count) def patmatShow(bh: Blackhole): Unit = { + val names = this.names + var i = 0 + while (i < names.length) { + val x = names(i) match { + case Name0() => "0" case Name1() => "1" case Name2() => "2" case Name3() => "3" case Name4() => "4" + case Name5() => "5" case Name6() => "6" case Name7() => "7" case Name8() => "8" case Name9() => "9" + case Name10() => "10" case Name11() => "11" case Name12() => "12" case Name13() => "13" case Name14() => "14" + case Name15() => "15" case Name16() => "16" case Name17() => "17" case Name18() => "18" case Name19() => "19" + case Name20() => "20" case Name21() => "21" case Name22() => "22" case Name23() => "23" case Name24() => "24" + case Name25() => "25" case Name26() => "26" case Name27() => "27" case Name28() => "28" case Name29() => "29" + case Name30() => "30" case Name31() => "31" case Name32() => "32" case Name33() => "33" case Name34() => "34" + case Name35() => "35" case Name36() => "36" case Name37() => "37" case Name38() => "38" case Name39() => "39" + case Name40() => "40" case Name41() => "41" case Name42() => "42" case Name43() => "43" case Name44() => "44" + case Name45() => "45" case Name46() => "46" case Name47() => "47" case Name48() => "48" case Name49() => "49" + case Name50() => "50" case Name51() => "51" case Name52() => "52" case Name53() => "53" case Name54() => "54" + case Name55() => "55" case Name56() => "56" case Name57() => "57" case Name58() => "58" case Name59() => "59" + case Name60() => "60" case Name61() => "61" case Name62() => "62" case Name63() => "63" case Name64() => "64" + case Name65() => "65" case Name66() => "66" case Name67() => "67" case Name68() => "68" case Name69() => "69" + case Name70() => "70" case Name71() => "71" case Name72() => "72" case Name73() => "73" case Name74() => "74" + case Name75() => "75" case Name76() => "76" case Name77() => "77" case Name78() => "78" case Name79() => "79" + case Name80() => "80" case Name81() => "81" case Name82() => "82" case Name83() => "83" case Name84() => "84" + case Name85() => "85" case Name86() => "86" case Name87() => "87" case Name88() => "88" case Name89() => "89" + case Name90() => "90" case Name91() => "91" case Name92() => "92" case Name93() => "93" case Name94() => "94" + case Name95() => "95" case Name96() => "96" case Name97() => "97" case Name98() => "98" case Name99() => "99" + case Name100() => "100" case Name101() => "101" case Name102() => "102" case Name103() => "103" case Name104() => "104" + case Name105() => "105" case Name106() => "106" case Name107() => "107" 
case Name108() => "108" case Name109() => "109" + case Name110() => "110" case Name111() => "111" case Name112() => "112" case Name113() => "113" case Name114() => "114" + case Name115() => "115" case Name116() => "116" case Name117() => "117" case Name118() => "118" case Name119() => "119" + case Name120() => "120" case Name121() => "121" case Name122() => "122" case Name123() => "123" case Name124() => "124" + case Name125() => "125" case Name126() => "126" case Name127() => "127" case Name128() => "128" case Name129() => "129" + case Name130() => "130" case Name131() => "131" case Name132() => "132" case Name133() => "133" case Name134() => "134" + case Name135() => "135" case Name136() => "136" case Name137() => "137" case Name138() => "138" case Name139() => "139" + case Name140() => "140" case Name141() => "141" case Name142() => "142" case Name143() => "143" case Name144() => "144" + case Name145() => "145" case Name146() => "146" case Name147() => "147" case Name148() => "148" case Name149() => "149" + case Name150() => "150" case Name151() => "151" case Name152() => "152" case Name153() => "153" case Name154() => "154" + case Name155() => "155" case Name156() => "156" case Name157() => "157" case Name158() => "158" case Name159() => "159" + case Name160() => "160" case Name161() => "161" case Name162() => "162" case Name163() => "163" case Name164() => "164" + case Name165() => "165" case Name166() => "166" case Name167() => "167" case Name168() => "168" case Name169() => "169" + case Name170() => "170" case Name171() => "171" case Name172() => "172" case Name173() => "173" case Name174() => "174" + case Name175() => "175" case Name176() => "176" case Name177() => "177" case Name178() => "178" case Name179() => "179" + case Name180() => "180" case Name181() => "181" case Name182() => "182" case Name183() => "183" case Name184() => "184" + case Name185() => "185" case Name186() => "186" case Name187() => "187" case Name188() => "188" case Name189() => "189" + case Name190() => "190" case Name191() => "191" case Name192() => "192" case Name193() => "193" case Name194() => "194" + case Name195() => "195" case Name196() => "196" case Name197() => "197" case Name198() => "198" case Name199() => "199" + case Name200() => "200" case Name201() => "201" case Name202() => "202" case Name203() => "203" case Name204() => "204" + case Name205() => "205" case Name206() => "206" case Name207() => "207" case Name208() => "208" case Name209() => "209" + case Name210() => "210" case Name211() => "211" case Name212() => "212" case Name213() => "213" case Name214() => "214" + case Name215() => "215" case Name216() => "216" case Name217() => "217" case Name218() => "218" case Name219() => "219" + case Name220() => "220" case Name221() => "221" case Name222() => "222" case Name223() => "223" case Name224() => "224" + case Name225() => "225" case Name226() => "226" case Name227() => "227" case Name228() => "228" case Name229() => "229" + case Name230() => "230" case Name231() => "231" case Name232() => "232" case Name233() => "233" case Name234() => "234" + case Name235() => "235" case Name236() => "236" case Name237() => "237" case Name238() => "238" case Name239() => "239" + case Name240() => "240" case Name241() => "241" case Name242() => "242" case Name243() => "243" case Name244() => "244" + case Name245() => "245" case Name246() => "246" case Name247() => "247" case Name248() => "248" case Name249() => "249" + case Name250() => "250" case Name251() => "251" case Name252() => "252" case Name253() 
=> "253" case Name254() => "254" + case Name255() => "255" + } + bh.consume(x) + i += 1 + } + } + + @Benchmark @OperationsPerInvocation(count) def virtualShow(bh: Blackhole): Unit = { + val names = this.names + var i = 0 + while (i < names.length) { + bh.consume(names(i).virtualShow) + i += 1 + } + } + + @Benchmark @OperationsPerInvocation(count) def intSwitchShow(bh: Blackhole): Unit = { + val names = this.names + var i = 0 + while (i < names.length) { + val x = (names(i)._id: @switch) match { + case 0 => "0" case 1 => "1" case 2 => "2" case 3 => "3" case 4 => "4" + case 5 => "5" case 6 => "6" case 7 => "7" case 8 => "8" case 9 => "9" + case 10 => "10" case 11 => "11" case 12 => "12" case 13 => "13" case 14 => "14" + case 15 => "15" case 16 => "16" case 17 => "17" case 18 => "18" case 19 => "19" + case 20 => "20" case 21 => "21" case 22 => "22" case 23 => "23" case 24 => "24" + case 25 => "25" case 26 => "26" case 27 => "27" case 28 => "28" case 29 => "29" + case 30 => "30" case 31 => "31" case 32 => "32" case 33 => "33" case 34 => "34" + case 35 => "35" case 36 => "36" case 37 => "37" case 38 => "38" case 39 => "39" + case 40 => "40" case 41 => "41" case 42 => "42" case 43 => "43" case 44 => "44" + case 45 => "45" case 46 => "46" case 47 => "47" case 48 => "48" case 49 => "49" + case 50 => "50" case 51 => "51" case 52 => "52" case 53 => "53" case 54 => "54" + case 55 => "55" case 56 => "56" case 57 => "57" case 58 => "58" case 59 => "59" + case 60 => "60" case 61 => "61" case 62 => "62" case 63 => "63" case 64 => "64" + case 65 => "65" case 66 => "66" case 67 => "67" case 68 => "68" case 69 => "69" + case 70 => "70" case 71 => "71" case 72 => "72" case 73 => "73" case 74 => "74" + case 75 => "75" case 76 => "76" case 77 => "77" case 78 => "78" case 79 => "79" + case 80 => "80" case 81 => "81" case 82 => "82" case 83 => "83" case 84 => "84" + case 85 => "85" case 86 => "86" case 87 => "87" case 88 => "88" case 89 => "89" + case 90 => "90" case 91 => "91" case 92 => "92" case 93 => "93" case 94 => "94" + case 95 => "95" case 96 => "96" case 97 => "97" case 98 => "98" case 99 => "99" + case 100 => "100" case 101 => "101" case 102 => "102" case 103 => "103" case 104 => "104" + case 105 => "105" case 106 => "106" case 107 => "107" case 108 => "108" case 109 => "109" + case 110 => "110" case 111 => "111" case 112 => "112" case 113 => "113" case 114 => "114" + case 115 => "115" case 116 => "116" case 117 => "117" case 118 => "118" case 119 => "119" + case 120 => "120" case 121 => "121" case 122 => "122" case 123 => "123" case 124 => "124" + case 125 => "125" case 126 => "126" case 127 => "127" case 128 => "128" case 129 => "129" + case 130 => "130" case 131 => "131" case 132 => "132" case 133 => "133" case 134 => "134" + case 135 => "135" case 136 => "136" case 137 => "137" case 138 => "138" case 139 => "139" + case 140 => "140" case 141 => "141" case 142 => "142" case 143 => "143" case 144 => "144" + case 145 => "145" case 146 => "146" case 147 => "147" case 148 => "148" case 149 => "149" + case 150 => "150" case 151 => "151" case 152 => "152" case 153 => "153" case 154 => "154" + case 155 => "155" case 156 => "156" case 157 => "157" case 158 => "158" case 159 => "159" + case 160 => "160" case 161 => "161" case 162 => "162" case 163 => "163" case 164 => "164" + case 165 => "165" case 166 => "166" case 167 => "167" case 168 => "168" case 169 => "169" + case 170 => "170" case 171 => "171" case 172 => "172" case 173 => "173" case 174 => "174" + case 175 => "175" case 176 => "176" case 177 => 
"177" case 178 => "178" case 179 => "179" + case 180 => "180" case 181 => "181" case 182 => "182" case 183 => "183" case 184 => "184" + case 185 => "185" case 186 => "186" case 187 => "187" case 188 => "188" case 189 => "189" + case 190 => "190" case 191 => "191" case 192 => "192" case 193 => "193" case 194 => "194" + case 195 => "195" case 196 => "196" case 197 => "197" case 198 => "198" case 199 => "199" + case 200 => "200" case 201 => "201" case 202 => "202" case 203 => "203" case 204 => "204" + case 205 => "205" case 206 => "206" case 207 => "207" case 208 => "208" case 209 => "209" + case 210 => "210" case 211 => "211" case 212 => "212" case 213 => "213" case 214 => "214" + case 215 => "215" case 216 => "216" case 217 => "217" case 218 => "218" case 219 => "219" + case 220 => "220" case 221 => "221" case 222 => "222" case 223 => "223" case 224 => "224" + case 225 => "225" case 226 => "226" case 227 => "227" case 228 => "228" case 229 => "229" + case 230 => "230" case 231 => "231" case 232 => "232" case 233 => "233" case 234 => "234" + case 235 => "235" case 236 => "236" case 237 => "237" case 238 => "238" case 239 => "239" + case 240 => "240" case 241 => "241" case 242 => "242" case 243 => "243" case 244 => "244" + case 245 => "245" case 246 => "246" case 247 => "247" case 248 => "248" case 249 => "249" + case 250 => "250" case 251 => "251" case 252 => "252" case 253 => "253" case 254 => "254" + case 255 => "255" + } + bh.consume(x) + i += 1 + } + } + + @Benchmark @OperationsPerInvocation(count) def justClassValueLookup(bh: Blackhole): Unit = { + val names = this.names + val classValue = this.classValue + var i = 0 + while (i < names.length) { + bh.consume(classValue.get(names(i).getClass)) + i += 1 + } + } + + @Benchmark @OperationsPerInvocation(count) def classValueShow(bh: Blackhole): Unit = { + val names = this.names + val classValue = this.classValue + var i = 0 + while (i < names.length) { + val x = (classValue.get(names(i).getClass): @switch) match { + case 0 => "0" case 1 => "1" case 2 => "2" case 3 => "3" case 4 => "4" + case 5 => "5" case 6 => "6" case 7 => "7" case 8 => "8" case 9 => "9" + case 10 => "10" case 11 => "11" case 12 => "12" case 13 => "13" case 14 => "14" + case 15 => "15" case 16 => "16" case 17 => "17" case 18 => "18" case 19 => "19" + case 20 => "20" case 21 => "21" case 22 => "22" case 23 => "23" case 24 => "24" + case 25 => "25" case 26 => "26" case 27 => "27" case 28 => "28" case 29 => "29" + case 30 => "30" case 31 => "31" case 32 => "32" case 33 => "33" case 34 => "34" + case 35 => "35" case 36 => "36" case 37 => "37" case 38 => "38" case 39 => "39" + case 40 => "40" case 41 => "41" case 42 => "42" case 43 => "43" case 44 => "44" + case 45 => "45" case 46 => "46" case 47 => "47" case 48 => "48" case 49 => "49" + case 50 => "50" case 51 => "51" case 52 => "52" case 53 => "53" case 54 => "54" + case 55 => "55" case 56 => "56" case 57 => "57" case 58 => "58" case 59 => "59" + case 60 => "60" case 61 => "61" case 62 => "62" case 63 => "63" case 64 => "64" + case 65 => "65" case 66 => "66" case 67 => "67" case 68 => "68" case 69 => "69" + case 70 => "70" case 71 => "71" case 72 => "72" case 73 => "73" case 74 => "74" + case 75 => "75" case 76 => "76" case 77 => "77" case 78 => "78" case 79 => "79" + case 80 => "80" case 81 => "81" case 82 => "82" case 83 => "83" case 84 => "84" + case 85 => "85" case 86 => "86" case 87 => "87" case 88 => "88" case 89 => "89" + case 90 => "90" case 91 => "91" case 92 => "92" case 93 => "93" case 94 => "94" + case 95 => "95" 
case 96 => "96" case 97 => "97" case 98 => "98" case 99 => "99" + case 100 => "100" case 101 => "101" case 102 => "102" case 103 => "103" case 104 => "104" + case 105 => "105" case 106 => "106" case 107 => "107" case 108 => "108" case 109 => "109" + case 110 => "110" case 111 => "111" case 112 => "112" case 113 => "113" case 114 => "114" + case 115 => "115" case 116 => "116" case 117 => "117" case 118 => "118" case 119 => "119" + case 120 => "120" case 121 => "121" case 122 => "122" case 123 => "123" case 124 => "124" + case 125 => "125" case 126 => "126" case 127 => "127" case 128 => "128" case 129 => "129" + case 130 => "130" case 131 => "131" case 132 => "132" case 133 => "133" case 134 => "134" + case 135 => "135" case 136 => "136" case 137 => "137" case 138 => "138" case 139 => "139" + case 140 => "140" case 141 => "141" case 142 => "142" case 143 => "143" case 144 => "144" + case 145 => "145" case 146 => "146" case 147 => "147" case 148 => "148" case 149 => "149" + case 150 => "150" case 151 => "151" case 152 => "152" case 153 => "153" case 154 => "154" + case 155 => "155" case 156 => "156" case 157 => "157" case 158 => "158" case 159 => "159" + case 160 => "160" case 161 => "161" case 162 => "162" case 163 => "163" case 164 => "164" + case 165 => "165" case 166 => "166" case 167 => "167" case 168 => "168" case 169 => "169" + case 170 => "170" case 171 => "171" case 172 => "172" case 173 => "173" case 174 => "174" + case 175 => "175" case 176 => "176" case 177 => "177" case 178 => "178" case 179 => "179" + case 180 => "180" case 181 => "181" case 182 => "182" case 183 => "183" case 184 => "184" + case 185 => "185" case 186 => "186" case 187 => "187" case 188 => "188" case 189 => "189" + case 190 => "190" case 191 => "191" case 192 => "192" case 193 => "193" case 194 => "194" + case 195 => "195" case 196 => "196" case 197 => "197" case 198 => "198" case 199 => "199" + case 200 => "200" case 201 => "201" case 202 => "202" case 203 => "203" case 204 => "204" + case 205 => "205" case 206 => "206" case 207 => "207" case 208 => "208" case 209 => "209" + case 210 => "210" case 211 => "211" case 212 => "212" case 213 => "213" case 214 => "214" + case 215 => "215" case 216 => "216" case 217 => "217" case 218 => "218" case 219 => "219" + case 220 => "220" case 221 => "221" case 222 => "222" case 223 => "223" case 224 => "224" + case 225 => "225" case 226 => "226" case 227 => "227" case 228 => "228" case 229 => "229" + case 230 => "230" case 231 => "231" case 232 => "232" case 233 => "233" case 234 => "234" + case 235 => "235" case 236 => "236" case 237 => "237" case 238 => "238" case 239 => "239" + case 240 => "240" case 241 => "241" case 242 => "242" case 243 => "243" case 244 => "244" + case 245 => "245" case 246 => "246" case 247 => "247" case 248 => "248" case 249 => "249" + case 250 => "250" case 251 => "251" case 252 => "252" case 253 => "253" case 254 => "254" + case 255 => "255" + } + bh.consume(x) + i += 1 + } + } + + @Benchmark @OperationsPerInvocation(count) def classNameHashSwitchShow(bh: Blackhole): Unit = { + val names = this.names + var i = 0 + while (i < names.length) { + val name = names(i) + val cls = name.getClass + val x = ((cls.getName.##): @switch) match { + case -1200720095 => "0" + case -1200720094 => "1" + case -1200720093 => "2" + case -1200720092 => "3" + case -1200720091 => "4" + case -1200720090 => "5" + case -1200720089 => "6" + case -1200720088 => "7" + case -1200720087 => "8" + case -1200720086 => "9" + case 1432382798 => "10" + case 1432382799 => "11" + case 
1432382800 => "12" + case 1432382801 => "13" + case 1432382802 => "14" + case 1432382803 => "15" + case 1432382804 => "16" + case 1432382805 => "17" + case 1432382806 => "18" + case 1432382807 => "19" + case 1432382829 => "20" + case 1432382830 => "21" + case 1432382831 => "22" + case 1432382832 => "23" + case 1432382833 => "24" + case 1432382834 => "25" + case 1432382835 => "26" + case 1432382836 => "27" + case 1432382837 => "28" + case 1432382838 => "29" + case 1432382860 => "30" + case 1432382861 => "31" + case 1432382862 => "32" + case 1432382863 => "33" + case 1432382864 => "34" + case 1432382865 => "35" + case 1432382866 => "36" + case 1432382867 => "37" + case 1432382868 => "38" + case 1432382869 => "39" + case 1432382891 => "40" + case 1432382892 => "41" + case 1432382893 => "42" + case 1432382894 => "43" + case 1432382895 => "44" + case 1432382896 => "45" + case 1432382897 => "46" + case 1432382898 => "47" + case 1432382899 => "48" + case 1432382900 => "49" + case 1432382922 => "50" + case 1432382923 => "51" + case 1432382924 => "52" + case 1432382925 => "53" + case 1432382926 => "54" + case 1432382927 => "55" + case 1432382928 => "56" + case 1432382929 => "57" + case 1432382930 => "58" + case 1432382931 => "59" + case 1432382953 => "60" + case 1432382954 => "61" + case 1432382955 => "62" + case 1432382956 => "63" + case 1432382957 => "64" + case 1432382958 => "65" + case 1432382959 => "66" + case 1432382960 => "67" + case 1432382961 => "68" + case 1432382962 => "69" + case 1432382984 => "70" + case 1432382985 => "71" + case 1432382986 => "72" + case 1432382987 => "73" + case 1432382988 => "74" + case 1432382989 => "75" + case 1432382990 => "76" + case 1432382991 => "77" + case 1432382992 => "78" + case 1432382993 => "79" + case 1432383015 => "80" + case 1432383016 => "81" + case 1432383017 => "82" + case 1432383018 => "83" + case 1432383019 => "84" + case 1432383020 => "85" + case 1432383021 => "86" + case 1432383022 => "87" + case 1432383023 => "88" + case 1432383024 => "89" + case 1432383046 => "90" + case 1432383047 => "91" + case 1432383048 => "92" + case 1432383049 => "93" + case 1432383050 => "94" + case 1432383051 => "95" + case 1432383052 => "96" + case 1432383053 => "97" + case 1432383054 => "98" + case 1432383055 => "99" + case 1454193826 => "100" + case 1454193827 => "101" + case 1454193828 => "102" + case 1454193829 => "103" + case 1454193830 => "104" + case 1454193831 => "105" + case 1454193832 => "106" + case 1454193833 => "107" + case 1454193834 => "108" + case 1454193835 => "109" + case 1454193857 => "110" + case 1454193858 => "111" + case 1454193859 => "112" + case 1454193860 => "113" + case 1454193861 => "114" + case 1454193862 => "115" + case 1454193863 => "116" + case 1454193864 => "117" + case 1454193865 => "118" + case 1454193866 => "119" + case 1454193888 => "120" + case 1454193889 => "121" + case 1454193890 => "122" + case 1454193891 => "123" + case 1454193892 => "124" + case 1454193893 => "125" + case 1454193894 => "126" + case 1454193895 => "127" + case 1454193896 => "128" + case 1454193897 => "129" + case 1454193919 => "130" + case 1454193920 => "131" + case 1454193921 => "132" + case 1454193922 => "133" + case 1454193923 => "134" + case 1454193924 => "135" + case 1454193925 => "136" + case 1454193926 => "137" + case 1454193927 => "138" + case 1454193928 => "139" + case 1454193950 => "140" + case 1454193951 => "141" + case 1454193952 => "142" + case 1454193953 => "143" + case 1454193954 => "144" + case 1454193955 => "145" + case 1454193956 => "146" + 
case 1454193957 => "147" + case 1454193958 => "148" + case 1454193959 => "149" + case 1454193981 => "150" + case 1454193982 => "151" + case 1454193983 => "152" + case 1454193984 => "153" + case 1454193985 => "154" + case 1454193986 => "155" + case 1454193987 => "156" + case 1454193988 => "157" + case 1454193989 => "158" + case 1454193990 => "159" + case 1454194012 => "160" + case 1454194013 => "161" + case 1454194014 => "162" + case 1454194015 => "163" + case 1454194016 => "164" + case 1454194017 => "165" + case 1454194018 => "166" + case 1454194019 => "167" + case 1454194020 => "168" + case 1454194021 => "169" + case 1454194043 => "170" + case 1454194044 => "171" + case 1454194045 => "172" + case 1454194046 => "173" + case 1454194047 => "174" + case 1454194048 => "175" + case 1454194049 => "176" + case 1454194050 => "177" + case 1454194051 => "178" + case 1454194052 => "179" + case 1454194074 => "180" + case 1454194075 => "181" + case 1454194076 => "182" + case 1454194077 => "183" + case 1454194078 => "184" + case 1454194079 => "185" + case 1454194080 => "186" + case 1454194081 => "187" + case 1454194082 => "188" + case 1454194083 => "189" + case 1454194105 => "190" + case 1454194106 => "191" + case 1454194107 => "192" + case 1454194108 => "193" + case 1454194109 => "194" + case 1454194110 => "195" + case 1454194111 => "196" + case 1454194112 => "197" + case 1454194113 => "198" + case 1454194114 => "199" + case 1454194787 => "200" + case 1454194788 => "201" + case 1454194789 => "202" + case 1454194790 => "203" + case 1454194791 => "204" + case 1454194792 => "205" + case 1454194793 => "206" + case 1454194794 => "207" + case 1454194795 => "208" + case 1454194796 => "209" + case 1454194818 => "210" + case 1454194819 => "211" + case 1454194820 => "212" + case 1454194821 => "213" + case 1454194822 => "214" + case 1454194823 => "215" + case 1454194824 => "216" + case 1454194825 => "217" + case 1454194826 => "218" + case 1454194827 => "219" + case 1454194849 => "220" + case 1454194850 => "221" + case 1454194851 => "222" + case 1454194852 => "223" + case 1454194853 => "224" + case 1454194854 => "225" + case 1454194855 => "226" + case 1454194856 => "227" + case 1454194857 => "228" + case 1454194858 => "229" + case 1454194880 => "230" + case 1454194881 => "231" + case 1454194882 => "232" + case 1454194883 => "233" + case 1454194884 => "234" + case 1454194885 => "235" + case 1454194886 => "236" + case 1454194887 => "237" + case 1454194888 => "238" + case 1454194889 => "239" + case 1454194911 => "240" + case 1454194912 => "241" + case 1454194913 => "242" + case 1454194914 => "243" + case 1454194915 => "244" + case 1454194916 => "245" + case 1454194917 => "246" + case 1454194918 => "247" + case 1454194919 => "248" + case 1454194920 => "249" + case 1454194942 => "250" + case 1454194943 => "251" + case 1454194944 => "252" + case 1454194945 => "253" + case 1454194946 => "254" + case 1454194947 => "255" + case hashCode => throw new MatchError(s"No case for: $name -> $cls -> $hashCode") + } + bh.consume(x) + i += 1 + } + } + +/* + This benchmark compares pattern matching to alternatives, specifically: + 1. using virtual methods instead (like our Tree#transform/traverse) + 2. doing a tableswitch on int field (like our Promise.Transformation) + 3. using a ClassValue as a more efficient way to store the int (like exotic's TypeSwitch) + 4. using the instance's class's name's hash, which are all memoised, in a jumptable + + The results appear to indicate that: + + 1. 
< 16 cases, patmat beats virtual method calls + 2. = 16 cases, patmat vs virtual overlap in error margins + 3. > 16 cases, patmat loses to virtual method calls + 4. int switching seems to only out perform virtual at 32+ cases + 5. class name hash switching beats class value, up to 32 cases (and matches performance at 64) +*/ +} + +final class NameClassValue extends ClassValue[Int] { + def computeValue(runtimeClass: Class[_]) = runtimeClass match { + case ClsName0 => 0 case ClsName1 => 1 case ClsName2 => 2 case ClsName3 => 3 case ClsName4 => 4 + case ClsName5 => 5 case ClsName6 => 6 case ClsName7 => 7 case ClsName8 => 8 case ClsName9 => 9 + case ClsName10 => 10 case ClsName11 => 11 case ClsName12 => 12 case ClsName13 => 13 case ClsName14 => 14 + case ClsName15 => 15 case ClsName16 => 16 case ClsName17 => 17 case ClsName18 => 18 case ClsName19 => 19 + case ClsName20 => 20 case ClsName21 => 21 case ClsName22 => 22 case ClsName23 => 23 case ClsName24 => 24 + case ClsName25 => 25 case ClsName26 => 26 case ClsName27 => 27 case ClsName28 => 28 case ClsName29 => 29 + case ClsName30 => 30 case ClsName31 => 31 case ClsName32 => 32 case ClsName33 => 33 case ClsName34 => 34 + case ClsName35 => 35 case ClsName36 => 36 case ClsName37 => 37 case ClsName38 => 38 case ClsName39 => 39 + case ClsName40 => 40 case ClsName41 => 41 case ClsName42 => 42 case ClsName43 => 43 case ClsName44 => 44 + case ClsName45 => 45 case ClsName46 => 46 case ClsName47 => 47 case ClsName48 => 48 case ClsName49 => 49 + case ClsName50 => 50 case ClsName51 => 51 case ClsName52 => 52 case ClsName53 => 53 case ClsName54 => 54 + case ClsName55 => 55 case ClsName56 => 56 case ClsName57 => 57 case ClsName58 => 58 case ClsName59 => 59 + case ClsName60 => 60 case ClsName61 => 61 case ClsName62 => 62 case ClsName63 => 63 case ClsName64 => 64 + case ClsName65 => 65 case ClsName66 => 66 case ClsName67 => 67 case ClsName68 => 68 case ClsName69 => 69 + case ClsName70 => 70 case ClsName71 => 71 case ClsName72 => 72 case ClsName73 => 73 case ClsName74 => 74 + case ClsName75 => 75 case ClsName76 => 76 case ClsName77 => 77 case ClsName78 => 78 case ClsName79 => 79 + case ClsName80 => 80 case ClsName81 => 81 case ClsName82 => 82 case ClsName83 => 83 case ClsName84 => 84 + case ClsName85 => 85 case ClsName86 => 86 case ClsName87 => 87 case ClsName88 => 88 case ClsName89 => 89 + case ClsName90 => 90 case ClsName91 => 91 case ClsName92 => 92 case ClsName93 => 93 case ClsName94 => 94 + case ClsName95 => 95 case ClsName96 => 96 case ClsName97 => 97 case ClsName98 => 98 case ClsName99 => 99 + case ClsName100 => 100 case ClsName101 => 101 case ClsName102 => 102 case ClsName103 => 103 case ClsName104 => 104 + case ClsName105 => 105 case ClsName106 => 106 case ClsName107 => 107 case ClsName108 => 108 case ClsName109 => 109 + case ClsName110 => 110 case ClsName111 => 111 case ClsName112 => 112 case ClsName113 => 113 case ClsName114 => 114 + case ClsName115 => 115 case ClsName116 => 116 case ClsName117 => 117 case ClsName118 => 118 case ClsName119 => 119 + case ClsName120 => 120 case ClsName121 => 121 case ClsName122 => 122 case ClsName123 => 123 case ClsName124 => 124 + case ClsName125 => 125 case ClsName126 => 126 case ClsName127 => 127 case ClsName128 => 128 case ClsName129 => 129 + case ClsName130 => 130 case ClsName131 => 131 case ClsName132 => 132 case ClsName133 => 133 case ClsName134 => 134 + case ClsName135 => 135 case ClsName136 => 136 case ClsName137 => 137 case ClsName138 => 138 case ClsName139 => 139 + case ClsName140 => 140 case ClsName141 => 
141 case ClsName142 => 142 case ClsName143 => 143 case ClsName144 => 144 + case ClsName145 => 145 case ClsName146 => 146 case ClsName147 => 147 case ClsName148 => 148 case ClsName149 => 149 + case ClsName150 => 150 case ClsName151 => 151 case ClsName152 => 152 case ClsName153 => 153 case ClsName154 => 154 + case ClsName155 => 155 case ClsName156 => 156 case ClsName157 => 157 case ClsName158 => 158 case ClsName159 => 159 + case ClsName160 => 160 case ClsName161 => 161 case ClsName162 => 162 case ClsName163 => 163 case ClsName164 => 164 + case ClsName165 => 165 case ClsName166 => 166 case ClsName167 => 167 case ClsName168 => 168 case ClsName169 => 169 + case ClsName170 => 170 case ClsName171 => 171 case ClsName172 => 172 case ClsName173 => 173 case ClsName174 => 174 + case ClsName175 => 175 case ClsName176 => 176 case ClsName177 => 177 case ClsName178 => 178 case ClsName179 => 179 + case ClsName180 => 180 case ClsName181 => 181 case ClsName182 => 182 case ClsName183 => 183 case ClsName184 => 184 + case ClsName185 => 185 case ClsName186 => 186 case ClsName187 => 187 case ClsName188 => 188 case ClsName189 => 189 + case ClsName190 => 190 case ClsName191 => 191 case ClsName192 => 192 case ClsName193 => 193 case ClsName194 => 194 + case ClsName195 => 195 case ClsName196 => 196 case ClsName197 => 197 case ClsName198 => 198 case ClsName199 => 199 + case ClsName200 => 200 case ClsName201 => 201 case ClsName202 => 202 case ClsName203 => 203 case ClsName204 => 204 + case ClsName205 => 205 case ClsName206 => 206 case ClsName207 => 207 case ClsName208 => 208 case ClsName209 => 209 + case ClsName210 => 210 case ClsName211 => 211 case ClsName212 => 212 case ClsName213 => 213 case ClsName214 => 214 + case ClsName215 => 215 case ClsName216 => 216 case ClsName217 => 217 case ClsName218 => 218 case ClsName219 => 219 + case ClsName220 => 220 case ClsName221 => 221 case ClsName222 => 222 case ClsName223 => 223 case ClsName224 => 224 + case ClsName225 => 225 case ClsName226 => 226 case ClsName227 => 227 case ClsName228 => 228 case ClsName229 => 229 + case ClsName230 => 230 case ClsName231 => 231 case ClsName232 => 232 case ClsName233 => 233 case ClsName234 => 234 + case ClsName235 => 235 case ClsName236 => 236 case ClsName237 => 237 case ClsName238 => 238 case ClsName239 => 239 + case ClsName240 => 240 case ClsName241 => 241 case ClsName242 => 242 case ClsName243 => 243 case ClsName244 => 244 + case ClsName245 => 245 case ClsName246 => 246 case ClsName247 => 247 case ClsName248 => 248 case ClsName249 => 249 + case ClsName250 => 250 case ClsName251 => 251 case ClsName252 => 252 case ClsName253 => 253 case ClsName254 => 254 + case ClsName255 => 255 + } + + private val ClsName0 = classOf[Name0] + private val ClsName1 = classOf[Name1] + private val ClsName2 = classOf[Name2] + private val ClsName3 = classOf[Name3] + private val ClsName4 = classOf[Name4] + private val ClsName5 = classOf[Name5] + private val ClsName6 = classOf[Name6] + private val ClsName7 = classOf[Name7] + private val ClsName8 = classOf[Name8] + private val ClsName9 = classOf[Name9] + private val ClsName10 = classOf[Name10] + private val ClsName11 = classOf[Name11] + private val ClsName12 = classOf[Name12] + private val ClsName13 = classOf[Name13] + private val ClsName14 = classOf[Name14] + private val ClsName15 = classOf[Name15] + private val ClsName16 = classOf[Name16] + private val ClsName17 = classOf[Name17] + private val ClsName18 = classOf[Name18] + private val ClsName19 = classOf[Name19] + private val ClsName20 = classOf[Name20] + private val 
ClsName21 = classOf[Name21] + private val ClsName22 = classOf[Name22] + private val ClsName23 = classOf[Name23] + private val ClsName24 = classOf[Name24] + private val ClsName25 = classOf[Name25] + private val ClsName26 = classOf[Name26] + private val ClsName27 = classOf[Name27] + private val ClsName28 = classOf[Name28] + private val ClsName29 = classOf[Name29] + private val ClsName30 = classOf[Name30] + private val ClsName31 = classOf[Name31] + private val ClsName32 = classOf[Name32] + private val ClsName33 = classOf[Name33] + private val ClsName34 = classOf[Name34] + private val ClsName35 = classOf[Name35] + private val ClsName36 = classOf[Name36] + private val ClsName37 = classOf[Name37] + private val ClsName38 = classOf[Name38] + private val ClsName39 = classOf[Name39] + private val ClsName40 = classOf[Name40] + private val ClsName41 = classOf[Name41] + private val ClsName42 = classOf[Name42] + private val ClsName43 = classOf[Name43] + private val ClsName44 = classOf[Name44] + private val ClsName45 = classOf[Name45] + private val ClsName46 = classOf[Name46] + private val ClsName47 = classOf[Name47] + private val ClsName48 = classOf[Name48] + private val ClsName49 = classOf[Name49] + private val ClsName50 = classOf[Name50] + private val ClsName51 = classOf[Name51] + private val ClsName52 = classOf[Name52] + private val ClsName53 = classOf[Name53] + private val ClsName54 = classOf[Name54] + private val ClsName55 = classOf[Name55] + private val ClsName56 = classOf[Name56] + private val ClsName57 = classOf[Name57] + private val ClsName58 = classOf[Name58] + private val ClsName59 = classOf[Name59] + private val ClsName60 = classOf[Name60] + private val ClsName61 = classOf[Name61] + private val ClsName62 = classOf[Name62] + private val ClsName63 = classOf[Name63] + private val ClsName64 = classOf[Name64] + private val ClsName65 = classOf[Name65] + private val ClsName66 = classOf[Name66] + private val ClsName67 = classOf[Name67] + private val ClsName68 = classOf[Name68] + private val ClsName69 = classOf[Name69] + private val ClsName70 = classOf[Name70] + private val ClsName71 = classOf[Name71] + private val ClsName72 = classOf[Name72] + private val ClsName73 = classOf[Name73] + private val ClsName74 = classOf[Name74] + private val ClsName75 = classOf[Name75] + private val ClsName76 = classOf[Name76] + private val ClsName77 = classOf[Name77] + private val ClsName78 = classOf[Name78] + private val ClsName79 = classOf[Name79] + private val ClsName80 = classOf[Name80] + private val ClsName81 = classOf[Name81] + private val ClsName82 = classOf[Name82] + private val ClsName83 = classOf[Name83] + private val ClsName84 = classOf[Name84] + private val ClsName85 = classOf[Name85] + private val ClsName86 = classOf[Name86] + private val ClsName87 = classOf[Name87] + private val ClsName88 = classOf[Name88] + private val ClsName89 = classOf[Name89] + private val ClsName90 = classOf[Name90] + private val ClsName91 = classOf[Name91] + private val ClsName92 = classOf[Name92] + private val ClsName93 = classOf[Name93] + private val ClsName94 = classOf[Name94] + private val ClsName95 = classOf[Name95] + private val ClsName96 = classOf[Name96] + private val ClsName97 = classOf[Name97] + private val ClsName98 = classOf[Name98] + private val ClsName99 = classOf[Name99] + private val ClsName100 = classOf[Name100] + private val ClsName101 = classOf[Name101] + private val ClsName102 = classOf[Name102] + private val ClsName103 = classOf[Name103] + private val ClsName104 = classOf[Name104] + private val ClsName105 = 
classOf[Name105] + private val ClsName106 = classOf[Name106] + private val ClsName107 = classOf[Name107] + private val ClsName108 = classOf[Name108] + private val ClsName109 = classOf[Name109] + private val ClsName110 = classOf[Name110] + private val ClsName111 = classOf[Name111] + private val ClsName112 = classOf[Name112] + private val ClsName113 = classOf[Name113] + private val ClsName114 = classOf[Name114] + private val ClsName115 = classOf[Name115] + private val ClsName116 = classOf[Name116] + private val ClsName117 = classOf[Name117] + private val ClsName118 = classOf[Name118] + private val ClsName119 = classOf[Name119] + private val ClsName120 = classOf[Name120] + private val ClsName121 = classOf[Name121] + private val ClsName122 = classOf[Name122] + private val ClsName123 = classOf[Name123] + private val ClsName124 = classOf[Name124] + private val ClsName125 = classOf[Name125] + private val ClsName126 = classOf[Name126] + private val ClsName127 = classOf[Name127] + private val ClsName128 = classOf[Name128] + private val ClsName129 = classOf[Name129] + private val ClsName130 = classOf[Name130] + private val ClsName131 = classOf[Name131] + private val ClsName132 = classOf[Name132] + private val ClsName133 = classOf[Name133] + private val ClsName134 = classOf[Name134] + private val ClsName135 = classOf[Name135] + private val ClsName136 = classOf[Name136] + private val ClsName137 = classOf[Name137] + private val ClsName138 = classOf[Name138] + private val ClsName139 = classOf[Name139] + private val ClsName140 = classOf[Name140] + private val ClsName141 = classOf[Name141] + private val ClsName142 = classOf[Name142] + private val ClsName143 = classOf[Name143] + private val ClsName144 = classOf[Name144] + private val ClsName145 = classOf[Name145] + private val ClsName146 = classOf[Name146] + private val ClsName147 = classOf[Name147] + private val ClsName148 = classOf[Name148] + private val ClsName149 = classOf[Name149] + private val ClsName150 = classOf[Name150] + private val ClsName151 = classOf[Name151] + private val ClsName152 = classOf[Name152] + private val ClsName153 = classOf[Name153] + private val ClsName154 = classOf[Name154] + private val ClsName155 = classOf[Name155] + private val ClsName156 = classOf[Name156] + private val ClsName157 = classOf[Name157] + private val ClsName158 = classOf[Name158] + private val ClsName159 = classOf[Name159] + private val ClsName160 = classOf[Name160] + private val ClsName161 = classOf[Name161] + private val ClsName162 = classOf[Name162] + private val ClsName163 = classOf[Name163] + private val ClsName164 = classOf[Name164] + private val ClsName165 = classOf[Name165] + private val ClsName166 = classOf[Name166] + private val ClsName167 = classOf[Name167] + private val ClsName168 = classOf[Name168] + private val ClsName169 = classOf[Name169] + private val ClsName170 = classOf[Name170] + private val ClsName171 = classOf[Name171] + private val ClsName172 = classOf[Name172] + private val ClsName173 = classOf[Name173] + private val ClsName174 = classOf[Name174] + private val ClsName175 = classOf[Name175] + private val ClsName176 = classOf[Name176] + private val ClsName177 = classOf[Name177] + private val ClsName178 = classOf[Name178] + private val ClsName179 = classOf[Name179] + private val ClsName180 = classOf[Name180] + private val ClsName181 = classOf[Name181] + private val ClsName182 = classOf[Name182] + private val ClsName183 = classOf[Name183] + private val ClsName184 = classOf[Name184] + private val ClsName185 = classOf[Name185] + private val 
ClsName186 = classOf[Name186] + private val ClsName187 = classOf[Name187] + private val ClsName188 = classOf[Name188] + private val ClsName189 = classOf[Name189] + private val ClsName190 = classOf[Name190] + private val ClsName191 = classOf[Name191] + private val ClsName192 = classOf[Name192] + private val ClsName193 = classOf[Name193] + private val ClsName194 = classOf[Name194] + private val ClsName195 = classOf[Name195] + private val ClsName196 = classOf[Name196] + private val ClsName197 = classOf[Name197] + private val ClsName198 = classOf[Name198] + private val ClsName199 = classOf[Name199] + private val ClsName200 = classOf[Name200] + private val ClsName201 = classOf[Name201] + private val ClsName202 = classOf[Name202] + private val ClsName203 = classOf[Name203] + private val ClsName204 = classOf[Name204] + private val ClsName205 = classOf[Name205] + private val ClsName206 = classOf[Name206] + private val ClsName207 = classOf[Name207] + private val ClsName208 = classOf[Name208] + private val ClsName209 = classOf[Name209] + private val ClsName210 = classOf[Name210] + private val ClsName211 = classOf[Name211] + private val ClsName212 = classOf[Name212] + private val ClsName213 = classOf[Name213] + private val ClsName214 = classOf[Name214] + private val ClsName215 = classOf[Name215] + private val ClsName216 = classOf[Name216] + private val ClsName217 = classOf[Name217] + private val ClsName218 = classOf[Name218] + private val ClsName219 = classOf[Name219] + private val ClsName220 = classOf[Name220] + private val ClsName221 = classOf[Name221] + private val ClsName222 = classOf[Name222] + private val ClsName223 = classOf[Name223] + private val ClsName224 = classOf[Name224] + private val ClsName225 = classOf[Name225] + private val ClsName226 = classOf[Name226] + private val ClsName227 = classOf[Name227] + private val ClsName228 = classOf[Name228] + private val ClsName229 = classOf[Name229] + private val ClsName230 = classOf[Name230] + private val ClsName231 = classOf[Name231] + private val ClsName232 = classOf[Name232] + private val ClsName233 = classOf[Name233] + private val ClsName234 = classOf[Name234] + private val ClsName235 = classOf[Name235] + private val ClsName236 = classOf[Name236] + private val ClsName237 = classOf[Name237] + private val ClsName238 = classOf[Name238] + private val ClsName239 = classOf[Name239] + private val ClsName240 = classOf[Name240] + private val ClsName241 = classOf[Name241] + private val ClsName242 = classOf[Name242] + private val ClsName243 = classOf[Name243] + private val ClsName244 = classOf[Name244] + private val ClsName245 = classOf[Name245] + private val ClsName246 = classOf[Name246] + private val ClsName247 = classOf[Name247] + private val ClsName248 = classOf[Name248] + private val ClsName249 = classOf[Name249] + private val ClsName250 = classOf[Name250] + private val ClsName251 = classOf[Name251] + private val ClsName252 = classOf[Name252] + private val ClsName253 = classOf[Name253] + private val ClsName254 = classOf[Name254] + private val ClsName255 = classOf[Name255] +} + +sealed abstract class Name(val _id: Int) { + def virtualShow: String +} + +final case class Name0() extends Name(0) { def virtualShow = "0" } +final case class Name1() extends Name(1) { def virtualShow = "1" } +final case class Name2() extends Name(2) { def virtualShow = "2" } +final case class Name3() extends Name(3) { def virtualShow = "3" } +final case class Name4() extends Name(4) { def virtualShow = "4" } +final case class Name5() extends Name(5) { def virtualShow = "5" } 
+final case class Name6() extends Name(6) { def virtualShow = "6" } +final case class Name7() extends Name(7) { def virtualShow = "7" } +final case class Name8() extends Name(8) { def virtualShow = "8" } +final case class Name9() extends Name(9) { def virtualShow = "9" } +final case class Name10() extends Name(10) { def virtualShow = "10" } +final case class Name11() extends Name(11) { def virtualShow = "11" } +final case class Name12() extends Name(12) { def virtualShow = "12" } +final case class Name13() extends Name(13) { def virtualShow = "13" } +final case class Name14() extends Name(14) { def virtualShow = "14" } +final case class Name15() extends Name(15) { def virtualShow = "15" } +final case class Name16() extends Name(16) { def virtualShow = "16" } +final case class Name17() extends Name(17) { def virtualShow = "17" } +final case class Name18() extends Name(18) { def virtualShow = "18" } +final case class Name19() extends Name(19) { def virtualShow = "19" } +final case class Name20() extends Name(20) { def virtualShow = "20" } +final case class Name21() extends Name(21) { def virtualShow = "21" } +final case class Name22() extends Name(22) { def virtualShow = "22" } +final case class Name23() extends Name(23) { def virtualShow = "23" } +final case class Name24() extends Name(24) { def virtualShow = "24" } +final case class Name25() extends Name(25) { def virtualShow = "25" } +final case class Name26() extends Name(26) { def virtualShow = "26" } +final case class Name27() extends Name(27) { def virtualShow = "27" } +final case class Name28() extends Name(28) { def virtualShow = "28" } +final case class Name29() extends Name(29) { def virtualShow = "29" } +final case class Name30() extends Name(30) { def virtualShow = "30" } +final case class Name31() extends Name(31) { def virtualShow = "31" } +final case class Name32() extends Name(32) { def virtualShow = "32" } +final case class Name33() extends Name(33) { def virtualShow = "33" } +final case class Name34() extends Name(34) { def virtualShow = "34" } +final case class Name35() extends Name(35) { def virtualShow = "35" } +final case class Name36() extends Name(36) { def virtualShow = "36" } +final case class Name37() extends Name(37) { def virtualShow = "37" } +final case class Name38() extends Name(38) { def virtualShow = "38" } +final case class Name39() extends Name(39) { def virtualShow = "39" } +final case class Name40() extends Name(40) { def virtualShow = "40" } +final case class Name41() extends Name(41) { def virtualShow = "41" } +final case class Name42() extends Name(42) { def virtualShow = "42" } +final case class Name43() extends Name(43) { def virtualShow = "43" } +final case class Name44() extends Name(44) { def virtualShow = "44" } +final case class Name45() extends Name(45) { def virtualShow = "45" } +final case class Name46() extends Name(46) { def virtualShow = "46" } +final case class Name47() extends Name(47) { def virtualShow = "47" } +final case class Name48() extends Name(48) { def virtualShow = "48" } +final case class Name49() extends Name(49) { def virtualShow = "49" } +final case class Name50() extends Name(50) { def virtualShow = "50" } +final case class Name51() extends Name(51) { def virtualShow = "51" } +final case class Name52() extends Name(52) { def virtualShow = "52" } +final case class Name53() extends Name(53) { def virtualShow = "53" } +final case class Name54() extends Name(54) { def virtualShow = "54" } +final case class Name55() extends Name(55) { def virtualShow = "55" } +final case 
class Name56() extends Name(56) { def virtualShow = "56" } +final case class Name57() extends Name(57) { def virtualShow = "57" } +final case class Name58() extends Name(58) { def virtualShow = "58" } +final case class Name59() extends Name(59) { def virtualShow = "59" } +final case class Name60() extends Name(60) { def virtualShow = "60" } +final case class Name61() extends Name(61) { def virtualShow = "61" } +final case class Name62() extends Name(62) { def virtualShow = "62" } +final case class Name63() extends Name(63) { def virtualShow = "63" } +final case class Name64() extends Name(64) { def virtualShow = "64" } +final case class Name65() extends Name(65) { def virtualShow = "65" } +final case class Name66() extends Name(66) { def virtualShow = "66" } +final case class Name67() extends Name(67) { def virtualShow = "67" } +final case class Name68() extends Name(68) { def virtualShow = "68" } +final case class Name69() extends Name(69) { def virtualShow = "69" } +final case class Name70() extends Name(70) { def virtualShow = "70" } +final case class Name71() extends Name(71) { def virtualShow = "71" } +final case class Name72() extends Name(72) { def virtualShow = "72" } +final case class Name73() extends Name(73) { def virtualShow = "73" } +final case class Name74() extends Name(74) { def virtualShow = "74" } +final case class Name75() extends Name(75) { def virtualShow = "75" } +final case class Name76() extends Name(76) { def virtualShow = "76" } +final case class Name77() extends Name(77) { def virtualShow = "77" } +final case class Name78() extends Name(78) { def virtualShow = "78" } +final case class Name79() extends Name(79) { def virtualShow = "79" } +final case class Name80() extends Name(80) { def virtualShow = "80" } +final case class Name81() extends Name(81) { def virtualShow = "81" } +final case class Name82() extends Name(82) { def virtualShow = "82" } +final case class Name83() extends Name(83) { def virtualShow = "83" } +final case class Name84() extends Name(84) { def virtualShow = "84" } +final case class Name85() extends Name(85) { def virtualShow = "85" } +final case class Name86() extends Name(86) { def virtualShow = "86" } +final case class Name87() extends Name(87) { def virtualShow = "87" } +final case class Name88() extends Name(88) { def virtualShow = "88" } +final case class Name89() extends Name(89) { def virtualShow = "89" } +final case class Name90() extends Name(90) { def virtualShow = "90" } +final case class Name91() extends Name(91) { def virtualShow = "91" } +final case class Name92() extends Name(92) { def virtualShow = "92" } +final case class Name93() extends Name(93) { def virtualShow = "93" } +final case class Name94() extends Name(94) { def virtualShow = "94" } +final case class Name95() extends Name(95) { def virtualShow = "95" } +final case class Name96() extends Name(96) { def virtualShow = "96" } +final case class Name97() extends Name(97) { def virtualShow = "97" } +final case class Name98() extends Name(98) { def virtualShow = "98" } +final case class Name99() extends Name(99) { def virtualShow = "99" } +final case class Name100() extends Name(100) { def virtualShow = "100" } +final case class Name101() extends Name(101) { def virtualShow = "101" } +final case class Name102() extends Name(102) { def virtualShow = "102" } +final case class Name103() extends Name(103) { def virtualShow = "103" } +final case class Name104() extends Name(104) { def virtualShow = "104" } +final case class Name105() extends Name(105) { def virtualShow = "105" 
} +final case class Name106() extends Name(106) { def virtualShow = "106" } +final case class Name107() extends Name(107) { def virtualShow = "107" } +final case class Name108() extends Name(108) { def virtualShow = "108" } +final case class Name109() extends Name(109) { def virtualShow = "109" } +final case class Name110() extends Name(110) { def virtualShow = "110" } +final case class Name111() extends Name(111) { def virtualShow = "111" } +final case class Name112() extends Name(112) { def virtualShow = "112" } +final case class Name113() extends Name(113) { def virtualShow = "113" } +final case class Name114() extends Name(114) { def virtualShow = "114" } +final case class Name115() extends Name(115) { def virtualShow = "115" } +final case class Name116() extends Name(116) { def virtualShow = "116" } +final case class Name117() extends Name(117) { def virtualShow = "117" } +final case class Name118() extends Name(118) { def virtualShow = "118" } +final case class Name119() extends Name(119) { def virtualShow = "119" } +final case class Name120() extends Name(120) { def virtualShow = "120" } +final case class Name121() extends Name(121) { def virtualShow = "121" } +final case class Name122() extends Name(122) { def virtualShow = "122" } +final case class Name123() extends Name(123) { def virtualShow = "123" } +final case class Name124() extends Name(124) { def virtualShow = "124" } +final case class Name125() extends Name(125) { def virtualShow = "125" } +final case class Name126() extends Name(126) { def virtualShow = "126" } +final case class Name127() extends Name(127) { def virtualShow = "127" } +final case class Name128() extends Name(128) { def virtualShow = "128" } +final case class Name129() extends Name(129) { def virtualShow = "129" } +final case class Name130() extends Name(130) { def virtualShow = "130" } +final case class Name131() extends Name(131) { def virtualShow = "131" } +final case class Name132() extends Name(132) { def virtualShow = "132" } +final case class Name133() extends Name(133) { def virtualShow = "133" } +final case class Name134() extends Name(134) { def virtualShow = "134" } +final case class Name135() extends Name(135) { def virtualShow = "135" } +final case class Name136() extends Name(136) { def virtualShow = "136" } +final case class Name137() extends Name(137) { def virtualShow = "137" } +final case class Name138() extends Name(138) { def virtualShow = "138" } +final case class Name139() extends Name(139) { def virtualShow = "139" } +final case class Name140() extends Name(140) { def virtualShow = "140" } +final case class Name141() extends Name(141) { def virtualShow = "141" } +final case class Name142() extends Name(142) { def virtualShow = "142" } +final case class Name143() extends Name(143) { def virtualShow = "143" } +final case class Name144() extends Name(144) { def virtualShow = "144" } +final case class Name145() extends Name(145) { def virtualShow = "145" } +final case class Name146() extends Name(146) { def virtualShow = "146" } +final case class Name147() extends Name(147) { def virtualShow = "147" } +final case class Name148() extends Name(148) { def virtualShow = "148" } +final case class Name149() extends Name(149) { def virtualShow = "149" } +final case class Name150() extends Name(150) { def virtualShow = "150" } +final case class Name151() extends Name(151) { def virtualShow = "151" } +final case class Name152() extends Name(152) { def virtualShow = "152" } +final case class Name153() extends Name(153) { def virtualShow = "153" } 
+final case class Name154() extends Name(154) { def virtualShow = "154" } +final case class Name155() extends Name(155) { def virtualShow = "155" } +final case class Name156() extends Name(156) { def virtualShow = "156" } +final case class Name157() extends Name(157) { def virtualShow = "157" } +final case class Name158() extends Name(158) { def virtualShow = "158" } +final case class Name159() extends Name(159) { def virtualShow = "159" } +final case class Name160() extends Name(160) { def virtualShow = "160" } +final case class Name161() extends Name(161) { def virtualShow = "161" } +final case class Name162() extends Name(162) { def virtualShow = "162" } +final case class Name163() extends Name(163) { def virtualShow = "163" } +final case class Name164() extends Name(164) { def virtualShow = "164" } +final case class Name165() extends Name(165) { def virtualShow = "165" } +final case class Name166() extends Name(166) { def virtualShow = "166" } +final case class Name167() extends Name(167) { def virtualShow = "167" } +final case class Name168() extends Name(168) { def virtualShow = "168" } +final case class Name169() extends Name(169) { def virtualShow = "169" } +final case class Name170() extends Name(170) { def virtualShow = "170" } +final case class Name171() extends Name(171) { def virtualShow = "171" } +final case class Name172() extends Name(172) { def virtualShow = "172" } +final case class Name173() extends Name(173) { def virtualShow = "173" } +final case class Name174() extends Name(174) { def virtualShow = "174" } +final case class Name175() extends Name(175) { def virtualShow = "175" } +final case class Name176() extends Name(176) { def virtualShow = "176" } +final case class Name177() extends Name(177) { def virtualShow = "177" } +final case class Name178() extends Name(178) { def virtualShow = "178" } +final case class Name179() extends Name(179) { def virtualShow = "179" } +final case class Name180() extends Name(180) { def virtualShow = "180" } +final case class Name181() extends Name(181) { def virtualShow = "181" } +final case class Name182() extends Name(182) { def virtualShow = "182" } +final case class Name183() extends Name(183) { def virtualShow = "183" } +final case class Name184() extends Name(184) { def virtualShow = "184" } +final case class Name185() extends Name(185) { def virtualShow = "185" } +final case class Name186() extends Name(186) { def virtualShow = "186" } +final case class Name187() extends Name(187) { def virtualShow = "187" } +final case class Name188() extends Name(188) { def virtualShow = "188" } +final case class Name189() extends Name(189) { def virtualShow = "189" } +final case class Name190() extends Name(190) { def virtualShow = "190" } +final case class Name191() extends Name(191) { def virtualShow = "191" } +final case class Name192() extends Name(192) { def virtualShow = "192" } +final case class Name193() extends Name(193) { def virtualShow = "193" } +final case class Name194() extends Name(194) { def virtualShow = "194" } +final case class Name195() extends Name(195) { def virtualShow = "195" } +final case class Name196() extends Name(196) { def virtualShow = "196" } +final case class Name197() extends Name(197) { def virtualShow = "197" } +final case class Name198() extends Name(198) { def virtualShow = "198" } +final case class Name199() extends Name(199) { def virtualShow = "199" } +final case class Name200() extends Name(200) { def virtualShow = "200" } +final case class Name201() extends Name(201) { def virtualShow = "201" } 
+final case class Name202() extends Name(202) { def virtualShow = "202" } +final case class Name203() extends Name(203) { def virtualShow = "203" } +final case class Name204() extends Name(204) { def virtualShow = "204" } +final case class Name205() extends Name(205) { def virtualShow = "205" } +final case class Name206() extends Name(206) { def virtualShow = "206" } +final case class Name207() extends Name(207) { def virtualShow = "207" } +final case class Name208() extends Name(208) { def virtualShow = "208" } +final case class Name209() extends Name(209) { def virtualShow = "209" } +final case class Name210() extends Name(210) { def virtualShow = "210" } +final case class Name211() extends Name(211) { def virtualShow = "211" } +final case class Name212() extends Name(212) { def virtualShow = "212" } +final case class Name213() extends Name(213) { def virtualShow = "213" } +final case class Name214() extends Name(214) { def virtualShow = "214" } +final case class Name215() extends Name(215) { def virtualShow = "215" } +final case class Name216() extends Name(216) { def virtualShow = "216" } +final case class Name217() extends Name(217) { def virtualShow = "217" } +final case class Name218() extends Name(218) { def virtualShow = "218" } +final case class Name219() extends Name(219) { def virtualShow = "219" } +final case class Name220() extends Name(220) { def virtualShow = "220" } +final case class Name221() extends Name(221) { def virtualShow = "221" } +final case class Name222() extends Name(222) { def virtualShow = "222" } +final case class Name223() extends Name(223) { def virtualShow = "223" } +final case class Name224() extends Name(224) { def virtualShow = "224" } +final case class Name225() extends Name(225) { def virtualShow = "225" } +final case class Name226() extends Name(226) { def virtualShow = "226" } +final case class Name227() extends Name(227) { def virtualShow = "227" } +final case class Name228() extends Name(228) { def virtualShow = "228" } +final case class Name229() extends Name(229) { def virtualShow = "229" } +final case class Name230() extends Name(230) { def virtualShow = "230" } +final case class Name231() extends Name(231) { def virtualShow = "231" } +final case class Name232() extends Name(232) { def virtualShow = "232" } +final case class Name233() extends Name(233) { def virtualShow = "233" } +final case class Name234() extends Name(234) { def virtualShow = "234" } +final case class Name235() extends Name(235) { def virtualShow = "235" } +final case class Name236() extends Name(236) { def virtualShow = "236" } +final case class Name237() extends Name(237) { def virtualShow = "237" } +final case class Name238() extends Name(238) { def virtualShow = "238" } +final case class Name239() extends Name(239) { def virtualShow = "239" } +final case class Name240() extends Name(240) { def virtualShow = "240" } +final case class Name241() extends Name(241) { def virtualShow = "241" } +final case class Name242() extends Name(242) { def virtualShow = "242" } +final case class Name243() extends Name(243) { def virtualShow = "243" } +final case class Name244() extends Name(244) { def virtualShow = "244" } +final case class Name245() extends Name(245) { def virtualShow = "245" } +final case class Name246() extends Name(246) { def virtualShow = "246" } +final case class Name247() extends Name(247) { def virtualShow = "247" } +final case class Name248() extends Name(248) { def virtualShow = "248" } +final case class Name249() extends Name(249) { def virtualShow = "249" } 
+final case class Name250() extends Name(250) { def virtualShow = "250" } +final case class Name251() extends Name(251) { def virtualShow = "251" } +final case class Name252() extends Name(252) { def virtualShow = "252" } +final case class Name253() extends Name(253) { def virtualShow = "253" } +final case class Name254() extends Name(254) { def virtualShow = "254" } +final case class Name255() extends Name(255) { def virtualShow = "255" } From 35d8002827e189a464057ba91a8c058f29e35ff4 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 14 May 2021 14:13:19 +1000 Subject: [PATCH 209/769] Address JMH warning about non-public @State classes ``` The instantiated @State annotation only supports public classes. [scala.collection.mutable.OpenHashMapBenchmark.AnyRefBulkGetState] ``` --- .../mutable/OpenHashMapBenchmark.scala | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/test/benchmarks/src/main/scala/scala/collection/mutable/OpenHashMapBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/mutable/OpenHashMapBenchmark.scala index 64e2244499a7..817b3ebda0f8 100644 --- a/test/benchmarks/src/main/scala/scala/collection/mutable/OpenHashMapBenchmark.scala +++ b/test/benchmarks/src/main/scala/scala/collection/mutable/OpenHashMapBenchmark.scala @@ -28,7 +28,7 @@ private object OpenHashMapBenchmark { * @tparam K type of the map keys to be used in the test */ @State(Scope.Thread) - private[this] abstract class BulkPutState[K](implicit keyBuilder: KeySeqBuilder[K]) { + abstract class BulkPutState[K](implicit keyBuilder: KeySeqBuilder[K]) { /** A lower-bound estimate of the number of nanoseconds per `put()` call */ private[this] val nanosPerPut: Double = 5 @@ -99,7 +99,7 @@ private object OpenHashMapBenchmark { * @tparam K type of the map keys to be used in the test */ @State(Scope.Thread) - private[this] abstract class BulkGetState[K](implicit keyBuilder: KeySeqBuilder[K]) { + abstract class BulkGetState[K](implicit keyBuilder: KeySeqBuilder[K]) { /** The sequence of keys to store into a map. */ private[this] var _keys: KeySeq[K] = _ def keys() = _keys @@ -124,7 +124,7 @@ private object OpenHashMapBenchmark { * @tparam K type of the map keys to be used in the test */ @State(Scope.Thread) - private[this] abstract class BulkRemovedGetState[K](implicit keyBuilder: KeySeqBuilder[K]) { + abstract class BulkRemovedGetState[K](implicit keyBuilder: KeySeqBuilder[K]) { /** The sequence of keys to store into a map. 
*/ private[this] var _keys: KeySeq[K] = _ def keys() = _keys @@ -148,22 +148,22 @@ private object OpenHashMapBenchmark { */ @AuxCounters - private class IntBulkPutState extends BulkPutState[Int] { + class IntBulkPutState extends BulkPutState[Int] { override def mapEntries = super.mapEntries override def operations = super.operations override def memory = super.memory } - private class IntBulkGetState extends BulkGetState[Int] - private class IntBulkRemovedGetState extends BulkRemovedGetState[Int] + class IntBulkGetState extends BulkGetState[Int] + class IntBulkRemovedGetState extends BulkRemovedGetState[Int] @AuxCounters - private class AnyRefBulkPutState extends BulkPutState[AnyRef] { + class AnyRefBulkPutState extends BulkPutState[AnyRef] { override def mapEntries = super.mapEntries override def operations = super.operations override def memory = super.memory } - private class AnyRefBulkGetState extends BulkGetState[AnyRef] - private class AnyRefBulkRemovedGetState extends BulkRemovedGetState[AnyRef] + class AnyRefBulkGetState extends BulkGetState[AnyRef] + class AnyRefBulkRemovedGetState extends BulkRemovedGetState[AnyRef] /** Put entries into the given map. From 97bdd49d027b6fa013343cff912ae39f44b985bf Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Fri, 14 May 2021 10:54:11 +0200 Subject: [PATCH 210/769] refactor Erasure --- .../reflect/internal/transform/Erasure.scala | 355 +++++++++--------- 1 file changed, 178 insertions(+), 177 deletions(-) diff --git a/src/reflect/scala/reflect/internal/transform/Erasure.scala b/src/reflect/scala/reflect/internal/transform/Erasure.scala index ba1a683d076f..c42455575db8 100644 --- a/src/reflect/scala/reflect/internal/transform/Erasure.scala +++ b/src/reflect/scala/reflect/internal/transform/Erasure.scala @@ -261,27 +261,27 @@ trait Erasure { if (sym != NoSymbol && sym.enclClass.isJavaDefined) erasure(sym)(tp) else if (sym.isClassConstructor) - specialConstructorErasure(sym.owner, sym, tp) + specialConstructorErasure(sym.owner, tp) else { specialScalaErasureFor(sym)(tp) } - def specialConstructorErasure(clazz: Symbol, ctor: Symbol, tpe: Type): Type = { + def specialConstructorErasure(clazz: Symbol, tpe: Type): Type = { tpe match { case PolyType(tparams, restpe) => - specialConstructorErasure(clazz, ctor, restpe) + specialConstructorErasure(clazz, restpe) case ExistentialType(tparams, restpe) => - specialConstructorErasure(clazz, ctor, restpe) + specialConstructorErasure(clazz, restpe) case mt @ MethodType(params, restpe) => MethodType( - cloneSymbolsAndModify(params, specialScalaErasureFor(ctor)), - specialConstructorErasure(clazz, ctor, restpe)) + cloneSymbolsAndModify(params, specialScalaErasureFor(clazz)), + specialConstructorErasure(clazz, restpe)) case TypeRef(pre, `clazz`, args) => typeRef(pre, clazz, List()) case tp => if (!(clazz == ArrayClass || tp.isError)) assert(clazz == ArrayClass || tp.isError, s"!!! unexpected constructor erasure $tp for $clazz") - specialScalaErasureFor(ctor)(tp) + specialScalaErasureFor(clazz)(tp) } } @@ -339,6 +339,177 @@ trait Erasure { else typeRef(self(pre), ArrayClass, args map applyInArray) } + /** Scala 3 implementation of erasure for intersection types. + * @param components the erased component types of the intersection. + */ + private def erasedGlb(components: List[Type]): Type = { + + /** A comparison function that induces a total order on erased types, + * where `A <= B` implies that the erasure of `A & B` should be A. 
+ * + * This order respects the following properties: + * - ErasedValueTypes <= non-ErasedValueTypes + * - arrays <= non-arrays + * - primitives <= non-primitives + * - real classes <= traits + * - subtypes <= supertypes + * + * Since this isn't enough to order to unrelated classes, we use + * lexicographic ordering of the class symbol full name as a tie-breaker. + * This ensure that `A <= B && B <= A` iff `A =:= B`. + */ + def compareErasedGlb(tp1: Type, tp2: Type): Int = { + // this check is purely an optimization. + if (tp1 eq tp2) return 0 + + val isEVT1 = tp1.isInstanceOf[ErasedValueType] + val isEVT2 = tp2.isInstanceOf[ErasedValueType] + if (isEVT1 && isEVT2) { + return compareErasedGlb( + tp1.asInstanceOf[ErasedValueType].valueClazz.tpe_*, + tp2.asInstanceOf[ErasedValueType].valueClazz.tpe_*) + } + else if (isEVT1) + return -1 + else if (isEVT2) + return 1 + + val sym1 = tp1.baseClasses.head + val sym2 = tp2.baseClasses.head + + def compareClasses: Int = { + if (sym1.isSubClass(sym2)) + -1 + else if (sym2.isSubClass(sym1)) + 1 + else + sym1.fullName.compareTo(sym2.fullName) + } + + val isArray1 = tp1.typeArgs.nonEmpty && sym1.isSubClass(definitions.ArrayClass) + val isArray2 = tp2.typeArgs.nonEmpty && sym2.isSubClass(definitions.ArrayClass) + if (isArray1 && isArray2) + return compareErasedGlb(tp1.typeArgs.head, tp2.typeArgs.head) + else if (isArray1) + return -1 + else if (isArray2) + return 1 + + val isPrimitive1 = sym1.isPrimitiveValueClass + val isPrimitive2 = sym2.isPrimitiveValueClass + if (isPrimitive1 && isPrimitive2) + return compareClasses + else if (isPrimitive1) + return -1 + else if (isPrimitive2) + return 1 + + val isRealClass1 = sym1.isClass && !sym1.isTrait + val isRealClass2 = sym2.isClass && !sym2.isTrait + if (isRealClass1 && isRealClass2) + return compareClasses + else if (isRealClass1) + return -1 + else if (isRealClass2) + return 1 + + compareClasses + } + + components.min((t, u) => compareErasedGlb(t, u)) + } + + /** Dotty implementation of Array Erasure: + * + * Is `Array[tp]` a generic Array that needs to be erased to `Object`? + * This is true if among the subtypes of `Array[tp]` there is either: + * - both a reference array type and a primitive array type + * (e.g. `Array[_ <: Int | String]`, `Array[_ <: Any]`) + * - or two different primitive array types (e.g. `Array[_ <: Int | Double]`) + * In both cases the erased lub of those array types on the JVM is `Object`. 
+ */ + private def isGenericArrayElement(tp: Type): Boolean = { + + object DottyTypeProxy { + + def unapply(tp: Type): Option[Type] = { + val superTpe = translucentSuperType(tp) + if (superTpe ne NoType) Some(superTpe) else None + } + + def translucentSuperType(tp: Type): Type = tp match { + case tp: TypeRef => transparentDealias(tp.sym, tp.pre, tp.sym.owner) + case tp: SingleType => tp.underlying + case tp: ThisType => tp.sym.typeOfThis + case tp: ConstantType => tp.value.tpe + case tp: RefinedType if tp.decls.nonEmpty => intersectionType(tp.parents) + case tp: PolyType => tp.resultType + case tp: ExistentialType => tp.underlying + case tp: TypeBounds => tp.hi + case tp: AnnotatedType => tp.underlying + case tp: SuperType => tp.thistpe.baseType(tp.supertpe.typeSymbol) + case tp => NoType + } + + } + + object DottyAndType { + def unapply(tp: RefinedType): Boolean = tp.decls.isEmpty + } + + /** A symbol that represents the sort of JVM array that values of type `t` can be stored in: + * - If we can always store such values in a reference array, return Object + * - If we can always store them in a specific primitive array, return the + * corresponding primitive class + * - Otherwise, return `NoSymbol`. + */ + def arrayUpperBound(tp: Type): Symbol = tp.dealias match { + case tp: TypeRef if tp.sym.isClass => + val cls = tp.sym + // Only a few classes have both primitives and references as subclasses. + if ((cls eq AnyClass) || (cls eq AnyValClass) || (cls eq SingletonClass)) + NoSymbol + // We only need to check for primitives because derived value classes in arrays are always boxed. + else if (cls.isPrimitiveValueClass) + cls + else + ObjectClass + case DottyTypeProxy(unwrapped) => + arrayUpperBound(unwrapped) + case tp @ DottyAndType() => + // Find first `p` in `parents` where `arrayUpperBound(p) ne NoSymbol` + @tailrec def loop(tps: List[Type]): Symbol = tps match { + case tp :: tps1 => + val ub = arrayUpperBound(tp) + if (ub ne NoSymbol) ub + else loop(tps1) + case nil => NoSymbol + } + loop(tp.parents) + case _ => + NoSymbol + } + + /** Can one of the JVM Array type store all possible values of type `t`? */ + def fitsInJVMArray(tp: Type): Boolean = arrayUpperBound(tp) ne NoSymbol + + def isOpaque(sym: Symbol) = sym.isScala3Defined && !sym.isClass && sym.hasAttachment[DottyOpaqueTypeAlias] + + tp.dealias match { + case tp: TypeRef if !isOpaque(tp.sym) => + !tp.sym.isClass && + !tp.sym.isJavaDefined && // In Java code, Array[T] can never erase to Object + !fitsInJVMArray(tp) + case DottyTypeProxy(unwrapped) => + isGenericArrayElement(unwrapped) + case tp @ DottyAndType() => + tp.parents.forall(isGenericArrayElement) + case tp => + false + } + + } + } class JavaErasureMap extends ErasureMap with Scala2JavaArrayErasure { @@ -446,86 +617,6 @@ trait Erasure { } } - /** Scala 3 implementation of erasure for intersection types. - * @param components the erased component types of the intersection. - */ - def erasedGlb(components: List[Type]): Type = { - - /** A comparison function that induces a total order on erased types, - * where `A <= B` implies that the erasure of `A & B` should be A. - * - * This order respects the following properties: - * - ErasedValueTypes <= non-ErasedValueTypes - * - arrays <= non-arrays - * - primitives <= non-primitives - * - real classes <= traits - * - subtypes <= supertypes - * - * Since this isn't enough to order to unrelated classes, we use - * lexicographic ordering of the class symbol full name as a tie-breaker. 
- * This ensure that `A <= B && B <= A` iff `A =:= B`. - */ - def compareErasedGlb(tp1: Type, tp2: Type): Int = { - // this check is purely an optimization. - if (tp1 eq tp2) return 0 - - val isEVT1 = tp1.isInstanceOf[ErasedValueType] - val isEVT2 = tp2.isInstanceOf[ErasedValueType] - if (isEVT1 && isEVT2) { - return compareErasedGlb( - tp1.asInstanceOf[ErasedValueType].valueClazz.tpe_*, - tp2.asInstanceOf[ErasedValueType].valueClazz.tpe_*) - } - else if (isEVT1) - return -1 - else if (isEVT2) - return 1 - - val sym1 = tp1.baseClasses.head - val sym2 = tp2.baseClasses.head - - def compareClasses: Int = { - if (sym1.isSubClass(sym2)) - -1 - else if (sym2.isSubClass(sym1)) - 1 - else - sym1.fullName.compareTo(sym2.fullName) - } - - val isArray1 = tp1.typeArgs.nonEmpty && sym1.isSubClass(definitions.ArrayClass) - val isArray2 = tp2.typeArgs.nonEmpty && sym2.isSubClass(definitions.ArrayClass) - if (isArray1 && isArray2) - return compareErasedGlb(tp1.typeArgs.head, tp2.typeArgs.head) - else if (isArray1) - return -1 - else if (isArray2) - return 1 - - val isPrimitive1 = sym1.isPrimitiveValueClass - val isPrimitive2 = sym2.isPrimitiveValueClass - if (isPrimitive1 && isPrimitive2) - return compareClasses - else if (isPrimitive1) - return -1 - else if (isPrimitive2) - return 1 - - val isRealClass1 = sym1.isClass && !sym1.isTrait - val isRealClass2 = sym2.isClass && !sym2.isTrait - if (isRealClass1 && isRealClass2) - return compareClasses - else if (isRealClass1) - return -1 - else if (isRealClass2) - return 1 - - compareClasses - } - - components.min((t, u) => compareErasedGlb(t, u)) - } - /** For a type alias, get its info as seen from * the current prefix and owner. * Sees through opaque type aliases. @@ -541,96 +632,6 @@ trait Erasure { visible(sym.info) } - /** Dotty implementation of Array Erasure: - * - * Is `Array[tp]` a generic Array that needs to be erased to `Object`? - * This is true if among the subtypes of `Array[tp]` there is either: - * - both a reference array type and a primitive array type - * (e.g. `Array[_ <: Int | String]`, `Array[_ <: Any]`) - * - or two different primitive array types (e.g. `Array[_ <: Int | Double]`) - * In both cases the erased lub of those array types on the JVM is `Object`. - */ - def isGenericArrayElement(tp: Type): Boolean = { - - object DottyTypeProxy { - - def unapply(tp: Type): Option[Type] = { - val superTpe = translucentSuperType(tp) - if (superTpe ne NoType) Some(superTpe) else None - } - - def translucentSuperType(tp: Type): Type = tp match { - case tp: TypeRef => transparentDealias(tp.sym, tp.pre, tp.sym.owner) - case tp: SingleType => tp.underlying - case tp: ThisType => tp.sym.typeOfThis - case tp: ConstantType => tp.value.tpe - case tp: RefinedType if tp.decls.nonEmpty => intersectionType(tp.parents) - case tp: PolyType => tp.resultType - case tp: ExistentialType => tp.underlying - case tp: TypeBounds => tp.hi - case tp: AnnotatedType => tp.underlying - case tp: SuperType => tp.thistpe.baseType(tp.supertpe.typeSymbol) - case tp => NoType - } - - } - - object DottyAndType { - def unapply(tp: RefinedType): Boolean = tp.decls.isEmpty - } - - /** A symbol that represents the sort of JVM array that values of type `t` can be stored in: - * - If we can always store such values in a reference array, return Object - * - If we can always store them in a specific primitive array, return the - * corresponding primitive class - * - Otherwise, return `NoSymbol`. 
- */ - def arrayUpperBound(tp: Type): Symbol = tp.dealias match { - case tp: TypeRef if tp.sym.isClass => - val cls = tp.sym - // Only a few classes have both primitives and references as subclasses. - if ((cls eq AnyClass) || (cls eq AnyValClass) || (cls eq SingletonClass)) - NoSymbol - // We only need to check for primitives because derived value classes in arrays are always boxed. - else if (cls.isPrimitiveValueClass) - cls - else - ObjectClass - case DottyTypeProxy(unwrapped) => - arrayUpperBound(unwrapped) - case tp @ DottyAndType() => - // Find first `p` in `parents` where `arrayUpperBound(p) ne NoSymbol` - @tailrec def loop(tps: List[Type]): Symbol = tps match { - case tp :: tps1 => - val ub = arrayUpperBound(tp) - if (ub ne NoSymbol) ub - else loop(tps1) - case nil => NoSymbol - } - loop(tp.parents) - case _ => - NoSymbol - } - - /** Can one of the JVM Array type store all possible values of type `t`? */ - def fitsInJVMArray(tp: Type): Boolean = arrayUpperBound(tp) ne NoSymbol - - def isOpaque(sym: Symbol) = sym.isScala3Defined && !sym.isClass && sym.hasAttachment[DottyOpaqueTypeAlias] - - tp.dealias match { - case tp: TypeRef if !isOpaque(tp.sym) => - !tp.sym.isClass && - !tp.sym.isJavaDefined && // In Java code, Array[T] can never erase to Object - !fitsInJVMArray(tp) - case DottyTypeProxy(unwrapped) => - isGenericArrayElement(unwrapped) - case tp @ DottyAndType() => - tp.parents.forall(isGenericArrayElement) - case tp => - false - } - } - /** The symbol's erased info. This is the type's erasure, except for the following primitive symbols: * * - $asInstanceOf --> [T]T From 851903a13680fd246b9118b596ae7b3eba010801 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Fri, 14 May 2021 10:58:03 +0200 Subject: [PATCH 211/769] support Scala 3.0.0 final --- project/DottySupport.scala | 6 ++--- .../scala/tools/tasty/TastyFormat.scala | 23 ++++++++++++++++--- .../tools/tasty/TastyHeaderUnpickler.scala | 16 +------------ 3 files changed, 24 insertions(+), 21 deletions(-) diff --git a/project/DottySupport.scala b/project/DottySupport.scala index 8f9f0b056f5a..59bb745f6bf8 100644 --- a/project/DottySupport.scala +++ b/project/DottySupport.scala @@ -12,9 +12,9 @@ import sbt.librarymanagement.{ * Settings to support validation of TastyUnpickler against the release of dotty with the matching TASTy version */ object TastySupport { - val supportedTASTyRelease = "3.0.0-RC3" // TASTy version 28.0.3 - val scala3Compiler = "org.scala-lang" % "scala3-compiler_3.0.0-RC3" % supportedTASTyRelease - val scala3Library = "org.scala-lang" % "scala3-library_3.0.0-RC3" % supportedTASTyRelease + val supportedTASTyRelease = "3.0.0" // TASTy version 28.0-0 + val scala3Compiler = "org.scala-lang" % "scala3-compiler_3" % supportedTASTyRelease + val scala3Library = "org.scala-lang" % "scala3-library_3" % supportedTASTyRelease val CompilerClasspath = Configuration.of("TastySupport.CompilerClasspath", "TastySupport.CompilerClasspath") val LibraryClasspath = Configuration.of("TastySupport.LibraryClasspath", "TastySupport.LibraryClasspath") diff --git a/src/compiler/scala/tools/tasty/TastyFormat.scala b/src/compiler/scala/tools/tasty/TastyFormat.scala index 8ca2ecd50203..858579cf8ac2 100644 --- a/src/compiler/scala/tools/tasty/TastyFormat.scala +++ b/src/compiler/scala/tools/tasty/TastyFormat.scala @@ -51,14 +51,31 @@ object TastyFormat { * is able to read final TASTy documents if the file's * `MinorVersion` is strictly less than the current value. 
*/ - final val ExperimentalVersion: Int = 3 + final val ExperimentalVersion: Int = 0 /**This method implements a binary relation (`<:<`) between two TASTy versions. + * * We label the lhs `file` and rhs `compiler`. * if `file <:< compiler` then the TASTy file is valid to be read. * - * TASTy versions have a partial order, - * for example `a <:< b` and `b <:< a` are both false if `a` and `b` have different major versions. + * A TASTy version, e.g. `v := 28.0-3` is composed of three fields: + * - v.major == 28 + * - v.minor == 0 + * - v.experimental == 3 + * + * TASTy versions have a partial order, for example, + * `a <:< b` and `b <:< a` are both false if + * - `a` and `b` have different `major` fields. + * - `a` and `b` have the same `major` & `minor` fields, + * but different `experimental` fields, both non-zero. + * + * A TASTy version with a zero value for its `experimental` field + * is considered to be stable. Files with a stable TASTy version + * can be read by a compiler with an unstable TASTy version, + * (where the compiler's TASTy version has a higher `minor` field). + * + * A compiler with a stable TASTy version can never read a file + * with an unstable TASTy version. * * We follow the given algorithm: * ``` diff --git a/src/compiler/scala/tools/tasty/TastyHeaderUnpickler.scala b/src/compiler/scala/tools/tasty/TastyHeaderUnpickler.scala index 57c36d0ffb29..546cdc15e23c 100644 --- a/src/compiler/scala/tools/tasty/TastyHeaderUnpickler.scala +++ b/src/compiler/scala/tools/tasty/TastyHeaderUnpickler.scala @@ -51,7 +51,7 @@ class TastyHeaderUnpickler(reader: TastyReader) { compilerMajor = MajorVersion, compilerMinor = MinorVersion, compilerExperimental = ExperimentalVersion - ) || scala3finalException(fileMajor, fileMinor, fileExperimental) + ) check(validVersion, { val signature = signatureString(fileMajor, fileMinor, fileExperimental) @@ -76,20 +76,6 @@ class TastyHeaderUnpickler(reader: TastyReader) { object TastyHeaderUnpickler { - /** This escape hatch allows 28.0.3 compiler to read - * 28.0.0 TASTy files (aka produced by Scala 3.0.0 final) - * @note this should be removed if we are able to test against - * Scala 3.0.0 before releasing Scala 2.13.6 - */ - private def scala3finalException( - fileMajor: Int, - fileMinor: Int, - fileExperimental: Int): Boolean = ( - MajorVersion == 28 && fileMajor == 28 - && MinorVersion == 0 && fileMinor == 0 - && ExperimentalVersion == 3 && fileExperimental == 0 - ) - private def toolingAddendum = ( if (ExperimentalVersion > 0) "\nNote that your tooling is currently using an unstable TASTy version." 
From 35a935404ecf7debc749c951533ff7f655be1179 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Fri, 14 May 2021 16:56:27 +0100 Subject: [PATCH 212/769] Restore SubstMap's public API (unsealed, constructors, etc) --- .../tools/nsc/transform/SpecializeTypes.scala | 9 ++-- .../scala/reflect/internal/Symbols.scala | 2 +- .../scala/reflect/internal/Types.scala | 2 +- .../scala/reflect/internal/tpe/TypeMaps.scala | 41 ++++++++++--------- .../scala/reflect/internal/SubstMapTest.scala | 13 ++++++ 5 files changed, 41 insertions(+), 26 deletions(-) create mode 100644 test/junit/scala/reflect/internal/SubstMapTest.scala diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index 89b1e4e73dfe..0e68021ae7ca 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -1439,10 +1439,11 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { to: List[Symbol], targetClass: Symbol, addressFields: Boolean) extends TreeSymSubstituter(from, to) { - private def matcher(sym1: Symbol, sym2: Symbol) = - if (sym2.isTypeSkolem) sym2.deSkolemize eq sym1 - else sym1 eq sym2 - override val symSubst = SubstSymMap(from, to, matcher) + override val symSubst = new SubstSymMap(from, to) { + override def matches(sym1: Symbol, sym2: Symbol) = + if (sym2.isTypeSkolem) sym2.deSkolemize eq sym1 + else sym1 eq sym2 + } private def isAccessible(sym: Symbol): Boolean = if (currentOwner.isAnonymousFunction) { diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 19f9b36ad640..203d29ecf477 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -3764,7 +3764,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => else { val syms1 = mapList(syms)(_.cloneSymbol) cloneSymbolsSubstSymMap.using { (msm: SubstSymMap) => - msm.reload(syms, syms1) + msm.reset(syms, syms1) syms1.foreach(_.modifyInfo(msm)) } syms1 diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 7b3dc375f2a3..1cefcf355dfd 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -4059,7 +4059,7 @@ trait Types val resultThis = result.typeSymbol.thisType val substThisMap = new SubstThisMap(original.typeSymbol, resultThis) copyRefinedTypeSSM.using { (msm: SubstSymMap) => - msm.reload(syms1, syms2) + msm.reset(syms1, syms2) syms2.foreach(_.modifyInfo(info => msm.apply(substThisMap.apply(info)))) } } diff --git a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala index 5d8e55f2c170..96684ffe9f3e 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala @@ -20,7 +20,6 @@ import Flags._ import scala.annotation.{nowarn, tailrec} import Variance._ import scala.collection.mutable.ListBuffer -import scala.util.chaining._ private[internal] trait TypeMaps { self: SymbolTable => @@ -665,24 +664,27 @@ private[internal] trait TypeMaps { } /** A base class to compute all substitutions. 
*/ - sealed abstract class SubstMap[T >: Null] extends TypeMap { - private[this] var _from: List[Symbol] = Nil - private[this] var _to: List[T] = Nil + abstract class SubstMap[T >: Null](from0: List[Symbol], to0: List[T]) extends TypeMap { + private[this] var from: List[Symbol] = from0 + private[this] var to: List[T] = to0 private[this] var fromHasTermSymbol = false private[this] var fromMin = Int.MaxValue private[this] var fromMax = Int.MinValue private[this] var fromSize = 0 - final def from: List[Symbol] = _from - final def to: List[T] = _to + // So SubstTypeMap can expose them publicly + // while SubstMap can continue to access them as private fields + protected[this] final def accessFrom: List[Symbol] = from + protected[this] final def accessTo: List[T] = to - def reload(from0: List[Symbol], to0: List[T]): this.type = { + reset(from0, to0) + def reset(from0: List[Symbol], to0: List[T]): this.type = { // OPT this check was 2-3% of some profiles, demoted to -Xdev if (isDeveloper) assert(sameLength(from, to), "Unsound substitution from "+ from +" to "+ to) - _from = from0 - _to = to0 + from = from0 + to = to0 fromHasTermSymbol = false fromMin = Int.MaxValue @@ -783,7 +785,11 @@ private[internal] trait TypeMaps { } /** A map to implement the `substSym` method. */ - sealed class SubstSymMap private () extends SubstMap[Symbol] { + class SubstSymMap(from0: List[Symbol], to0: List[Symbol]) extends SubstMap[Symbol](from0, to0) { + def this(pairs: (Symbol, Symbol)*) = this(pairs.toList.map(_._1), pairs.toList.map(_._2)) + + private[this] final def from: List[Symbol] = accessFrom + private[this] final def to: List[Symbol] = accessTo protected def toType(fromTpe: Type, sym: Symbol) = fromTpe match { case TypeRef(pre, _, args) => copyTypeRef(fromTpe, pre, sym, args) @@ -845,19 +851,14 @@ private[internal] trait TypeMaps { object SubstSymMap { def apply(): SubstSymMap = new SubstSymMap() - def apply(from: List[Symbol], to: List[Symbol]): SubstSymMap = new SubstSymMap().tap(_.reload(from, to)) - def apply(from: List[Symbol], to: List[Symbol], cmp: (Symbol, Symbol) => Boolean): SubstSymMap = { - val ssm = new SubstSymMap() { - override protected def matches(sym: Symbol, sym1: Symbol): Boolean = cmp(sym, sym1) - } - ssm.tap(_.reload(from, to)) - } - def apply(fromto: (Symbol, Symbol)): SubstSymMap = apply(List(fromto._1), List(fromto._2)) + def apply(from: List[Symbol], to: List[Symbol]): SubstSymMap = new SubstSymMap(from, to) + def apply(fromto: (Symbol, Symbol)): SubstSymMap = new SubstSymMap(fromto) } /** A map to implement the `subst` method. 
*/ - class SubstTypeMap(from0: List[Symbol], to0: List[Type]) extends SubstMap[Type] { - super.reload(from0, to0) + class SubstTypeMap(from0: List[Symbol], to0: List[Type]) extends SubstMap[Type](from0, to0) { + final def from: List[Symbol] = accessFrom + final def to: List[Type] = accessTo override protected def toType(fromtp: Type, tp: Type) = tp diff --git a/test/junit/scala/reflect/internal/SubstMapTest.scala b/test/junit/scala/reflect/internal/SubstMapTest.scala new file mode 100644 index 000000000000..7719e3a9a968 --- /dev/null +++ b/test/junit/scala/reflect/internal/SubstMapTest.scala @@ -0,0 +1,13 @@ +package scala.reflect.internal + +import scala.tools.nsc.symtab.SymbolTableForUnitTesting + +class SubstMapTest { + object symbolTable extends SymbolTableForUnitTesting + import symbolTable._ + + // compile-test for https://github.com/scala/community-build/pull/1413 + new SubstMap[String](Nil, Nil) { + protected def toType(fromtp: Type, tp: String) = fromtp + } +} From 76252974f67ef149d9ea70ab03d7ea4a219e94e3 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Mon, 17 May 2021 13:34:56 +0200 Subject: [PATCH 213/769] Restarr on 2.13.6 --- build.sbt | 2 +- project/MimaFilters.scala | 18 ++---------------- versions.properties | 2 +- 3 files changed, 4 insertions(+), 18 deletions(-) diff --git a/build.sbt b/build.sbt index ddc570a3fb25..ff6183de1c8b 100644 --- a/build.sbt +++ b/build.sbt @@ -70,7 +70,7 @@ lazy val publishSettings : Seq[Setting[_]] = Seq( // should not be set directly. It is the same as the Maven version and derived automatically from `baseVersion` and // `baseVersionSuffix`. globalVersionSettings -Global / baseVersion := "2.13.6" +Global / baseVersion := "2.13.7" Global / baseVersionSuffix := "SNAPSHOT" ThisBuild / organization := "org.scala-lang" ThisBuild / homepage := Some(url("https://www.scala-lang.org")) diff --git a/project/MimaFilters.scala b/project/MimaFilters.scala index 0cde580c4f63..f18a4b360a2b 100644 --- a/project/MimaFilters.scala +++ b/project/MimaFilters.scala @@ -13,7 +13,7 @@ object MimaFilters extends AutoPlugin { import autoImport._ override val globalSettings = Seq( - mimaReferenceVersion := Some("2.13.5"), + mimaReferenceVersion := Some("2.13.6"), ) val mimaFilters: Seq[ProblemFilter] = Seq[ProblemFilter]( @@ -25,21 +25,7 @@ object MimaFilters extends AutoPlugin { // don't publish the artifact built with JDK 11 anyways ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.convert.JavaCollectionWrappers#IteratorWrapper.asIterator"), - // for the method this(Long)Unit in class scala.math.BigInt does not have a correspondent in other versions - // this new constructor is nevertheless private, and can only be called from the BigInt class and its companion - // object - ProblemFilters.exclude[DirectMissingMethodProblem]("scala.math.BigInt.this"), - - // PR: https://github.com/scala/scala/pull/9336; remove after re-STARR - ProblemFilters.exclude[MissingTypesProblem]("scala.deprecatedOverriding"), - ProblemFilters.exclude[MissingTypesProblem]("scala.deprecatedInheritance"), - ProblemFilters.exclude[MissingTypesProblem]("scala.deprecated"), - ProblemFilters.exclude[MissingTypesProblem]("scala.annotation.elidable"), - ProblemFilters.exclude[MissingTypesProblem]("scala.annotation.implicitAmbiguous"), - ProblemFilters.exclude[MissingTypesProblem]("scala.annotation.implicitNotFound"), - ProblemFilters.exclude[MissingTypesProblem]("scala.annotation.migration"), - - // when building on a recent JDK, classes implementing `CharSequence` get a mixin 
forwarder for + // KEEP: when building on a recent JDK, classes implementing `CharSequence` get a mixin forwarder for // the `isEmpty` default method that was added in JDK 15 ProblemFilters.exclude[DirectMissingMethodProblem]("scala.Predef#SeqCharSequence.isEmpty"), ProblemFilters.exclude[DirectMissingMethodProblem]("scala.Predef#ArrayCharSequence.isEmpty"), diff --git a/versions.properties b/versions.properties index e9902399194f..971b4a002731 100644 --- a/versions.properties +++ b/versions.properties @@ -1,5 +1,5 @@ # Scala version used for bootstrapping (see README.md) -starr.version=2.13.5 +starr.version=2.13.6 # These are the versions of the modules that go with this release. # Artifact dependencies: From 78bedf2879ae6ae9882f779e2e87fb779ef3b991 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Mon, 17 May 2021 14:21:56 -0700 Subject: [PATCH 214/769] sbt 1.5.2 (was 1.5.1) --- project/build.properties | 2 +- scripts/common | 2 +- src/intellij/scala.ipr.SAMPLE | 36 +++++++++++++------------- test/jcstress/project/build.properties | 2 +- 4 files changed, 21 insertions(+), 21 deletions(-) diff --git a/project/build.properties b/project/build.properties index f0be67b9f729..19479ba46ff2 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.5.1 +sbt.version=1.5.2 diff --git a/scripts/common b/scripts/common index 82c41790df0e..178ea86dbce9 100644 --- a/scripts/common +++ b/scripts/common @@ -11,7 +11,7 @@ else fi SBT_CMD=${SBT_CMD-sbt} -SBT_CMD="$SBT_CMD -sbt-version 1.5.1" +SBT_CMD="$SBT_CMD -sbt-version 1.5.2" # repo to publish builds integrationRepoUrl=${integrationRepoUrl-"https://scala-ci.typesafe.com/artifactory/scala-integration/"} diff --git a/src/intellij/scala.ipr.SAMPLE b/src/intellij/scala.ipr.SAMPLE index a6057f96db8b..70d7b2a8f6ee 100644 --- a/src/intellij/scala.ipr.SAMPLE +++ b/src/intellij/scala.ipr.SAMPLE @@ -325,13 +325,13 @@ - + - + - - + + @@ -355,7 +355,7 @@ - + @@ -369,8 +369,8 @@ - - + + @@ -382,13 +382,13 @@ - + - + @@ -402,16 +402,16 @@ - + - + - + - + @@ -435,18 +435,18 @@ - - + + - + - + - + diff --git a/test/jcstress/project/build.properties b/test/jcstress/project/build.properties index f0be67b9f729..19479ba46ff2 100644 --- a/test/jcstress/project/build.properties +++ b/test/jcstress/project/build.properties @@ -1 +1 @@ -sbt.version=1.5.1 +sbt.version=1.5.2 From 5bee27aa51a7bb4b7c47b2aee8b707a04e28dd06 Mon Sep 17 00:00:00 2001 From: Li Haoyi Date: Sun, 16 May 2021 20:56:50 +0800 Subject: [PATCH 215/769] Fixes https://github.com/scala/bug/issues/12397 by turning the long change of `&&`s in the synthetic `def equals` method: ```scala a && b && c && d && e && f && g && h ``` Which currently parses into an unbalanced depth O(n) tree as follows: ```scala (((((((a && b) && c) && d) && e) && f) && g) && h) ``` into a binary tree of depth O(log n): ```scala (((a && b) && (c && d)) && ((e && f) && (g && h))) ``` Tested manually by pasting the following snippet into the `sbt scala` interpreter: ```scala case class Big150(_0: Int, _1: Int, _2: Int, _3: Int, _4: Int, _5: Int, _6: Int, _7: Int, _8: Int, _9: Int, _10: Int, _11: Int, _12: Int, _13: Int, _14: Int, _15: Int, _16: Int, _17: Int, _18: Int, _19: Int, _20: Int, _21: Int, _22: Int, _23: Int, _24: Int, _25: Int, _26: Int, _27: Int, _28: Int, _29: Int, _30: Int, _31: Int, _32: Int, _33: Int, _34: Int, _35: Int, _36: Int, _37: Int, _38: Int, _39: Int, _40: Int, _41: Int, _42: Int, _43: Int, _44: Int, _45: Int, _46: Int, _47: Int, _48: Int, _49: Int, _50: Int, _51: Int, _52: Int, 
_53: Int, _54: Int, _55: Int, _56: Int, _57: Int, _58: Int, _59: Int, _60: Int, _61: Int, _62: Int, _63: Int, _64: Int, _65: Int, _66: Int, _67: Int, _68: Int, _69: Int, _70: Int, _71: Int, _72: Int, _73: Int, _74: Int, _75: Int, _76: Int, _77: Int, _78: Int, _79: Int, _80: Int, _81: Int, _82: Int, _83: Int, _84: Int, _85: Int, _86: Int, _87: Int, _88: Int, _89: Int, _90: Int, _91: Int, _92: Int, _93: Int, _94: Int, _95: Int, _96: Int, _97: Int, _98: Int, _99: Int, _100: Int, _101: Int, _102: Int, _103: Int, _104: Int, _105: Int, _106: Int, _107: Int, _108: Int, _109: Int, _110: Int, _111: Int, _112: Int, _113: Int, _114: Int, _115: Int, _116: Int, _117: Int, _118: Int, _119: Int, _120: Int, _121: Int, _122: Int, _123: Int, _124: Int, _125: Int, _126: Int, _127: Int, _128: Int, _129: Int, _130: Int, _131: Int, _132: Int, _133: Int, _134: Int, _135: Int, _136: Int, _137: Int, _138: Int, _139: Int, _140: Int, _141: Int, _142: Int, _143: Int, _144: Int, _145: Int, _146: Int, _147: Int, _148: Int, _149: Int) ``` This semi-reliably crashes the interpreter with a StackOverflow on 2.13.x, and works without issue on this PR. I'm not sure where the tests should go, but let me know and I'll happily paste that snippet into your test suite (or you guys could do it on my behalf when merging!) It's not clear to me if the other generated methods suffer the same unbalanced-AST issue, but glancing over the code it seems they don't: e.g. `.hashCode` has a long chain of `val` assignments of AST depth O(1), `.productElement` is one big pattern match of depth O(1), etc. The fact that this seems to fix the StackOverflow without it turning up somewhere else also supports the idea that `.equals` is the only generated method with this issue Seems the problematic behavior was introduced 14 years ago in https://github.com/scala/scala/commit/8397c7b73c2930229eae509e089550b0c3020ce2#diff-205537ac4c08ea690ada72e398df0018dcaf2a7c4987c0d8d8df322314469578R162 --- src/compiler/scala/tools/nsc/ast/TreeDSL.scala | 12 +++++++++++- test/files/run/idempotency-case-classes.check | 2 +- 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala index 1dfb5d72ac59..f3979f6c94a2 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala @@ -140,7 +140,17 @@ trait TreeDSL { def NEW(tpt: Tree, args: Tree*): Tree = New(tpt, List(args.toList)) def NOT(tree: Tree) = Select(tree, Boolean_not) - def AND(guards: Tree*) = if (guards.isEmpty) EmptyTree else guards reduceLeft gen.mkAnd + def AND(guards: Tree*) = { + def binaryTreeAnd(tests: Seq[Tree]): Tree = tests match{ + case Seq() => EmptyTree + case Seq(single) => single + case multiple => + val (before, after) = multiple.splitAt(tests.size / 2) + gen.mkAnd(binaryTreeAnd(before), binaryTreeAnd(after)) + } + + binaryTreeAnd(guards) + } def IF(tree: Tree) = new IfStart(tree, EmptyTree) def TRY(tree: Tree) = new TryStart(tree, Nil, EmptyTree) diff --git a/test/files/run/idempotency-case-classes.check b/test/files/run/idempotency-case-classes.check index 78ee0af219a2..7339a68be71b 100644 --- a/test/files/run/idempotency-case-classes.check +++ b/test/files/run/idempotency-case-classes.check @@ -40,7 +40,7 @@ C(2,3) case _ => false }.&&({ val C$1: C = x$1.asInstanceOf[C]; - C.this.x.==(C$1.x).&&(C.this.y.==(C$1.y)).&&(C$1.canEqual(C.this)) + C.this.x.==(C$1.x).&&(C.this.y.==(C$1.y).&&(C$1.canEqual(C.this))) })) }; object C extends 
scala.runtime.AbstractFunction2[Int,Int,C] with java.io.Serializable { From 6ab61aafd9fb6bf04f5f30808b6b90d12ceb9fad Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Thu, 20 May 2021 10:24:22 -0700 Subject: [PATCH 216/769] Handle Scala 3 star in import braces --- .../scala/tools/nsc/ast/parser/Parsers.scala | 5 ++--- test/files/neg/import-syntax.check | 7 +++++++ test/files/neg/import-syntax.scala | 12 ++++++++++++ test/files/pos/import-future.scala | 8 ++++++++ 4 files changed, 29 insertions(+), 3 deletions(-) create mode 100644 test/files/neg/import-syntax.check create mode 100644 test/files/neg/import-syntax.scala diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 602b5f1280a9..c3712f7b562f 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -2719,10 +2719,9 @@ self => selectors } - def wildcardOrIdent() = { - if (in.token == USCORE) { in.nextToken() ; nme.WILDCARD } + def wildcardOrIdent() = + if (in.token == USCORE || settings.isScala3 && isRawStar) { in.nextToken() ; nme.WILDCARD } else ident() - } /** {{{ * ImportSelector ::= Id [`=>` Id | `=>` `_`] diff --git a/test/files/neg/import-syntax.check b/test/files/neg/import-syntax.check new file mode 100644 index 000000000000..887677e3cfd9 --- /dev/null +++ b/test/files/neg/import-syntax.check @@ -0,0 +1,7 @@ +import-syntax.scala:10: error: Wildcard import cannot be renamed + import d.{* => huh} + ^ +import-syntax.scala:11: error: Wildcard import cannot be renamed + import d.{_ => also_no} + ^ +2 errors diff --git a/test/files/neg/import-syntax.scala b/test/files/neg/import-syntax.scala new file mode 100644 index 000000000000..0e3deb00cce0 --- /dev/null +++ b/test/files/neg/import-syntax.scala @@ -0,0 +1,12 @@ +// scalac: -Xsource:3 + +class D { + def *(y: Int): Int = y + def unrelated(y: Int): Int = y +} + +object nope { + val d = new D + import d.{* => huh} + import d.{_ => also_no} +} diff --git a/test/files/pos/import-future.scala b/test/files/pos/import-future.scala index cfaff804af02..1c0c3410f36a 100644 --- a/test/files/pos/import-future.scala +++ b/test/files/pos/import-future.scala @@ -23,3 +23,11 @@ class C { import mut.* val ab = ArrayBuffer(1) } + +object starring { + + import scala.concurrent.*, duration.{Duration as D, *}, ExecutionContext.Implicits.* + + val f = Future(42) + val r = Await.result(f, D.Inf) +} From 96438f2a1e73e491a5221b3c5e6a96b28fd23e36 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Thu, 20 May 2021 18:36:31 -0700 Subject: [PATCH 217/769] Adapted multiarg infix is just a tuple --- .../scala/tools/nsc/typechecker/Adaptations.scala | 14 ++++++++++---- test/files/neg/t8035-removed.check | 7 +++++++ test/files/neg/t8035-removed.scala | 5 ++++- 3 files changed, 21 insertions(+), 5 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala b/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala index 133b299e5412..4fc3c1fdddd8 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala @@ -36,6 +36,10 @@ trait Adaptations { case Apply(_, arg :: Nil) => arg case _ => EmptyTree } + def isInfix = t match { + case Apply(_, arg :: Nil) => t.hasAttachment[MultiargInfixAttachment.type] + case _ => false + } def callString = ( ( if (t.symbol.isConstructor) "new " else "" ) + ( t.symbol.owner.decodedName ) + @@ -86,15 +90,17 @@ trait Adaptations { true // keep 
adaptation } @inline def warnAdaptation = { - if (settings.warnAdaptedArgs) context.warning(t.pos, adaptWarningMessage( + if (settings.warnAdaptedArgs && !isInfix) context.warning(t.pos, adaptWarningMessage( s"adapted the argument list to the expected ${args.size}-tuple: add additional parens instead"), WarningCategory.LintAdaptedArgs) true // keep adaptation } - if (args.isEmpty) { - if (currentRun.isScala3) noAdaptation else deprecatedAdaptation - } else + if (args.nonEmpty) warnAdaptation + else if (currentRun.isScala3) + noAdaptation + else + deprecatedAdaptation } } } diff --git a/test/files/neg/t8035-removed.check b/test/files/neg/t8035-removed.check index 1938c010d557..7c444dcd6840 100644 --- a/test/files/neg/t8035-removed.check +++ b/test/files/neg/t8035-removed.check @@ -13,4 +13,11 @@ t8035-removed.scala:11: error: adaptation of an empty argument list by inserting given arguments: sdf.format() ^ +t8035-removed.scala:14: warning: adapted the argument list to the expected 2-tuple: add additional parens instead + signature: List.::[B >: A](elem: B): List[B] + given arguments: 42, 27 + after adaptation: List.::((42, 27): (Int, Int)) + Nil.::(42, 27) // yeswarn + ^ +1 warning 3 errors diff --git a/test/files/neg/t8035-removed.scala b/test/files/neg/t8035-removed.scala index e3bc04d8ea10..bada37b7d2f1 100644 --- a/test/files/neg/t8035-removed.scala +++ b/test/files/neg/t8035-removed.scala @@ -1,4 +1,4 @@ -// scalac: -Xsource:3.0 +// scalac: -Xsource:3.0 -Xlint -Werror // object Foo { List(1,2,3).toSet() @@ -9,4 +9,7 @@ object Foo { import java.text.SimpleDateFormat val sdf = new SimpleDateFormat("yyyyMMdd-HH0000") sdf.format() + + (42, 27) :: Nil // nowarn + Nil.::(42, 27) // yeswarn } From f18fd49de1980d56cc50e5cfbe567aaf042e08b8 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Fri, 27 Mar 2020 15:41:25 +0100 Subject: [PATCH 218/769] No protected accessor when accessing through self type --- project/MimaFilters.scala | 3 + .../nsc/typechecker/SuperAccessors.scala | 1 + test/files/run/t11924.check | 12 +++ test/files/run/t11924.scala | 93 +++++++++++++++++++ 4 files changed, 109 insertions(+) create mode 100644 test/files/run/t11924.check create mode 100644 test/files/run/t11924.scala diff --git a/project/MimaFilters.scala b/project/MimaFilters.scala index f18a4b360a2b..ad847e7b0a31 100644 --- a/project/MimaFilters.scala +++ b/project/MimaFilters.scala @@ -30,6 +30,9 @@ object MimaFilters extends AutoPlugin { ProblemFilters.exclude[DirectMissingMethodProblem]("scala.Predef#SeqCharSequence.isEmpty"), ProblemFilters.exclude[DirectMissingMethodProblem]("scala.Predef#ArrayCharSequence.isEmpty"), ProblemFilters.exclude[DirectMissingMethodProblem]("scala.runtime.ArrayCharSequence.isEmpty"), + + // #8835 + ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.reflect.runtime.SynchronizedOps#SynchronizedBaseTypeSeq.scala$reflect$runtime$SynchronizedOps$SynchronizedBaseTypeSeq$$super$maxDepthOfElems"), ) override val buildSettings = Seq( diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala index ccdbabaff4c4..ef168e5926c9 100644 --- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala @@ -356,6 +356,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT && !sym.owner.isTrait && sym.owner.enclosingPackageClass != currentClass.enclosingPackageClass && qual.symbol.info.member(sym.name).exists + && 
!(currentClass.typeOfThis.typeSymbol.isSubClass(sym.owner)) // scala/bug#11924 && !needsProtectedAccessor(sym, tree.pos) ) if (shouldEnsureAccessor) { diff --git a/test/files/run/t11924.check b/test/files/run/t11924.check new file mode 100644 index 000000000000..edee5862ce53 --- /dev/null +++ b/test/files/run/t11924.check @@ -0,0 +1,12 @@ +B1-a +B1-b +B2 +A +B3-a +B3-b +A +B4-a +B4-b +B5-a +B5-b +A diff --git a/test/files/run/t11924.scala b/test/files/run/t11924.scala new file mode 100644 index 000000000000..56211a1bffb2 --- /dev/null +++ b/test/files/run/t11924.scala @@ -0,0 +1,93 @@ +package pkg { + class A { + protected def f(): Unit = println("A") + } +} + +import pkg.A + +trait B1 { self: A => + private[this] var go = true + override def f(): Unit = if (go) { + go = false + println("B1-a") + self.f() + } else + println("B1-b") +} + +trait B2 extends A { + override def f(): Unit = { + println("B2") + super.f() + } +} + +trait B3 extends A { self: A => + private[this] var go = true + override def f(): Unit = if (go) { + go = false + println("B3-a") + self.f() + } else { + println("B3-b") + super.f() + } +} + +class C1 extends A with B1 +class C2 extends A with B2 +class C3 extends A with B3 + +// test case from pull request comment + +package l1 { + class I { + class A { + protected def f(): Unit = println("A") + } + } + object O extends I +} + +package l2 { + class I { + trait B4 { self: l1.O.A => + private[this] var go = true + override def f(): Unit = if (go) { + go = false + println("B4-a") + self.f() + } else { + println("B4-b") + } + } + + trait B5 extends l1.O.A { self: l1.O.A => + private[this] var go = true + override def f(): Unit = if (go) { + go = false + println("B5-a") + self.f() + } else { + println("B5-b") + super.f() + } + } + } + object O extends I +} + +class C4 extends l1.O.A with l2.O.B4 +class C5 extends l1.O.A with l2.O.B5 + + +object Test { + def main(args: Array[String]): Unit = { + new C1().f() + new C2().f() + new C3().f() + new C4().f() + new C5().f() + } +} From b793b64e93a65a7a336d1cfa22462c61842cea33 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 28 May 2021 18:40:40 +1000 Subject: [PATCH 219/769] re-STARR on 2.12.14 --- build.sbt | 2 +- project/MimaFilters.scala | 16 +--------------- src/intellij/scala.ipr.SAMPLE | 8 ++++---- versions.properties | 2 +- 4 files changed, 7 insertions(+), 21 deletions(-) diff --git a/build.sbt b/build.sbt index b951b11ca682..6024296ad517 100644 --- a/build.sbt +++ b/build.sbt @@ -89,7 +89,7 @@ lazy val publishSettings : Seq[Setting[_]] = Seq( // should not be set directly. It is the same as the Maven version and derived automatically from `baseVersion` and // `baseVersionSuffix`. 
globalVersionSettings -baseVersion in Global := "2.12.14" +baseVersion in Global := "2.12.15" baseVersionSuffix in Global := "SNAPSHOT" organization in ThisBuild := "org.scala-lang" homepage in ThisBuild := Some(url("https://www.scala-lang.org")) diff --git a/project/MimaFilters.scala b/project/MimaFilters.scala index 41cf89665189..6df340f475e6 100644 --- a/project/MimaFilters.scala +++ b/project/MimaFilters.scala @@ -13,26 +13,12 @@ object MimaFilters extends AutoPlugin { import autoImport._ override val globalSettings = Seq( - mimaReferenceVersion := Some("2.12.13"), + mimaReferenceVersion := Some("2.12.14"), ) val mimaFilters: Seq[ProblemFilter] = Seq[ProblemFilter]( // KEEP: scala.reflect.internal isn't public API ProblemFilters.exclude[Problem]("scala.reflect.internal.*"), - - // #9314 introduced private[this] object - ProblemFilters.exclude[MissingClassProblem]("scala.collection.immutable.TreeSet$unitsIterator$"), - - // #9314 #9315 #9507 NewRedBlackTree is private[collection] - ProblemFilters.exclude[Problem]("scala.collection.immutable.NewRedBlackTree*"), - - // #9166 add missing serialVersionUID - ProblemFilters.exclude[MissingFieldProblem]("*.serialVersionUID"), - - // private[scala] Internal API - ProblemFilters.exclude[IncompatibleMethTypeProblem]("scala.reflect.io.FileZipArchive#LeakyEntry.this"), - ProblemFilters.exclude[IncompatibleMethTypeProblem]("scala.reflect.io.FileZipArchive#LeakyEntry.this"), - ProblemFilters.exclude[MissingClassProblem]("scala.reflect.io.FileZipArchive$zipFilePool$"), ) override val buildSettings = Seq( diff --git a/src/intellij/scala.ipr.SAMPLE b/src/intellij/scala.ipr.SAMPLE index 9fafee581e74..5bfb74e6f218 100644 --- a/src/intellij/scala.ipr.SAMPLE +++ b/src/intellij/scala.ipr.SAMPLE @@ -308,7 +308,7 @@ - + @@ -544,9 +544,9 @@ - - - + + + diff --git a/versions.properties b/versions.properties index e8f059f92d92..7621a21f96d6 100644 --- a/versions.properties +++ b/versions.properties @@ -1,5 +1,5 @@ # Scala version used for bootstrapping (see README.md) -starr.version=2.12.13 +starr.version=2.12.14 # The scala.binary.version determines how modules are resolved. 
It is set as follows: # - After 2.x.0 is released, the binary version is 2.x From 94ea45f4d5ca54d034cbd070b0bd61ff247cd1e6 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Mon, 2 Dec 2019 14:19:40 -0800 Subject: [PATCH 220/769] Check that varargs is applied only to methods --- .../scala/tools/nsc/typechecker/RefChecks.scala | 14 ++++++++++++-- test/files/neg/varargs2.check | 13 +++++++++++++ test/files/neg/varargs2.scala | 13 +++++++++++++ 3 files changed, 38 insertions(+), 2 deletions(-) create mode 100644 test/files/neg/varargs2.check create mode 100644 test/files/neg/varargs2.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index eaffb019aee6..d69f02710abc 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -1404,14 +1404,14 @@ abstract class RefChecks extends Transform { false } - private def checkTypeRef(tp: Type, tree: Tree, skipBounds: Boolean) = tp match { + private def checkTypeRef(tp: Type, tree: Tree, skipBounds: Boolean): Unit = tp match { case TypeRef(pre, sym, args) => tree match { case tt: TypeTree if tt.original == null => // scala/bug#7783 don't warn about inferred types // FIXME: reconcile this check with one in resetAttrs case _ => checkUndesiredProperties(sym, tree.pos) } - if(sym.isJavaDefined) + if (sym.isJavaDefined) sym.typeParams foreach (_.cookJavaRawInfo()) if (!tp.isHigherKinded && !skipBounds) checkBounds(tree, pre, sym.owner, sym.typeParams, args) @@ -1434,8 +1434,18 @@ abstract class RefChecks extends Transform { } private def applyRefchecksToAnnotations(tree: Tree): Unit = { + def checkVarArgs(tp: Type, tree: Tree): Unit = tp match { + case TypeRef(_, VarargsClass, _) => + tree match { + case tt: TypeTree if tt.original == null => // same exception as in checkTypeRef + case _: DefDef => + case _ => reporter.error(tree.pos, s"Only methods can be marked @varargs") + } + case _ => + } def applyChecks(annots: List[AnnotationInfo]): List[AnnotationInfo] = if (annots.isEmpty) Nil else { annots.foreach { ann => + checkVarArgs(ann.atp, tree) checkTypeRef(ann.atp, tree, skipBounds = false) checkTypeRefBounds(ann.atp, tree) if (ann.original != null && ann.original.hasExistingSymbol) diff --git a/test/files/neg/varargs2.check b/test/files/neg/varargs2.check new file mode 100644 index 000000000000..23d13ec6bf09 --- /dev/null +++ b/test/files/neg/varargs2.check @@ -0,0 +1,13 @@ +varargs2.scala:7: error: Only methods can be marked @varargs + @varargs val x = 42 // nok + ^ +varargs2.scala:8: error: Only methods can be marked @varargs + def f(@varargs y: Int) = 42 // nok + ^ +varargs2.scala:9: error: Only methods can be marked @varargs + def g(z: Int @varargs) = 42 // nok + ^ +varargs2.scala:10: error: Only methods can be marked @varargs + def h(z: Int) = 42: @varargs // nok + ^ +4 errors diff --git a/test/files/neg/varargs2.scala b/test/files/neg/varargs2.scala new file mode 100644 index 000000000000..82ccf97cb03a --- /dev/null +++ b/test/files/neg/varargs2.scala @@ -0,0 +1,13 @@ +// scalac: -Xsource:3 + +import annotation.* + +trait T { + @varargs def d(n: Int*) = 42 // ok + @varargs val x = 42 // nok + def f(@varargs y: Int) = 42 // nok + def g(z: Int @varargs) = 42 // nok + def h(z: Int) = 42: @varargs // nok + + lazy val VarargsClass = List.empty[varargs] // good one +} From dbab5a132c892d2ab98409bc5a7f2c16579fb541 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Mon, 31 May 2021 15:01:46 +0200 Subject: 
[PATCH 221/769] Skip null check on unapply calls for value classes --- .../transform/patmat/MatchTreeMaking.scala | 4 +- test/files/run/t12405.check | 96 +++++++++++++++++++ test/files/run/t12405.scala | 30 ++++++ 3 files changed, 128 insertions(+), 2 deletions(-) create mode 100644 test/files/run/t12405.check create mode 100644 test/files/run/t12405.scala diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala index 6896c16fb36a..6d62def995e3 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala @@ -214,11 +214,11 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { val nullCheck = REF(prevBinder) OBJ_NE NULL lazy val localSubstitution = Substitution(Nil, Nil) - def isExpectedPrimitiveType = isPrimitiveValueType(expectedTp) + def skipNullTest = isPrimitiveValueType(expectedTp) || expectedTp.typeSymbol.isDerivedValueClass def chainBefore(next: Tree)(casegen: Casegen): Tree = atPos(pos) { - if (isExpectedPrimitiveType) next + if (skipNullTest) next else casegen.ifThenElseZero(nullCheck, next) } diff --git a/test/files/run/t12405.check b/test/files/run/t12405.check new file mode 100644 index 000000000000..a7a8f9bd39f6 --- /dev/null +++ b/test/files/run/t12405.check @@ -0,0 +1,96 @@ +[[syntax trees at end of patmat]] // newSource1.scala +package { + final class C[A] extends scala.AnyVal { + private[this] val x: A = _; + def x: A = C.this.x; + def (x: A): C[A] = { + C.super.(); + () + }; + def isEmpty: Boolean = C.isEmpty$extension[A](C.this); + def get: A = C.get$extension[A](C.this); + override def hashCode(): Int = C.hashCode$extension[A](C.this)(); + override def equals(x$1: Any): Boolean = C.equals$extension[A](C.this)(x$1) + }; + object C extends scala.AnyRef { + def (): C.type = { + C.super.(); + () + }; + def unapply[T](c: C[T]): C[T] = c; + final def isEmpty$extension[A]($this: C[A]): Boolean = scala.Predef.???; + final def get$extension[A]($this: C[A]): A = scala.Predef.???; + final def hashCode$extension[A]($this: C[A])(): Int = $this.x.hashCode(); + final def equals$extension[A]($this: C[A])(x$1: Any): Boolean = { + case val x1: Any = x$1; + case5(){ + if (x1.isInstanceOf[C[A]]) + matchEnd4(true) + else + case6() + }; + case6(){ + matchEnd4(false) + }; + matchEnd4(x: Boolean){ + x + } +}.&&({ + val C$1: C[A] = x$1.asInstanceOf[C[A]]; + $this.x.==(C$1.x) + }) + }; + class Test extends scala.AnyRef { + def (): Test = { + Test.super.(); + () + }; + def m1(a: Any): Any = { + case val x1: Any = a; + case6(){ + if (x1.isInstanceOf[C[T]]) + { + val x2: C[T] = (x1.asInstanceOf[C[T]]: C[T]); + { + val o8: C[T] = C.unapply[T](x2); + if (o8.isEmpty.unary_!) + { + val x: T = o8.get; + matchEnd5(x) + } + else + case7() + } + } + else + case7() + }; + case7(){ + matchEnd5(null) + }; + matchEnd5(x: Any){ + x + } + }; + def m2(c: C[String]): String = { + case val x1: C[String] = c; + case5(){ + val o7: C[String] = C.unapply[String](x1); + if (o7.isEmpty.unary_!) 
+ { + val x: String = o7.get; + matchEnd4(x) + } + else + case6() + }; + case6(){ + matchEnd4("") + }; + matchEnd4(x: String){ + x + } + } + } +} + diff --git a/test/files/run/t12405.scala b/test/files/run/t12405.scala new file mode 100644 index 000000000000..f44e19fd99ea --- /dev/null +++ b/test/files/run/t12405.scala @@ -0,0 +1,30 @@ +import scala.tools.partest._ + +object Test extends DirectTest { + override def extraSettings: String = "-usejavacp -Vprint:patmat -Ystop-after:patmat" + + override val code = + """final class C[A](val x: A) extends AnyVal { + | def isEmpty: Boolean = ??? + | def get: A = ??? + |} + |object C { + | def unapply[T](c: C[T]): C[T] = c + |} + |class Test { + | def m1(a: Any) = a match { + | case C(x) => x + | case _ => null + | } + | + | def m2(c: C[String]) = c match { + | case C(x) => x + | case _ => "" + | } + |} + |""".stripMargin + + override def show(): Unit = Console.withErr(System.out) { + compile() + } +} From 67d4bba7ea2b12f20105227a83a8a4f9ba322000 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Mon, 31 May 2021 15:06:57 +0200 Subject: [PATCH 222/769] Assert redundant boolean --- .../scala/tools/nsc/transform/patmat/MatchTranslation.scala | 1 + 1 file changed, 1 insertion(+) diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala index 108d0e646e68..cd9af31a4090 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala @@ -124,6 +124,7 @@ trait MatchTranslation { // (the prefix of the argument passed to the unapply must equal the prefix of the type of the binder) val typeTest = TypeTestTreeMaker(binder, binder, paramType, paramType)(pos, extractorArgTypeTest = true) val binderKnownNonNull = typeTest impliesBinderNonNull binder + assert(binderKnownNonNull, s"$binder") // skip null test if it's implied if (binderKnownNonNull) { val unappBinder = typeTest.nextBinder From e100788e3653068d6bee6f08957d64e6903ce078 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Mon, 31 May 2021 15:08:09 +0200 Subject: [PATCH 223/769] Remove dead code path --- .../nsc/transform/patmat/MatchTranslation.scala | 14 +++----------- .../nsc/transform/patmat/MatchTreeMaking.scala | 13 ------------- 2 files changed, 3 insertions(+), 24 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala index cd9af31a4090..c02bf8d339d6 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala @@ -123,17 +123,9 @@ trait MatchTranslation { // TODO: the outer check is mandated by the spec for case classes, but we do it for user-defined unapplies as well [SPEC] // (the prefix of the argument passed to the unapply must equal the prefix of the type of the binder) val typeTest = TypeTestTreeMaker(binder, binder, paramType, paramType)(pos, extractorArgTypeTest = true) - val binderKnownNonNull = typeTest impliesBinderNonNull binder - assert(binderKnownNonNull, s"$binder") - // skip null test if it's implied - if (binderKnownNonNull) { - val unappBinder = typeTest.nextBinder - (typeTest :: treeMakers(unappBinder, pos), unappBinder) - } else { - val nonNullTest = NonNullTestTreeMaker(typeTest.nextBinder, paramType, pos) - val unappBinder = nonNullTest.nextBinder - (typeTest :: nonNullTest :: 
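For readers less familiar with the extractor shape used by t12405 above: `C.unapply` returns the value class itself, which works because the pattern matcher only needs `isEmpty`/`get` on the result (a name-based extractor), and a value of a value-class type can never be `null`, which is why the generated match for `m2` above carries no null test. A small self-contained illustration of that protocol (the names here are made up for the example):

```
// Name-based extraction: the unapply result only needs isEmpty/get, no Option involved.
final class Wrapped[A](val value: A) extends AnyVal {
  def isEmpty: Boolean = value == null
  def get: A = value
}

object Wrapped {
  def unapply[A](w: Wrapped[A]): Wrapped[A] = w
}

object NameBasedDemo {
  def describe(w: Wrapped[String]): String = w match {
    case Wrapped(s) => s   // unapply, then isEmpty/get on the value-class result
    case _          => "<empty>"
  }

  def main(args: Array[String]): Unit = {
    println(describe(new Wrapped("hi")))           // hi
    println(describe(new Wrapped[String](null)))   // <empty>
  }
}
```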
treeMakers(unappBinder, pos), unappBinder) - } + // binder is known non-null because the type test would not succeed on `null` + val unappBinder = typeTest.nextBinder + (typeTest :: treeMakers(unappBinder, pos), unappBinder) } } diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala index 6d62def995e3..0c7646fb03b4 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala @@ -454,17 +454,6 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { def and(a: Result, b: Result): Result = false // we don't and type tests, so the conjunction must include at least one false def tru = true } - - def nonNullImpliedByTestChecker(binder: Symbol) = new TypeTestCondStrategy { - type Result = Boolean - - def typeTest(testedBinder: Symbol, expectedTp: Type): Result = testedBinder eq binder - def nonNullTest(testedBinder: Symbol): Result = testedBinder eq binder - def equalsTest(pat: Tree, testedBinder: Symbol): Result = false // could in principle analyse pat and see if it's statically known to be non-null - def eqTest(pat: Tree, testedBinder: Symbol): Result = false // could in principle analyse pat and see if it's statically known to be non-null - def and(a: Result, b: Result): Result = a || b - def tru = false - } } /** implements the run-time aspects of (§8.2) (typedPattern has already done the necessary type transformations) @@ -561,8 +550,6 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { // is this purely a type test, e.g. no outer check, no equality tests (used in switch emission) def isPureTypeTest = renderCondition(pureTypeTestChecker) - def impliesBinderNonNull(binder: Symbol) = renderCondition(nonNullImpliedByTestChecker(binder)) - override def toString = "TT"+((expectedTp, testedBinder.name, nextBinderTp)) } From c36f73928a5a836b75632513564bd8d257048fbd Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Mon, 31 May 2021 10:58:56 -0700 Subject: [PATCH 224/769] Regression test for varargs and seq override --- test/files/neg/t7052.check | 7 +++++++ test/files/neg/t7052.scala | 21 +++++++++++++++++++++ test/files/neg/t7052b.check | 6 ++++++ test/files/neg/t7052b.scala | 21 +++++++++++++++++++++ 4 files changed, 55 insertions(+) create mode 100644 test/files/neg/t7052.check create mode 100644 test/files/neg/t7052.scala create mode 100644 test/files/neg/t7052b.check create mode 100644 test/files/neg/t7052b.scala diff --git a/test/files/neg/t7052.check b/test/files/neg/t7052.check new file mode 100644 index 000000000000..6816f79bde81 --- /dev/null +++ b/test/files/neg/t7052.check @@ -0,0 +1,7 @@ +t7052.scala:9: error: name clash between defined and inherited member: +def apply(xs: Int*): Int in class A and +def apply(xs: Seq[Int]): Int at line 9 +have same type after erasure: (xs: Seq): Int + def apply(xs: Seq[Int]) = 27 + ^ +1 error diff --git a/test/files/neg/t7052.scala b/test/files/neg/t7052.scala new file mode 100644 index 000000000000..0cfad0dce678 --- /dev/null +++ b/test/files/neg/t7052.scala @@ -0,0 +1,21 @@ + +class A { + def apply(xs: Int*) = 42 +} + +/* name clash between defined and inherited member: + */ +class B extends A { + def apply(xs: Seq[Int]) = 27 +} + +/* method apply overrides nothing. 
+class C extends A { + override def apply(xs: Seq[Int]) = 17 +} + */ + +// ok because different return type +class D extends A { + def apply(xs: Seq[Int]) = "42" +} diff --git a/test/files/neg/t7052b.check b/test/files/neg/t7052b.check new file mode 100644 index 000000000000..c45d895b65c0 --- /dev/null +++ b/test/files/neg/t7052b.check @@ -0,0 +1,6 @@ +t7052b.scala:15: error: method apply overrides nothing. +Note: the super classes of class C contain the following, non final members named apply: +def apply(xs: Int*): Int + override def apply(xs: Seq[Int]) = 17 + ^ +1 error diff --git a/test/files/neg/t7052b.scala b/test/files/neg/t7052b.scala new file mode 100644 index 000000000000..8c410e8bf0ef --- /dev/null +++ b/test/files/neg/t7052b.scala @@ -0,0 +1,21 @@ + +class A { + def apply(xs: Int*) = 42 +} + +/* name clash between defined and inherited member: +class B extends A { + def apply(xs: Seq[Int]) = 27 +} + */ + +/* method apply overrides nothing. + */ +class C extends A { + override def apply(xs: Seq[Int]) = 17 +} + +// ok because different return type +class D extends A { + def apply(xs: Seq[Int]) = "42" +} From 927c0131de90fa8c416fdc202c907bb1065b6201 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Mon, 31 May 2021 21:27:26 -0700 Subject: [PATCH 225/769] sbt 1.5.3 (was 1.5.2) --- project/build.properties | 2 +- scripts/common | 2 +- src/intellij/scala.ipr.SAMPLE | 36 +++++++++++++------------- test/jcstress/project/build.properties | 2 +- 4 files changed, 21 insertions(+), 21 deletions(-) diff --git a/project/build.properties b/project/build.properties index 19479ba46ff2..67d27a1dfe00 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.5.2 +sbt.version=1.5.3 diff --git a/scripts/common b/scripts/common index 178ea86dbce9..8f6c3aa3bef8 100644 --- a/scripts/common +++ b/scripts/common @@ -11,7 +11,7 @@ else fi SBT_CMD=${SBT_CMD-sbt} -SBT_CMD="$SBT_CMD -sbt-version 1.5.2" +SBT_CMD="$SBT_CMD -sbt-version 1.5.3" # repo to publish builds integrationRepoUrl=${integrationRepoUrl-"https://scala-ci.typesafe.com/artifactory/scala-integration/"} diff --git a/src/intellij/scala.ipr.SAMPLE b/src/intellij/scala.ipr.SAMPLE index 70d7b2a8f6ee..797c804d9fcf 100644 --- a/src/intellij/scala.ipr.SAMPLE +++ b/src/intellij/scala.ipr.SAMPLE @@ -325,13 +325,13 @@ - + - + - - + + @@ -355,7 +355,7 @@ - + @@ -369,8 +369,8 @@ - - + + @@ -382,13 +382,13 @@ - + - + @@ -402,16 +402,16 @@ - + - + - + - + @@ -435,18 +435,18 @@ - - + + - + - + - + diff --git a/test/jcstress/project/build.properties b/test/jcstress/project/build.properties index 19479ba46ff2..67d27a1dfe00 100644 --- a/test/jcstress/project/build.properties +++ b/test/jcstress/project/build.properties @@ -1 +1 @@ -sbt.version=1.5.2 +sbt.version=1.5.3 From 10eea5fe619295b694927abb5868095a2540e28f Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Tue, 1 Jun 2021 11:31:11 +0200 Subject: [PATCH 226/769] Generalize condition when to skip override checking for Java members --- .../tools/nsc/transform/OverridingPairs.scala | 18 +++++++++--------- test/files/pos/t12407/A.java | 10 ++++++++++ test/files/pos/t12407/Test.scala | 1 + 3 files changed, 20 insertions(+), 9 deletions(-) create mode 100644 test/files/pos/t12407/A.java create mode 100644 test/files/pos/t12407/Test.scala diff --git a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala index 1eeb283560f3..fc9592732517 100644 --- a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala 
+++ b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala @@ -55,15 +55,15 @@ abstract class OverridingPairs extends SymbolPairs { ) // TODO we don't call exclude(high), should we? override protected def skipOwnerPair(lowClass: Symbol, highClass: Symbol): Boolean = { - // Two Java-defined methods can be skipped in most cases, as javac will check the overrides; skipping is - // actually necessary to avoid false errors, as Java doesn't have the Scala's linearization rules. However, when - // a Java interface is mixed into a Scala class, mixed-in default methods need to go through override checking - // (neg/t12394). Checking is also required if the "mixed-in" Java interface method is abstract (neg/t12380). - lowClass.isJavaDefined && highClass.isJavaDefined && { - !lowClass.isJavaInterface && !highClass.isJavaInterface || { - !base.info.parents.tail.exists(p => { - val psym = p.typeSymbol - psym.isNonBottomSubClass(lowClass) || psym.isNonBottomSubClass(highClass) + // Two Java-defined methods can be skipped if javac will check the overrides. Skipping is actually necessary to + // avoid false errors, as Java doesn't have the Scala's linearization rules and subtyping rules + // (`Array[String] <:< Array[Object]`). However, when a Java interface is mixed into a Scala class, mixed-in + // methods need to go through override checking (neg/t12394, neg/t12380). + lowClass.isJavaDefined && highClass.isJavaDefined && { // skip if both are java-defined, and + lowClass.isNonBottomSubClass(highClass) || { // - low <:< high, which means they are overrides in Java and javac is doing the check; or + base.info.parents.tail.forall(p => { // - every mixin parent is unrelated to (not a subclass of) low and high, i.e., + val psym = p.typeSymbol // we're not mixing in high or low, both are coming from the superclass + !psym.isNonBottomSubClass(lowClass) && !psym.isNonBottomSubClass(highClass) }) } } diff --git a/test/files/pos/t12407/A.java b/test/files/pos/t12407/A.java new file mode 100644 index 000000000000..fd2c83a43298 --- /dev/null +++ b/test/files/pos/t12407/A.java @@ -0,0 +1,10 @@ +public class A { + public interface I { + I[] getArray(); + } + + public interface J extends I { + @Override + J[] getArray(); + } +} diff --git a/test/files/pos/t12407/Test.scala b/test/files/pos/t12407/Test.scala new file mode 100644 index 000000000000..6ef6c534d423 --- /dev/null +++ b/test/files/pos/t12407/Test.scala @@ -0,0 +1 @@ +trait Test extends A.J From 99af0f133e9e608997278d657c1cdac465d78f33 Mon Sep 17 00:00:00 2001 From: Georgi Krastev Date: Tue, 1 Jun 2021 18:49:14 +0300 Subject: [PATCH 227/769] Fix specialization of methods with dependent return types Substitute both type and value parameter symbols in return type --- .../tools/nsc/transform/SpecializeTypes.scala | 35 ++++++++++--------- test/files/pos/t12210.scala | 20 +++++++++++ 2 files changed, 39 insertions(+), 16 deletions(-) create mode 100644 test/files/pos/t12210.scala diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index 0e68021ae7ca..7c10c86a7bbd 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -1903,32 +1903,35 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { debuglog("specializing body of" + symbol.defString) val DefDef(_, _, tparams, vparams :: Nil, tpt, _) = tree: @unchecked val env = typeEnv(symbol) - val origtparams = 
source.typeParams.filter(tparam => !env.contains(tparam) || !isPrimitiveValueType(env(tparam))) - if (origtparams.nonEmpty || symbol.typeParams.nonEmpty) - debuglog("substituting " + origtparams + " for " + symbol.typeParams) + + val srcVparams = parameters(source) + val srcTparams = source.typeParams.filter(tparam => !env.contains(tparam) || !isPrimitiveValueType(env(tparam))) + if (settings.isDebug && (srcTparams.nonEmpty || symbol.typeParams.nonEmpty)) + debuglog("substituting " + srcTparams + " for " + symbol.typeParams) // skolemize type parameters - val oldtparams = tparams map (_.symbol) - val newtparams = deriveFreshSkolems(oldtparams) - map2(tparams, newtparams)(_ setSymbol _) + val oldTparams = tparams.map(_.symbol) + val newTparams = deriveFreshSkolems(oldTparams) + map2(tparams, newTparams)(_ setSymbol _) // create fresh symbols for value parameters to hold the skolem types - val newSyms = cloneSymbolsAtOwnerAndModify(vparams map (_.symbol), symbol, _.substSym(oldtparams, newtparams)) + val oldVparams = vparams.map(_.symbol) + val newVparams = cloneSymbolsAtOwnerAndModify(oldVparams, symbol, _.substSym(oldTparams, newTparams)) + + val srcParams = srcVparams ::: srcTparams + val oldParams = oldVparams ::: oldTparams + val newParams = newVparams ::: newTparams // replace value and type parameters of the old method with the new ones // log("Adding body for " + tree.symbol + " - origtparams: " + origtparams + "; tparams: " + tparams) // log("Type vars of: " + source + ": " + source.typeParams) // log("Type env of: " + tree.symbol + ": " + boundTvars) // log("newtparams: " + newtparams) - val symSubstituter = new ImplementationAdapter( - parameters(source) ::: origtparams, - newSyms ::: newtparams, - source.enclClass, - false) // don't make private fields public - - val newBody = symSubstituter(body(source).duplicate) - tpt modifyType (_.substSym(oldtparams, newtparams)) - copyDefDef(tree)(vparamss = List(newSyms map ValDef.apply), rhs = newBody) + // don't make private fields public + val substituter = new ImplementationAdapter(srcParams, newParams, source.enclClass, false) + val newRhs = substituter(body(source).duplicate) + tpt.modifyType(_.substSym(oldParams, newParams)) + copyDefDef(tree)(vparamss = newVparams.map(ValDef.apply) :: Nil, rhs = newRhs) } /** Create trees for specialized members of 'sClass', based on the diff --git a/test/files/pos/t12210.scala b/test/files/pos/t12210.scala new file mode 100644 index 000000000000..35d6cdbf8c87 --- /dev/null +++ b/test/files/pos/t12210.scala @@ -0,0 +1,20 @@ +trait SpecFun[@specialized T] { + type Res + def res: Res +} + +object Test { + def m[@specialized T](op: SpecFun[T]): op.Res = op.res +} + +trait ValuesVisitor[A] { + def visit(a: A): Unit + def visitArray(arr: Array[A]): Unit = ??? 
+} + +class OpArray[@specialized A] { + def traverse(from: Array[A], fn: ValuesVisitor[A]): fn.type = { + fn.visitArray(from) + fn + } +} From 836c5a904bf3845403d69dfebbfb145e7209b04b Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Mon, 24 May 2021 23:55:09 -0700 Subject: [PATCH 228/769] ArrayOps must convert to result array type --- src/library/scala/collection/ArrayOps.scala | 26 +++++++++---------- test/files/run/t12403.scala | 9 +++++++ .../junit/scala/collection/ArrayOpsTest.scala | 20 ++++++++++++++ 3 files changed, 42 insertions(+), 13 deletions(-) create mode 100644 test/files/run/t12403.scala diff --git a/src/library/scala/collection/ArrayOps.scala b/src/library/scala/collection/ArrayOps.scala index 370acfce2f1a..aec8156599be 100644 --- a/src/library/scala/collection/ArrayOps.scala +++ b/src/library/scala/collection/ArrayOps.scala @@ -1569,18 +1569,18 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * ''n'' times in `that`, then the first ''n'' occurrences of `x` will not form * part of the result, but any following occurrences will. */ - def diff[B >: A](that: Seq[B]): Array[A] = mutable.ArraySeq.make(xs).diff(that).array.asInstanceOf[Array[A]] + def diff[B >: A](that: Seq[B]): Array[A] = mutable.ArraySeq.make(xs).diff(that).toArray[A] /** Computes the multiset intersection between this array and another sequence. - * - * @param that the sequence of elements to intersect with. - * @return a new array which contains all elements of this array - * which also appear in `that`. - * If an element value `x` appears - * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained - * in the result, but any following occurrences will be omitted. - */ - def intersect[B >: A](that: Seq[B]): Array[A] = mutable.ArraySeq.make(xs).intersect(that).array.asInstanceOf[Array[A]] + * + * @param that the sequence of elements to intersect with. + * @return a new array which contains all elements of this array + * which also appear in `that`. + * If an element value `x` appears + * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained + * in the result, but any following occurrences will be omitted. + */ + def intersect[B >: A](that: Seq[B]): Array[A] = mutable.ArraySeq.make(xs).intersect(that).toArray[A] /** Groups elements in fixed size blocks by passing a "sliding window" * over them (as opposed to partitioning them, as is done in grouped.) @@ -1592,7 +1592,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * last element (which may be the only element) will be truncated * if there are fewer than `size` elements remaining to be grouped. */ - def sliding(size: Int, step: Int = 1): Iterator[Array[A]] = mutable.ArraySeq.make(xs).sliding(size, step).map(_.array.asInstanceOf[Array[A]]) + def sliding(size: Int, step: Int = 1): Iterator[Array[A]] = mutable.ArraySeq.make(xs).sliding(size, step).map(_.toArray[A]) /** Iterates over combinations. A _combination_ of length `n` is a subsequence of * the original array, with the elements taken in order. 
Thus, `Array("x", "y")` and `Array("y", "y")` @@ -1609,7 +1609,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * Array("a", "b", "b", "b", "c").combinations(2) == Iterator(Array(a, b), Array(a, c), Array(b, b), Array(b, c)) * }}} */ - def combinations(n: Int): Iterator[Array[A]] = mutable.ArraySeq.make(xs).combinations(n).map(_.array.asInstanceOf[Array[A]]) + def combinations(n: Int): Iterator[Array[A]] = mutable.ArraySeq.make(xs).combinations(n).map(_.toArray[A]) /** Iterates over distinct permutations. * @@ -1618,7 +1618,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { * Array("a", "b", "b").permutations == Iterator(Array(a, b, b), Array(b, a, b), Array(b, b, a)) * }}} */ - def permutations: Iterator[Array[A]] = mutable.ArraySeq.make(xs).permutations.map(_.array.asInstanceOf[Array[A]]) + def permutations: Iterator[Array[A]] = mutable.ArraySeq.make(xs).permutations.map(_.toArray[A]) // we have another overload here, so we need to duplicate this method /** Tests whether this array contains the given sequence at a given index. diff --git a/test/files/run/t12403.scala b/test/files/run/t12403.scala new file mode 100644 index 000000000000..76342193e786 --- /dev/null +++ b/test/files/run/t12403.scala @@ -0,0 +1,9 @@ + +object Test extends App { + val xs = + Array.empty[Double] + val ys = + Array(0.0) + assert(xs.intersect(ys).getClass.getComponentType == classOf[Double]) + assert(Array.empty[Double].intersect(Array(0.0)).getClass.getComponentType == classOf[Double]) +} diff --git a/test/junit/scala/collection/ArrayOpsTest.scala b/test/junit/scala/collection/ArrayOpsTest.scala index 06a1cc2713f3..3283caa252fd 100644 --- a/test/junit/scala/collection/ArrayOpsTest.scala +++ b/test/junit/scala/collection/ArrayOpsTest.scala @@ -122,4 +122,24 @@ class ArrayOpsTest { val a: Array[Byte] = new Array[Byte](1000).sortWith { _ < _ } assertEquals(0, a(0)) } + + @Test + def `empty intersection has correct component type for array`(): Unit = { + val something = Array(3.14) + val nothing = Array[Double]() + val empty = Array.empty[Double] + + assertEquals(classOf[Double], nothing.intersect(something).getClass.getComponentType) + assertTrue(nothing.intersect(something).isEmpty) + + assertEquals(classOf[Double], empty.intersect(something).getClass.getComponentType) + assertTrue(empty.intersect(something).isEmpty) + assertEquals(classOf[Double], empty.intersect(nothing).getClass.getComponentType) + assertTrue(empty.intersect(nothing).isEmpty) + + assertEquals(classOf[Double], something.intersect(nothing).getClass.getComponentType) + assertTrue(something.intersect(nothing).isEmpty) + assertEquals(classOf[Double], something.intersect(empty).getClass.getComponentType) + assertTrue(something.intersect(empty).isEmpty) + } } From 9a1274feb1600a6428c9803ed4018d94a5649c80 Mon Sep 17 00:00:00 2001 From: Georgi Krastev Date: Thu, 3 Jun 2021 23:03:07 +0300 Subject: [PATCH 229/769] More details to forward reference error messages Include the referenced symbol and the line where it's defined. 
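The new case added to forward.scala below is the interesting one: the reference to `ec` is never written by the user, it is introduced when the for-comprehension is expanded into a `map` call that takes the implicit `ExecutionContext`. A compiling sketch of the same shape (the expansion shown in the comment is approximate):

```
import scala.concurrent._

object ForwardRefSketch {
  def fine(): Future[Int] = {
    implicit val ec: ExecutionContext = ExecutionContext.global // defined before it is needed
    val fInt = Future.successful(1)
    val z = for { a <- fInt } yield a // expands to roughly fInt.map(a => a)(ec)
    z
  }
  // Declaring `implicit val ec` below `val z` instead turns that hidden `ec` argument into a
  // forward reference, and the error now names the symbol and where it is defined:
  //   forward reference to value ec defined on line NN extends over definition of value z
}
```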
--- .../scala/tools/nsc/typechecker/RefChecks.scala | 13 ++++++++----- test/files/neg/forward.check | 11 +++++++---- test/files/neg/forward.scala | 11 +++++++++++ test/files/neg/t2910.check | 10 +++++----- test/files/neg/t4098.check | 8 ++++---- test/files/neg/t4419.check | 2 +- test/files/neg/t5390.check | 2 +- test/files/neg/t5390b.check | 2 +- test/files/neg/t5390c.check | 2 +- test/files/neg/t5390d.check | 2 +- 10 files changed, 40 insertions(+), 23 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index d69f02710abc..88dd49c3417e 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -1215,14 +1215,18 @@ abstract class RefChecks extends Transform { finally popLevel() } + private def showCurrentRef: String = { + val refsym = currentLevel.refsym + s"$refsym defined on line ${refsym.pos.line}" + } + def transformStat(tree: Tree, index: Int): Tree = tree match { case t if treeInfo.isSelfConstrCall(t) => assert(index == 0, index) try transform(tree) finally if (currentLevel.maxindex > 0) { - // An implementation restriction to avoid VerifyErrors and lazyvals mishaps; see scala/bug#4717 - debuglog("refsym = " + currentLevel.refsym) - reporter.error(currentLevel.refpos, "forward reference not allowed from self constructor invocation") + // An implementation restriction to avoid VerifyErrors and lazy vals mishaps; see scala/bug#4717 + reporter.error(currentLevel.refpos, s"forward reference to $showCurrentRef not allowed from self constructor invocation") } case ValDef(_, _, _, _) => val tree1 = transform(tree) // important to do before forward reference check @@ -1230,8 +1234,7 @@ abstract class RefChecks extends Transform { else { val sym = tree.symbol if (sym.isLocalToBlock && index <= currentLevel.maxindex) { - debuglog("refsym = " + currentLevel.refsym) - reporter.error(currentLevel.refpos, "forward reference extends over definition of " + sym) + reporter.error(currentLevel.refpos, s"forward reference to $showCurrentRef extends over definition of $sym") } tree1 } diff --git a/test/files/neg/forward.check b/test/files/neg/forward.check index 12051a1c14f7..79630f888fbd 100644 --- a/test/files/neg/forward.check +++ b/test/files/neg/forward.check @@ -1,10 +1,13 @@ -forward.scala:6: error: forward reference extends over definition of value x +forward.scala:8: error: forward reference to value x defined on line 9 extends over definition of value x def f: Int = x; ^ -forward.scala:10: error: forward reference extends over definition of value x +forward.scala:12: error: forward reference to method g defined on line 14 extends over definition of value x def f: Int = g; ^ -forward.scala:15: error: forward reference extends over definition of variable x +forward.scala:17: error: forward reference to method g defined on line 19 extends over definition of variable x def f: Int = g; ^ -3 errors +forward.scala:29: error: forward reference to value ec defined on line 32 extends over definition of value z + a <- fInt + ^ +4 errors diff --git a/test/files/neg/forward.scala b/test/files/neg/forward.scala index d5c0851f09e3..bf1fc7ac8c95 100644 --- a/test/files/neg/forward.scala +++ b/test/files/neg/forward.scala @@ -1,3 +1,5 @@ +import scala.concurrent._ + object Test { def f: Int = x; val x: Int = f; @@ -21,4 +23,13 @@ object Test { Console.println("foo"); def g: Int = f; } + { + val fInt = Future.successful(1) + val z = for { + a <- 
fInt + } yield a + + implicit val ec: ExecutionContext = ExecutionContext.Implicits.global + z + } } diff --git a/test/files/neg/t2910.check b/test/files/neg/t2910.check index cdf36f9eaa14..fd98de338b06 100644 --- a/test/files/neg/t2910.check +++ b/test/files/neg/t2910.check @@ -1,16 +1,16 @@ -t2910.scala:3: error: forward reference extends over definition of value ret +t2910.scala:3: error: forward reference to value MyMatch defined on line 4 extends over definition of value ret val ret = l.collect({ case MyMatch(id) => id }) ^ -t2910.scala:9: error: forward reference extends over definition of value z +t2910.scala:9: error: forward reference to lazy value s defined on line 11 extends over definition of value z println(s.length) ^ -t2910.scala:16: error: forward reference extends over definition of value z +t2910.scala:16: error: forward reference to lazy value x defined on line 18 extends over definition of value z x ^ -t2910.scala:30: error: forward reference extends over definition of value x +t2910.scala:30: error: forward reference to value x defined on line 31 extends over definition of value x lazy val f: Int = x ^ -t2910.scala:35: error: forward reference extends over definition of variable x +t2910.scala:35: error: forward reference to lazy value g defined on line 37 extends over definition of variable x lazy val f: Int = g ^ 5 errors diff --git a/test/files/neg/t4098.check b/test/files/neg/t4098.check index 590cee98698d..8e15e90abaa3 100644 --- a/test/files/neg/t4098.check +++ b/test/files/neg/t4098.check @@ -1,13 +1,13 @@ -t4098.scala:3: error: forward reference not allowed from self constructor invocation +t4098.scala:3: error: forward reference to method b defined on line 4 not allowed from self constructor invocation this(b) ^ -t4098.scala:8: error: forward reference not allowed from self constructor invocation +t4098.scala:8: error: forward reference to lazy value b defined on line 9 not allowed from self constructor invocation this(b) ^ -t4098.scala:13: error: forward reference not allowed from self constructor invocation +t4098.scala:13: error: forward reference to value b defined on line 14 not allowed from self constructor invocation this(b) ^ -t4098.scala:18: error: forward reference not allowed from self constructor invocation +t4098.scala:18: error: forward reference to method b defined on line 20 not allowed from self constructor invocation this(b) ^ 4 errors diff --git a/test/files/neg/t4419.check b/test/files/neg/t4419.check index 7cf623541a9d..cce4223ecf24 100644 --- a/test/files/neg/t4419.check +++ b/test/files/neg/t4419.check @@ -1,4 +1,4 @@ -t4419.scala:2: error: forward reference extends over definition of value b +t4419.scala:2: error: forward reference to value a defined on line 2 extends over definition of value b { val b = a; val a = 1 ; println(a) } ^ 1 error diff --git a/test/files/neg/t5390.check b/test/files/neg/t5390.check index ddd56cd611ae..0f5b2a3a4e02 100644 --- a/test/files/neg/t5390.check +++ b/test/files/neg/t5390.check @@ -1,4 +1,4 @@ -t5390.scala:7: error: forward reference extends over definition of value b +t5390.scala:7: error: forward reference to value a defined on line 8 extends over definition of value b val b = a.B("") ^ 1 error diff --git a/test/files/neg/t5390b.check b/test/files/neg/t5390b.check index d54d6110b977..55c13c06d7d5 100644 --- a/test/files/neg/t5390b.check +++ b/test/files/neg/t5390b.check @@ -1,4 +1,4 @@ -t5390b.scala:7: error: forward reference extends over definition of value b +t5390b.scala:7: error: forward 
reference to value a defined on line 8 extends over definition of value b val b = a.B("") ^ 1 error diff --git a/test/files/neg/t5390c.check b/test/files/neg/t5390c.check index 861d6447b81d..1688bb3f4afb 100644 --- a/test/files/neg/t5390c.check +++ b/test/files/neg/t5390c.check @@ -1,4 +1,4 @@ -t5390c.scala:7: error: forward reference extends over definition of value b +t5390c.scala:7: error: forward reference to value a defined on line 8 extends over definition of value b val b = new a.B("") ^ 1 error diff --git a/test/files/neg/t5390d.check b/test/files/neg/t5390d.check index ed117ea9dac2..c814ddd53cb8 100644 --- a/test/files/neg/t5390d.check +++ b/test/files/neg/t5390d.check @@ -1,4 +1,4 @@ -t5390d.scala:7: error: forward reference extends over definition of value b +t5390d.scala:7: error: forward reference to value a defined on line 8 extends over definition of value b val b = a.B.toString ^ 1 error From bcf44e4e53beb19eb42118cdeb4bf37143f8b686 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 25 Jan 2021 17:00:45 +1000 Subject: [PATCH 230/769] Improvements to code assist in the REPL Re-enable acronym-style completion, e.g. getClass.gdm` offers `getDeclaredMethod[s]`. Under JLine completion, move all filtering up in the UI layer. Reimplement #9510 (dealing with overloads that contain some deprecated alternatives) in the UI layer Fix completion of keyword-starting-idents (e.g. `this.for` offers `formatted`. Register a widget on CTRL-SHIFT-T that prints the type of the expression at the cursor. A second invokation prints the desugared AST. Enable levenstien based typo matching, but disable it for short strings which IMO tends to offer confusing results. Enable levenstien based typo matching: ``` scala> scala.tools.nsc.util.EditDistance.levenshtien scala> scala.tools.nsc.util.EditDistance.levenshtein ``` --- .../scala/tools/nsc/interactive/Global.scala | 64 ++++---- .../scala/reflect/internal/Positions.scala | 3 +- .../scala/reflect/internal/Printers.scala | 40 ++--- .../tools/nsc/interpreter/jline/Reader.scala | 113 ++++++++++---- .../nsc/interpreter/shell/Completion.scala | 13 +- .../tools/nsc/interpreter/shell/ILoop.scala | 16 +- .../nsc/interpreter/shell/LoopCommands.scala | 16 +- .../interpreter/shell/ReplCompletion.scala | 35 ++--- .../scala/tools/nsc/interpreter/IMain.scala | 31 ++-- .../tools/nsc/interpreter/Interface.scala | 11 +- .../interpreter/PresentationCompilation.scala | 138 +++++++++--------- test/files/run/repl-completions.check | 3 +- .../nsc/interpreter/CompletionTest.scala | 47 +++--- versions.properties | 2 +- 14 files changed, 298 insertions(+), 234 deletions(-) diff --git a/src/interactive/scala/tools/nsc/interactive/Global.scala b/src/interactive/scala/tools/nsc/interactive/Global.scala index c99fe6637aff..00743ffb8f7a 100644 --- a/src/interactive/scala/tools/nsc/interactive/Global.scala +++ b/src/interactive/scala/tools/nsc/interactive/Global.scala @@ -1197,54 +1197,36 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") override def positionDelta = 0 override def forImport: Boolean = false } - private val CamelRegex = "([A-Z][^A-Z]*)".r - private def camelComponents(s: String, allowSnake: Boolean): List[String] = { - if (allowSnake && s.forall(c => c.isUpper || c == '_')) s.split('_').toList.filterNot(_.isEmpty) - else CamelRegex.findAllIn("X" + s).toList match { case head :: tail => head.drop(1) :: tail; case Nil => Nil } - } - def camelMatch(entered: Name): Name => Boolean = { - val enteredS = entered.toString - val 
enteredLowercaseSet = enteredS.toLowerCase().toSet - val allowSnake = !enteredS.contains('_') - - { - candidate: Name => - def candidateChunks = camelComponents(candidate.dropLocal.toString, allowSnake) - // Loosely based on IntelliJ's autocompletion: the user can just write everything in - // lowercase, as we'll let `isl` match `GenIndexedSeqLike` or `isLovely`. - def lenientMatch(entered: String, candidate: List[String], matchCount: Int): Boolean = { - candidate match { - case Nil => entered.isEmpty && matchCount > 0 - case head :: tail => - val enteredAlternatives = Set(entered, entered.capitalize) - val n = head.toIterable.lazyZip(entered).count {case (c, e) => c == e || (c.isUpper && c == e.toUpper)} - head.take(n).inits.exists(init => - enteredAlternatives.exists(entered => - lenientMatch(entered.stripPrefix(init), tail, matchCount + (if (init.isEmpty) 0 else 1)) - ) - ) - } - } - val containsAllEnteredChars = { - // Trying to rule out some candidates quickly before the more expensive `lenientMatch` - val candidateLowercaseSet = candidate.toString.toLowerCase().toSet - enteredLowercaseSet.diff(candidateLowercaseSet).isEmpty - } - containsAllEnteredChars && lenientMatch(enteredS, candidateChunks, 0) - } - } } final def completionsAt(pos: Position): CompletionResult = { val focus1: Tree = typedTreeAt(pos) def typeCompletions(tree: Tree, qual: Tree, nameStart: Int, name: Name): CompletionResult = { val qualPos = qual.pos - val allTypeMembers = typeMembers(qualPos).last + val saved = tree.tpe + // Force `typeMembers` to complete via the prefix, not the type of the Select itself. + tree.setType(ErrorType) + val allTypeMembers = try { + typeMembers(qualPos).last + } finally { + tree.setType(saved) + } val positionDelta: Int = pos.start - nameStart val subName: Name = name.newName(new String(pos.source.content, nameStart, pos.start - nameStart)).encodedName CompletionResult.TypeMembers(positionDelta, qual, tree, allTypeMembers, subName) } focus1 match { + case Apply(Select(qual, name), _) if qual.hasAttachment[InterpolatedString.type] => + // This special case makes CompletionTest.incompleteStringInterpolation work. + // In incomplete code, the parser treats `foo""` as a nested string interpolation, even + // though it is likely that the user wanted to complete `fooBar` before adding the closing brace. + // val fooBar = 42; s"abc ${foo" + // + // TODO: We could also complete the selection here to expand `ra"..."` to `raw"..."`. 
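The block removed above is the interactive compiler's own camel/acronym matcher; with this patch that behaviour comes from JLine instead (the `COMPLETE_MATCHER_CAMELCASE` option enabled in Reader.scala further down). For readers who have not seen the feature, a minimal hump matcher that conveys the idea — an illustration only, not JLine's actual algorithm:

```
object HumpMatchSketch {
  private val Hump = "[A-Z]?[^A-Z]*".r

  /** First letter of each camel hump, lowercased: "getDeclaredMethods" -> "gdm". */
  def initials(candidate: String): String =
    Hump.findAllIn(candidate).filter(_.nonEmpty).map(_.head.toLower).mkString

  /** Crude approximation: the typed text must be a prefix of the hump initials. */
  def humpMatches(typed: String, candidate: String): Boolean =
    initials(candidate).startsWith(typed.toLowerCase)

  def main(args: Array[String]): Unit = {
    val members = List("getDeclaredMethods", "getDeclaredMethod", "getModifiers", "getClass")
    println(members.filter(humpMatches("gdm", _))) // List(getDeclaredMethods, getDeclaredMethod)
    println(members.filter(humpMatches("gm", _)))  // List(getModifiers)
  }
}
```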
+ val allMembers = scopeMembers(pos) + val positionDelta: Int = pos.start - focus1.pos.start + val subName = name.subName(0, positionDelta) + CompletionResult.ScopeMembers(positionDelta, allMembers, subName, forImport = false) case imp@Import(i @ Ident(name), head :: Nil) if head.name == nme.ERROR => val allMembers = scopeMembers(pos) val nameStart = i.pos.start @@ -1259,9 +1241,13 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") } case sel@Select(qual, name) => val qualPos = qual.pos - def fallback = qualPos.end + 2 + val effectiveQualEnd = if (qualPos.isRange) qualPos.end else qualPos.point - 1 + def fallback = { + effectiveQualEnd + 2 + } val source = pos.source - val nameStart: Int = (focus1.pos.end - 1 to qualPos.end by -1).find(p => + + val nameStart: Int = (focus1.pos.end - 1 to effectiveQualEnd by -1).find(p => source.identifier(source.position(p)).exists(_.length == 0) ).map(_ + 1).getOrElse(fallback) typeCompletions(sel, qual, nameStart, name) diff --git a/src/reflect/scala/reflect/internal/Positions.scala b/src/reflect/scala/reflect/internal/Positions.scala index 54183d7f3867..bfc995d96cc9 100644 --- a/src/reflect/scala/reflect/internal/Positions.scala +++ b/src/reflect/scala/reflect/internal/Positions.scala @@ -345,7 +345,8 @@ trait Positions extends api.Positions { self: SymbolTable => if (t.pos includes pos) { if (isEligible(t)) last = t super.traverse(t) - } else t match { + } + t match { case mdef: MemberDef => val annTrees = mdef.mods.annotations match { case Nil if mdef.symbol != null => diff --git a/src/reflect/scala/reflect/internal/Printers.scala b/src/reflect/scala/reflect/internal/Printers.scala index efc2da391027..8d62aea85931 100644 --- a/src/reflect/scala/reflect/internal/Printers.scala +++ b/src/reflect/scala/reflect/internal/Printers.scala @@ -781,26 +781,30 @@ trait Printers extends api.Printers { self: SymbolTable => print("class ", printedName(name)) printTypeParams(tparams) - val build.SyntacticClassDef(_, _, _, ctorMods, vparamss, earlyDefs, parents, selfType, body) = cl: @unchecked - - // constructor's modifier - if (ctorMods.hasFlag(AccessFlags) || ctorMods.hasAccessBoundary) { - print(" ") - printModifiers(ctorMods, primaryCtorParam = false) - } + cl match { + case build.SyntacticClassDef(_, _, _, ctorMods, vparamss, earlyDefs, parents, selfType, body) => + // constructor's modifier + if (ctorMods.hasFlag(AccessFlags) || ctorMods.hasAccessBoundary) { + print(" ") + printModifiers(ctorMods, primaryCtorParam = false) + } - def printConstrParams(ts: List[ValDef]): Unit = { - parenthesize() { - printImplicitInParamsList(ts) - printSeq(ts)(printVParam(_, primaryCtorParam = true))(print(", ")) - } - } - // constructor's params processing (don't print single empty constructor param list) - vparamss match { - case Nil | List(Nil) if !mods.isCase && !ctorMods.hasFlag(AccessFlags) => - case _ => vparamss foreach printConstrParams + def printConstrParams(ts: List[ValDef]): Unit = { + parenthesize() { + printImplicitInParamsList(ts) + printSeq(ts)(printVParam(_, primaryCtorParam = true))(print(", ")) + } + } + // constructor's params processing (don't print single empty constructor param list) + vparamss match { + case Nil | List(Nil) if !mods.isCase && !ctorMods.hasFlag(AccessFlags) => + case _ => vparamss foreach printConstrParams + } + parents + case _ => + // Can get here with erroneous code, like `{@deprecatedName ` + Nil } - parents } // get trees without default classes and traits (when they are last) diff --git 
a/src/repl-frontend/scala/tools/nsc/interpreter/jline/Reader.scala b/src/repl-frontend/scala/tools/nsc/interpreter/jline/Reader.scala index 7302966ac16d..1b472935a9fe 100644 --- a/src/repl-frontend/scala/tools/nsc/interpreter/jline/Reader.scala +++ b/src/repl-frontend/scala/tools/nsc/interpreter/jline/Reader.scala @@ -14,15 +14,17 @@ package scala.tools.nsc.interpreter package jline import org.jline.builtins.InputRC +import org.jline.keymap.KeyMap import org.jline.reader.Parser.ParseContext import org.jline.reader._ -import org.jline.reader.impl.{DefaultParser, LineReaderImpl} +import org.jline.reader.impl.{CompletionMatcherImpl, DefaultParser, LineReaderImpl} import org.jline.terminal.Terminal import java.io.{ByteArrayInputStream, File} import java.net.{MalformedURLException, URL} import java.util.{List => JList} import scala.io.Source +import scala.reflect.internal.Chars import scala.tools.nsc.interpreter.shell.{Accumulator, ShellConfig} import scala.util.Using import scala.util.control.NonFatal @@ -122,17 +124,67 @@ object Reader { .variable(SECONDARY_PROMPT_PATTERN, config.encolor(config.continueText)) // Continue prompt .variable(WORDCHARS, LineReaderImpl.DEFAULT_WORDCHARS.filterNot("*?.[]~=/&;!#%^(){}<>".toSet)) .option(Option.DISABLE_EVENT_EXPANSION, true) // Otherwise `scala> println(raw"\n".toList)` gives `List(n)` !! + .option(Option.COMPLETE_MATCHER_CAMELCASE, true) + .option(Option.COMPLETE_MATCHER_TYPO, true) } + object customCompletionMatcher extends CompletionMatcherImpl { + override def compile(options: java.util.Map[LineReader.Option, java.lang.Boolean], prefix: Boolean, line: CompletingParsedLine, caseInsensitive: Boolean, errors: Int, originalGroupName: String): Unit = { + val errorsReduced = line.wordCursor() match { + case 0 | 1 | 2 | 3 => 0 // disable JLine's levenshtein-distance based typo matcher for short strings + case 4 | 5 => math.max(errors, 1) + case _ => errors + } + super.compile(options, prefix, line, caseInsensitive, errorsReduced, originalGroupName) + } + + override def matches(candidates: JList[Candidate]): JList[Candidate] = { + val matching = super.matches(candidates) + matching + } + } + + builder.completionMatcher(customCompletionMatcher) val reader = builder.build() try inputrcFileContents.foreach(f => InputRC.configure(reader, new ByteArrayInputStream(f))) catch { case NonFatal(_) => } //ignore + + val keyMap = reader.getKeyMaps.get("main") + + object ScalaShowType { + val Name = "scala-show-type" + private var lastInvokeLocation: Option[(String, Int)] = None + def apply(): Boolean = { + val nextInvokeLocation = Some((reader.getBuffer.toString, reader.getBuffer.cursor())) + val cursor = reader.getBuffer.cursor() + val text = reader.getBuffer.toString + val result = completer.complete(text, cursor, filter = true) + if (lastInvokeLocation == nextInvokeLocation) { + show(Naming.unmangle(result.typedTree)) + lastInvokeLocation = None + } else { + show(result.typeAtCursor) + lastInvokeLocation = nextInvokeLocation + } + true + } + def show(text: String): Unit = { + reader.getTerminal.writer.println() + reader.getTerminal.writer.println(text) + reader.callWidget(LineReader.REDRAW_LINE) + reader.callWidget(LineReader.REDISPLAY) + reader.getTerminal.flush() + } + } + reader.getWidgets().put(ScalaShowType.Name, () => ScalaShowType()) + locally { import LineReader._ // VIINS, VICMD, EMACS val keymap = if (config.viMode) VIINS else EMACS reader.getKeyMaps.put(MAIN, reader.getKeyMaps.get(keymap)); + keyMap.bind(new Reference(ScalaShowType.Name), 
KeyMap.ctrl('T')) } def secure(p: java.nio.file.Path): Unit = { try scala.reflect.internal.util.OwnerOnlyChmod.chmodFileOrCreateEmpty(p) @@ -201,6 +253,12 @@ object Reader { val (wordCursor, wordIndex) = current match { case Some(t) if t.isIdentifier => (cursor - t.start, tokens.indexOf(t)) + case Some(t) => + val isIdentifierStartKeyword = (t.start until t.end).forall(i => Chars.isIdentifierPart(line.charAt(i))) + if (isIdentifierStartKeyword) + (cursor - t.start, tokens.indexOf(t)) + else + (0, -1) case _ => (0, -1) } @@ -259,45 +317,50 @@ object Reader { class Completion(delegate: shell.Completion) extends shell.Completion with Completer { require(delegate != null) // REPL Completion - def complete(buffer: String, cursor: Int): shell.CompletionResult = delegate.complete(buffer, cursor) + def complete(buffer: String, cursor: Int, filter: Boolean): shell.CompletionResult = delegate.complete(buffer, cursor, filter) // JLine Completer def complete(lineReader: LineReader, parsedLine: ParsedLine, newCandidates: JList[Candidate]): Unit = { - def candidateForResult(line: String, cc: CompletionCandidate): Candidate = { - val value = if (line.startsWith(":")) ":" + cc.defString else cc.defString - val displayed = cc.defString + (cc.arity match { + def candidateForResult(cc: CompletionCandidate, deprecated: Boolean, universal: Boolean): Candidate = { + val value = cc.name + val displayed = cc.name + (cc.arity match { case CompletionCandidate.Nullary => "" case CompletionCandidate.Nilary => "()" case _ => "(" }) val group = null // results may be grouped val descr = // displayed alongside - if (cc.isDeprecated) "deprecated" - else if (cc.isUniversal) "universal" + if (deprecated) "deprecated" + else if (universal) "universal" else null val suffix = null // such as slash after directory name val key = null // same key implies mergeable result val complete = false // more to complete? new Candidate(value, displayed, group, descr, suffix, key, complete) } - val result = complete(parsedLine.line, parsedLine.cursor) - result.candidates.map(_.defString) match { - // the presence of the empty string here is a signal that the symbol - // is already complete and so instead of completing, we want to show - // the user the method signature. there are various JLine 3 features - // one might use to do this instead; sticking to basics for now - case "" :: defStrings if defStrings.nonEmpty => - // specifics here are cargo-culted from Ammonite - lineReader.getTerminal.writer.println() - for (cc <- result.candidates.tail) - lineReader.getTerminal.writer.println(cc.defString) - lineReader.callWidget(LineReader.REDRAW_LINE) - lineReader.callWidget(LineReader.REDISPLAY) - lineReader.getTerminal.flush() - // normal completion - case _ => - for (cc <- result.candidates) - newCandidates.add(candidateForResult(result.line, cc)) + val result = complete(parsedLine.line, parsedLine.cursor, filter = false) + for (group <- result.candidates.groupBy(_.name)) { + // scala/bug#12238 + // Currently, only when all methods are Deprecated should they be displayed `Deprecated` to users. Only handle result of PresentationCompilation#toCandidates. + // We don't handle result of PresentationCompilation#defStringCandidates, because we need to show the deprecated here. 
+ val allDeprecated = group._2.forall(_.isDeprecated) + val allUniversal = group._2.forall(_.isUniversal) + group._2.foreach(cc => newCandidates.add(candidateForResult(cc, allDeprecated, allUniversal))) + } + + val parsedLineWord = parsedLine.word() + result.candidates.filter(_.name == parsedLineWord) match { + case Nil => + case exacts => + val declStrings = exacts.map(_.declString()).filterNot(_ == "") + if (declStrings.nonEmpty) { + lineReader.getTerminal.writer.println() + for (declString <- declStrings) + lineReader.getTerminal.writer.println(declString) + lineReader.callWidget(LineReader.REDRAW_LINE) + lineReader.callWidget(LineReader.REDISPLAY) + lineReader.getTerminal.flush() + } } } } diff --git a/src/repl-frontend/scala/tools/nsc/interpreter/shell/Completion.scala b/src/repl-frontend/scala/tools/nsc/interpreter/shell/Completion.scala index 17f8c72eb57e..389dd194e824 100644 --- a/src/repl-frontend/scala/tools/nsc/interpreter/shell/Completion.scala +++ b/src/repl-frontend/scala/tools/nsc/interpreter/shell/Completion.scala @@ -14,22 +14,23 @@ package scala.tools.nsc.interpreter package shell trait Completion { - def complete(buffer: String, cursor: Int): CompletionResult + final def complete(buffer: String, cursor: Int): CompletionResult = complete(buffer, cursor, filter = true) + def complete(buffer: String, cursor: Int, filter: Boolean): CompletionResult } object NoCompletion extends Completion { - def complete(buffer: String, cursor: Int) = NoCompletions + def complete(buffer: String, cursor: Int, filter: Boolean) = NoCompletions } -case class CompletionResult(line: String, cursor: Int, candidates: List[CompletionCandidate]) { +case class CompletionResult(line: String, cursor: Int, candidates: List[CompletionCandidate], typeAtCursor: String = "", typedTree: String = "") { final def orElse(other: => CompletionResult): CompletionResult = if (candidates.nonEmpty) this else other } object CompletionResult { val empty: CompletionResult = NoCompletions } -object NoCompletions extends CompletionResult("", -1, Nil) +object NoCompletions extends CompletionResult("", -1, Nil, "", "") case class MultiCompletion(underlying: Completion*) extends Completion { - override def complete(buffer: String, cursor: Int) = - underlying.foldLeft(CompletionResult.empty)((r, c) => r.orElse(c.complete(buffer, cursor))) + override def complete(buffer: String, cursor: Int, filter: Boolean) = + underlying.foldLeft(CompletionResult.empty)((r,c) => r.orElse(c.complete(buffer, cursor, filter))) } diff --git a/src/repl-frontend/scala/tools/nsc/interpreter/shell/ILoop.scala b/src/repl-frontend/scala/tools/nsc/interpreter/shell/ILoop.scala index aece63c03b50..8f51bc84e691 100644 --- a/src/repl-frontend/scala/tools/nsc/interpreter/shell/ILoop.scala +++ b/src/repl-frontend/scala/tools/nsc/interpreter/shell/ILoop.scala @@ -228,7 +228,7 @@ class ILoop(config: ShellConfig, inOverride: BufferedReader = null, .map(d => CompletionResult(buffer, i, d.toDirectory.list.map(x => CompletionCandidate(x.name)).toList)) .getOrElse(NoCompletions) def listedIn(dir: Directory, name: String) = dir.list.filter(_.name.startsWith(name)).map(_.name).toList - def complete(buffer: String, cursor: Int): CompletionResult = + def complete(buffer: String, cursor: Int, filter: Boolean): CompletionResult = buffer.substring(0, cursor) match { case emptyWord(s) => listed(buffer, cursor, Directory.Current) case directorily(s) => listed(buffer, cursor, Option(Path(s))) @@ -247,13 +247,13 @@ class ILoop(config: ShellConfig, inOverride: BufferedReader = 
null, // complete settings name val settingsCompletion: Completion = new Completion { val trailingWord = """(\S+)$""".r.unanchored - def complete(buffer: String, cursor: Int): CompletionResult = { + def complete(buffer: String, cursor: Int, filter: Boolean): CompletionResult = { buffer.substring(0, cursor) match { case trailingWord(s) => - val maybes = intp.visibleSettings.filter(_.name.startsWith(s)).map(_.name) + val maybes = intp.visibleSettings.filter(x => if (filter) x.name.startsWith(s) else true).map(_.name) .filterNot(cond(_) { case "-"|"-X"|"-Y" => true }).sorted if (maybes.isEmpty) NoCompletions - else CompletionResult(buffer, cursor - s.length, maybes.map(CompletionCandidate(_))) + else CompletionResult(buffer, cursor - s.length, maybes.map(CompletionCandidate(_)), "", "") case _ => NoCompletions } } @@ -541,8 +541,8 @@ class ILoop(config: ShellConfig, inOverride: BufferedReader = null, MultiCompletion(shellCompletion, rc) } val shellCompletion = new Completion { - override def complete(buffer: String, cursor: Int) = - if (buffer.startsWith(":")) colonCompletion(buffer, cursor).complete(buffer, cursor) + override def complete(buffer: String, cursor: Int, filter: Boolean) = + if (buffer.startsWith(":")) colonCompletion(buffer, cursor).complete(buffer, cursor, filter) else NoCompletions } @@ -554,13 +554,13 @@ class ILoop(config: ShellConfig, inOverride: BufferedReader = null, // condition here is a bit weird because of the weird hack we have where // the first candidate having an empty defString means it's not really // completion, but showing the method signature instead - if (candidates.headOption.exists(_.defString.nonEmpty)) { + if (candidates.headOption.exists(_.name.nonEmpty)) { val prefix = if (completions == NoCompletions) "" else what.substring(0, completions.cursor) // hvesalai (emacs sbt-mode maintainer) says it's important to echo only once and not per-line echo( - candidates.map(c => s"[completions] $prefix${c.defString}") + candidates.map(c => s"[completions] $prefix${c.name}") .mkString("\n") ) } diff --git a/src/repl-frontend/scala/tools/nsc/interpreter/shell/LoopCommands.scala b/src/repl-frontend/scala/tools/nsc/interpreter/shell/LoopCommands.scala index 1063971b5f2b..07c9b8da8d95 100644 --- a/src/repl-frontend/scala/tools/nsc/interpreter/shell/LoopCommands.scala +++ b/src/repl-frontend/scala/tools/nsc/interpreter/shell/LoopCommands.scala @@ -14,7 +14,6 @@ package scala.tools.nsc.interpreter package shell import java.io.{PrintWriter => JPrintWriter} - import scala.language.implicitConversions import scala.collection.mutable.ListBuffer import scala.tools.nsc.interpreter.ReplStrings.words @@ -60,6 +59,7 @@ trait LoopCommands { // subclasses may provide completions def completion: Completion = NoCompletion + override def toString(): String = name } object LoopCommand { def nullary(name: String, help: String, f: () => Result): LoopCommand = @@ -91,6 +91,10 @@ trait LoopCommands { echo("All commands can be abbreviated, e.g., :he instead of :help.") for (cmd <- commands) echo(formatStr.format(cmd.usageMsg, cmd.help)) + echo("") + echo("Useful default key bindings:") + echo(" TAB code completion") + echo(" CTRL-SHIFT-T type at cursor, hit again to see the code with all types/implicits inferred.") } def ambiguousError(cmd: String): Result = { matchingCommands(cmd) match { @@ -135,15 +139,15 @@ trait LoopCommands { case cmd :: Nil if !cursorAtName => cmd.completion case cmd :: Nil if cmd.name == name => NoCompletion case cmd :: Nil => - val completion = if 
(cmd.isInstanceOf[NullaryCmd] || cursor < line.length) cmd.name else cmd.name + " " + val completion = ":" + cmd.name new Completion { - def complete(buffer: String, cursor: Int) = - CompletionResult(buffer, cursor = 1, List(CompletionCandidate(completion))) + def complete(buffer: String, cursor: Int, filter: Boolean) = + CompletionResult(buffer, cursor = 1, List(CompletionCandidate(completion)), "", "") } case cmd :: rest => new Completion { - def complete(buffer: String, cursor: Int) = - CompletionResult(buffer, cursor = 1, cmds.map(cmd => CompletionCandidate(cmd.name))) + def complete(buffer: String, cursor: Int, filter: Boolean) = + CompletionResult(buffer, cursor = 1, cmds.map(cmd => CompletionCandidate(":" + cmd.name)), "", "") } } case _ => NoCompletion diff --git a/src/repl-frontend/scala/tools/nsc/interpreter/shell/ReplCompletion.scala b/src/repl-frontend/scala/tools/nsc/interpreter/shell/ReplCompletion.scala index afbc38103e4d..6aedd90048dc 100644 --- a/src/repl-frontend/scala/tools/nsc/interpreter/shell/ReplCompletion.scala +++ b/src/repl-frontend/scala/tools/nsc/interpreter/shell/ReplCompletion.scala @@ -19,7 +19,7 @@ import scala.util.control.NonFatal */ class ReplCompletion(intp: Repl, val accumulator: Accumulator = new Accumulator) extends Completion { - def complete(buffer: String, cursor: Int): CompletionResult = { + def complete(buffer: String, cursor: Int, filter: Boolean): CompletionResult = { // special case for: // // scala> 1 @@ -30,13 +30,13 @@ class ReplCompletion(intp: Repl, val accumulator: Accumulator = new Accumulator) val bufferWithMultiLine = accumulator.toString + bufferWithVar val cursor1 = cursor + (bufferWithMultiLine.length - buffer.length) - codeCompletion(bufferWithMultiLine, cursor1) + codeCompletion(bufferWithMultiLine, cursor1, filter) } // A convenience for testing def complete(before: String, after: String = ""): CompletionResult = complete(before + after, before.length) - private def codeCompletion(buf: String, cursor: Int): CompletionResult = { + private def codeCompletion(buf: String, cursor: Int, filter: Boolean): CompletionResult = { require(cursor >= 0 && cursor <= buf.length) // secret handshakes @@ -49,37 +49,24 @@ class ReplCompletion(intp: Repl, val accumulator: Accumulator = new Accumulator) case Right(result) => try { buf match { case slashPrint() if cursor == buf.length => - CompletionResult(buf, cursor, CompletionCandidate.fromStrings("" :: Naming.unmangle(result.print) :: Nil)) + CompletionResult(buf, cursor, CompletionCandidate.fromStrings("" :: Naming.unmangle(result.print) :: Nil), "", "") case slashPrintRaw() if cursor == buf.length => - CompletionResult(buf, cursor, CompletionCandidate.fromStrings("" :: result.print :: Nil)) + CompletionResult(buf, cursor, CompletionCandidate.fromStrings("" :: result.print :: Nil), "", "") case slashTypeAt(start, end) if cursor == buf.length => - CompletionResult(buf, cursor, CompletionCandidate.fromStrings("" :: result.typeAt(start.toInt, end.toInt) :: Nil)) + CompletionResult(buf, cursor, CompletionCandidate.fromStrings("" :: result.typeAt(start.toInt, end.toInt) :: Nil), "", "") case _ => // under JLine 3, we no longer use the tabCount concept, so tabCount is always 1 // which always gives us all completions - val (c, r) = result.completionCandidates(tabCount = 1) - // scala/bug#12238 - // Currently, only when all methods are Deprecated should they be displayed `Deprecated` to users. Only handle result of PresentationCompilation#toCandidates. 
- // We don't handle result of PresentationCompilation#defStringCandidates, because we need to show the deprecated here. - if (r.nonEmpty && r.forall(!_.defString.startsWith("def"))) { - val groupByDef = r.groupBy(_.defString) - val allOverrideIsUniversal = groupByDef.filter(f => f._2.forall(_.isUniversal)).keySet - val allOverrideIsDeprecated = groupByDef.filter(f => f._2.forall(_.isDeprecated)).keySet - def isOverrideMethod(candidate: CompletionCandidate): Boolean = groupByDef(candidate.defString).size > 1 - val rewriteDecr = r.map(candidate => { - // If not all overloaded methods are deprecated, but they are overloaded methods, they (all) should be set to false. - val isUniv = if (!allOverrideIsUniversal.contains(candidate.defString) && isOverrideMethod(candidate)) false else candidate.isUniversal - val isDepr = if (!allOverrideIsDeprecated.contains(candidate.defString) && isOverrideMethod(candidate)) false else candidate.isDeprecated - candidate.copy(isUniversal = isUniv, isDeprecated = isDepr) - }) - CompletionResult(buf, c, rewriteDecr) - } else CompletionResult(buf, c, r) + val (c, r) = result.completionCandidates(filter, tabCount = 1) + val typeAtCursor = result.typeAt(cursor, cursor) + CompletionResult(buf, c, r, typeAtCursor, result.print) } } finally result.cleanup() } } catch { case NonFatal(e) => - // e.printStackTrace() + if (intp.settings.debug) + e.printStackTrace() NoCompletions } } diff --git a/src/repl/scala/tools/nsc/interpreter/IMain.scala b/src/repl/scala/tools/nsc/interpreter/IMain.scala index 70ca0d8e227d..b3e12067e26b 100644 --- a/src/repl/scala/tools/nsc/interpreter/IMain.scala +++ b/src/repl/scala/tools/nsc/interpreter/IMain.scala @@ -787,9 +787,12 @@ class IMain(val settings: Settings, parentClassLoaderOverride: Option[ClassLoade // The source file contents only has the code originally input by the user, // with unit's body holding the synthetic trees. // When emitting errors, be careful not to refer to the synthetic code - private val unit = new CompilationUnit(new BatchSourceFile(if (synthetic) "" else label, line)) + // pad with a trailing " " so that the synthetic position for enclosing trees does not exactly coincide with the + // position of the user-written code, these seems to confuse the presentation compiler. 
+ private val paddedLine = line + " " + private val unit = new CompilationUnit(new BatchSourceFile(if (synthetic) "" else label, paddedLine)) // a dummy position used for synthetic trees (needed for pres compiler to locate the trees for user input) - private val wholeUnit = Position.range(unit.source, 0, 0, line.length) + private val wholeUnit = Position.range(unit.source, 0, 0, paddedLine.length) private def storeInVal(tree: Tree): Tree = { val resName = newTermName(if (synthetic) freshInternalVarName() else freshUserVarName()) @@ -797,15 +800,17 @@ class IMain(val settings: Settings, parentClassLoaderOverride: Option[ClassLoade } // Wrap last tree in a valdef to give user a nice handle for it (`resN`) - val trees: List[Tree] = origTrees.init :+ { - val tree = origTrees.last - @tailrec def loop(scrut: Tree): Tree = scrut match { - case _: Assign => tree - case _: RefTree | _: TermTree => storeInVal(tree) - case Annotated(_, arg) => loop(arg) - case _ => tree - } - loop(tree) + val trees: List[Tree] = origTrees match { + case init :+ tree => + @tailrec def loop(scrut: Tree): Tree = scrut match { + case _: Assign => tree + case _: RefTree | _: TermTree => storeInVal(tree) + case Annotated(_, arg) => loop(arg) + case _ => tree + } + init :+ loop(tree) + case xs => + xs // can get here in completion of erroneous code } /** handlers for each tree in this request */ @@ -889,13 +894,13 @@ class IMain(val settings: Settings, parentClassLoaderOverride: Option[ClassLoade else ModuleDef(NoMods, readName, wrapperTempl)) if (isClassBased) - stats += q"""object $readName { val INSTANCE = new ${tq"""${readName.toTypeName}"""} }""" + stats += atPos(wholeUnit.focus)(q"""object $readName { val INSTANCE = new ${tq"""${readName.toTypeName}"""} }""") val unspliced = PackageDef(atPos(wholeUnit.focus)(Ident(lineRep.packageName)), stats.toList) unit.body = spliceUserCode.transform(unspliced) unit.encounteredXml(firstXmlPos) -// settings.Xprintpos.value = true + // settings.Xprintpos.value = true showCode(asCompactString(unit.body)) unit diff --git a/src/repl/scala/tools/nsc/interpreter/Interface.scala b/src/repl/scala/tools/nsc/interpreter/Interface.scala index 73f27ed749e9..790750daf367 100644 --- a/src/repl/scala/tools/nsc/interpreter/Interface.scala +++ b/src/repl/scala/tools/nsc/interpreter/Interface.scala @@ -323,21 +323,24 @@ trait PresentationCompilationResult { def candidates(tabCount: Int): (Int, List[String]) = completionCandidates(tabCount) match { case (cursor, cands) => - (cursor, cands.map(_.defString)) + (cursor, cands.map(_.name)) } - def completionCandidates(tabCount: Int = -1): (Int, List[CompletionCandidate]) + final def completionCandidates(tabCount: Int = -1): (Int, List[CompletionCandidate]) = completionCandidates(filter = true, tabCount) + def completionCandidates(filter: Boolean, tabCount: Int): (Int, List[CompletionCandidate]) } case class CompletionCandidate( - defString: String, + name: String, arity: CompletionCandidate.Arity = CompletionCandidate.Nullary, isDeprecated: Boolean = false, - isUniversal: Boolean = false) + isUniversal: Boolean = false, + declString: () => String = () => "") object CompletionCandidate { sealed trait Arity case object Nullary extends Arity case object Nilary extends Arity + case object Infix extends Arity case object Other extends Arity // purely for convenience def fromStrings(defStrings: List[String]): List[CompletionCandidate] = diff --git a/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala 
b/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala index 91df89362548..04e1f790afb5 100644 --- a/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala +++ b/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala @@ -12,7 +12,9 @@ package scala.tools.nsc.interpreter -import scala.reflect.internal.util.{Position, RangePosition, StringOps} +import scala.collection.mutable +import scala.reflect.internal.util.{Position, RangePosition} +import scala.tools.nsc.ast.parser.Tokens import scala.tools.nsc.backend.JavaPlatform import scala.tools.nsc.util.ClassPath import scala.tools.nsc.{Settings, interactive} @@ -22,7 +24,7 @@ import scala.tools.nsc.interpreter.Results.{Error, Result} trait PresentationCompilation { self: IMain => - private final val Cursor = IMain.DummyCursorFragment + " " + private final val Cursor = IMain.DummyCursorFragment /** Typecheck a line of REPL input, suitably wrapped with "interpreter wrapper" objects/classes, with the * presentation compiler. The result of this method gives access to the typechecked tree and to autocompletion @@ -34,8 +36,28 @@ trait PresentationCompilation { self: IMain => if (global == null) Left(Error) else { val pc = newPresentationCompiler() - val line1 = buf.patch(cursor, Cursor, 0) - val trees = pc.newUnitParser(line1).parseStats() + def cursorIsInKeyword(): Boolean = { + val scanner = pc.newUnitParser(buf).newScanner() + scanner.init() + while (scanner.token != Tokens.EOF) { + val token = scanner.token + val o = scanner.offset + scanner.nextToken() + if ((o to scanner.lastOffset).contains(cursor)) { + return (!Tokens.isIdentifier(token) && pc.syntaxAnalyzer.token2name.contains(token)) + } + } + false + } + // Support completion of "def format = 42; for" by replacing the keyword with foo_CURSOR_ before + // typechecking. Only do this when needed to be able ot correctly return the type of `foo.bar` + // where `bar` is the complete name of a member. 
+ val line1 = if (!cursorIsInKeyword()) buf else buf.patch(cursor, Cursor, 0) + + val trees = pc.newUnitParser(line1).parseStats() match { + case Nil => List(pc.EmptyTree) + case xs => xs + } val importer = global.mkImporter(pc) //println(s"pc: [[$line1]], <<${trees.size}>>") val request = new Request(line1, trees map (t => importer.importTree(t)), generousImports = true) @@ -89,8 +111,6 @@ trait PresentationCompilation { self: IMain => interactiveGlobal } - private var lastCommonPrefixCompletion: Option[String] = None - abstract class PresentationCompileResult(val compiler: interactive.Global, val inputRange: Position, val cursor: Int, val buf: String) extends PresentationCompilationResult { val unit: compiler.RichCompilationUnit // depmet broken for constructors, can't be ctor arg @@ -120,15 +140,23 @@ trait PresentationCompilation { self: IMain => } } - def typeString(tree: compiler.Tree): String = - compiler.exitingTyper(tree.tpe.toString) + def typeString(tree: compiler.Tree): String = { + tree.tpe match { + case null | compiler.NoType | compiler.ErrorType => "" + case tp => compiler.exitingTyper(tp.toString) + } + } def treeString(tree: compiler.Tree): String = compiler.showCode(tree) override def print = { val tree = treeAt(inputRange) - treeString(tree) + " // : " + tree.tpe.safeToString + val tpString = typeString(tree) match { + case "" => "" + case s => " // : " + s + } + treeString(tree) + tpString } @@ -138,7 +166,7 @@ trait PresentationCompilation { self: IMain => val NoCandidates = (-1, Nil) type Candidates = (Int, List[CompletionCandidate]) - override def completionCandidates(tabCount: Int): Candidates = { + override def completionCandidates(filter: Boolean, tabCount: Int): Candidates = { import compiler._ import CompletionResult.NoResults @@ -161,76 +189,56 @@ trait PresentationCompilation { self: IMain => if (m.sym.paramss.isEmpty) CompletionCandidate.Nullary else if (m.sym.paramss.size == 1 && m.sym.paramss.head.isEmpty) CompletionCandidate.Nilary else CompletionCandidate.Other - def defStringCandidates(matching: List[Member], name: Name, isNew: Boolean): Candidates = { + def defStringCandidates(matching: List[Member], name: Name, isNew: Boolean) = { + val seen = new mutable.HashSet[Symbol]() val ccs = for { member <- matching - if member.symNameDropLocal == name + if seen.add(member.sym) sym <- if (member.sym.isClass && isNew) member.sym.info.decl(nme.CONSTRUCTOR).alternatives else member.sym.alternatives sugared = sym.sugaredSymbolOrSelf } yield { - val tp = member.prefix memberType sym - val desc = Seq(if (isMemberDeprecated(member)) "(deprecated)" else "", if (isMemberUniversal(member)) "(universal)" else "") - val methodOtherDesc = if (!desc.exists(_ != "")) "" else " " + desc.filter(_ != "").mkString(" ") CompletionCandidate( - defString = sugared.defStringSeenAs(tp) + methodOtherDesc, + name = member.symNameDropLocal.decoded, arity = memberArity(member), isDeprecated = isMemberDeprecated(member), - isUniversal = isMemberUniversal(member)) + isUniversal = isMemberUniversal(member), + declString = () => { + if (sym.isPackageObjectOrClass) "" + else { + val tp = member.prefix memberType sym + val desc = Seq(if (isMemberDeprecated(member)) "(deprecated)" else "", if (isMemberUniversal(member)) "(universal)" else "") + val methodOtherDesc = if (!desc.exists(_ != "")) "" else " " + desc.filter(_ != "").mkString(" ") + sugared.defStringSeenAs(tp) + methodOtherDesc + } + }) } - (cursor, CompletionCandidate("") :: ccs.distinct) + ccs } - def toCandidates(members: 
List[Member]): List[CompletionCandidate] = - members - .map(m => CompletionCandidate(m.symNameDropLocal.decoded, memberArity(m), isMemberDeprecated(m), isMemberUniversal(m))) - .sortBy(_.defString) val found = this.completionsAt(cursor) match { case NoResults => NoCandidates case r => def shouldHide(m: Member): Boolean = - tabCount == 0 && (isMemberDeprecated(m) || isMemberUniversal(m)) - val matching = r.matchingResults().filterNot(shouldHide) - val tabAfterCommonPrefixCompletion = lastCommonPrefixCompletion.contains(buf.substring(inputRange.start, cursor)) && matching.exists(_.symNameDropLocal == r.name) - val doubleTab = tabCount > 0 && matching.forall(_.symNameDropLocal == r.name) - if (tabAfterCommonPrefixCompletion || doubleTab) { - val pos1 = positionOf(cursor) - import compiler._ - val locator = new Locator(pos1) - val tree = locator locateIn unit.body - var isNew = false - new TreeStackTraverser { - override def traverse(t: Tree): Unit = { - if (t eq tree) { - isNew = path.dropWhile { case _: Select | _: Annotated => true; case _ => false}.headOption match { - case Some(_: New) => true - case _ => false - } - } else super.traverse(t) - } - }.traverse(unit.body) - defStringCandidates(matching, r.name, isNew) - } else if (matching.isEmpty) { - // Lenient matching based on camel case and on eliding JavaBean "get" / "is" boilerplate - val camelMatches: List[Member] = r.matchingResults(CompletionResult.camelMatch(_)).filterNot(shouldHide) - val memberCompletions: List[CompletionCandidate] = toCandidates(camelMatches) - def allowCompletion = ( - (memberCompletions.size == 1) - || CompletionResult.camelMatch(r.name)(r.name.newName(StringOps.longestCommonPrefix(memberCompletions.map(_.defString)))) - ) - if (memberCompletions.isEmpty) NoCandidates - else if (allowCompletion) (cursor - r.positionDelta, memberCompletions) - else (cursor, CompletionCandidate("") :: memberCompletions) - } else if (matching.nonEmpty && matching.forall(_.symNameDropLocal == r.name)) - NoCandidates // don't offer completion if the only option has been fully typed already - else { - // regular completion - (cursor - r.positionDelta, toCandidates(matching)) - } + filter && tabCount == 0 && (isMemberDeprecated(m) || isMemberUniversal(m)) + val matching = r.matchingResults(nameMatcher = if (filter) {entered => candidate => candidate.startsWith(entered)} else _ => _ => true).filterNot(shouldHide) + val pos1 = positionOf(cursor) + import compiler._ + val locator = new Locator(pos1) + val tree = locator locateIn unit.body + var isNew = false + new TreeStackTraverser { + override def traverse(t: Tree): Unit = { + if (t eq tree) { + isNew = path.dropWhile { case _: Select | _: Annotated => true; case _ => false}.headOption match { + case Some(_: New) => true + case _ => false + } + } else super.traverse(t) + } + }.traverse(unit.body) + val candidates = defStringCandidates(matching, r.name, isNew) + val pos = cursor - r.positionDelta + (pos, candidates.sortBy(_.name)) } - lastCommonPrefixCompletion = - if (found != NoCandidates && buf.length >= found._1) - Some(buf.substring(inputRange.start, found._1) + StringOps.longestCommonPrefix(found._2.map(_.defString))) - else - None found } diff --git a/test/files/run/repl-completions.check b/test/files/run/repl-completions.check index 90d463fdf751..224c7b7e3155 100644 --- a/test/files/run/repl-completions.check +++ b/test/files/run/repl-completions.check @@ -9,6 +9,7 @@ scala> :completions O.x [completions] O.x_y_z scala> :completions O.x_y_x +[completions] O.x_y_x scala> 
:completions O.x_y_a @@ -27,6 +28,6 @@ scala> :completions object O2 { val x = O. [completions] object O2 { val x = O.x_y_z scala> :completions :completion -[completions] :completions +[completions] ::completions scala> :quit diff --git a/test/junit/scala/tools/nsc/interpreter/CompletionTest.scala b/test/junit/scala/tools/nsc/interpreter/CompletionTest.scala index 870b9e987bb1..d37fad76419d 100644 --- a/test/junit/scala/tools/nsc/interpreter/CompletionTest.scala +++ b/test/junit/scala/tools/nsc/interpreter/CompletionTest.scala @@ -50,7 +50,7 @@ class CompletionTest { } val acc = new Accumulator val shellCompletion = new Completion { - override def complete(buffer: String, cursor: Int) = + override def complete(buffer: String, cursor: Int, filter: Boolean) = if (buffer.startsWith(":")) new CommandMock().colonCompletion(buffer, cursor).complete(buffer, cursor) else NoCompletions } @@ -106,7 +106,7 @@ class CompletionTest { checkExact(completer, "asInstanceO", "", includeUniversal = true)("asInstanceOf") // Output is sorted - assertEquals(List("prefix_aaa", "prefix_nnn", "prefix_zzz"), completer.complete( """class C { def prefix_nnn = 0; def prefix_zzz = 0; def prefix_aaa = 0; prefix_""").candidates.filter(!_.isUniversal).map(_.defString)) + assertEquals(List("prefix_aaa", "prefix_nnn", "prefix_zzz"), completer.complete( """class C { def prefix_nnn = 0; def prefix_zzz = 0; def prefix_aaa = 0; prefix_""").candidates.filter(!_.isUniversal).map(_.name)) // Enable implicits to check completion enrichment checkExact(completer, """'c'.toU""")("toUpper") @@ -172,11 +172,9 @@ class CompletionTest { def defStringConstructor(): Unit = { val intp = newIMain() val completer = new ReplCompletion(intp) - checkExact(completer, "class Shazam(i: Int); new Shaza")("Shazam") - checkExact(completer, "class Shazam(i: Int); new Shazam")(EmptyString, "def (i: Int): Shazam") - - checkExact(completer, "class Shazam(i: Int) { def this(x: String) = this(0) }; new Shaza")("Shazam") - checkExact(completer, "class Shazam(i: Int) { def this(x: String) = this(0) }; new Shazam")(EmptyString, "def (i: Int): Shazam", "def (x: String): Shazam") + // : String to workaround https://github.com/scala/bug/issues/11964 + checkExact(completer, "class Shazam(i: Int); new Shazam", result = _.declString())("def (i: Int): Shazam" : String) + checkExact(completer, "class Shazam(i: Int) { def this(x: String) = this(0) }; new Shazam", result = _.declString())("def (i: Int): Shazam", "def (x: String): Shazam": String) } @Test @@ -212,7 +210,7 @@ class CompletionTest { | .map(_ + 1) /* then we do reverse */ | .rev""".stripMargin assertTrue( - completer.complete(withMultilineCommit).candidates.map(_.defString).contains("reverseMap") + completer.complete(withMultilineCommit).candidates.map(_.name).contains("reverseMap") ) val withInlineCommit = @@ -220,7 +218,7 @@ class CompletionTest { | .map(_ + 1) // then we do reverse | .rev""".stripMargin assertTrue( - completer.complete(withInlineCommit).candidates.map(_.defString).contains("reverseMap") + completer.complete(withInlineCommit).candidates.map(_.name).contains("reverseMap") ) } @@ -245,7 +243,9 @@ class CompletionTest { ) val candidates1 = completer.complete("Stale.ol").candidates assertEquals(2, candidates1.size) - assertEquals(candidates1.head.isDeprecated, false) + // Our JLine Reader is now responsible for only displaying @deprecated if all candidates with the name are + // deprecated. That isn't covered by this test. 
+ assertEquals(candidates1.head.isDeprecated, true) assertEquals(candidates1.last.isDeprecated, false) } @@ -255,8 +255,8 @@ class CompletionTest { """object Stale { def oldie(i: Int) = ???; @deprecated("","") def oldie = ??? }""" ) val candidates1 = completer.complete("Stale.oldie").candidates - assertEquals(3, candidates1.size) - assertEquals(candidates1.filter(_.isDeprecated).map(_.defString.contains("deprecated")).head, true) + assertEquals(2, candidates1.size) + assertEquals(candidates1.filter(_.isDeprecated).map(_.declString().contains("deprecated")).head, true) assertEquals(candidates1.last.isDeprecated, false) } @@ -267,11 +267,11 @@ class CompletionTest { """object Stuff { @deprecated("","") def `this` = ??? ; @deprecated("","") def `that` = ??? }""" ) val candidates1 = completer.complete("Stale.oldie").candidates - assertEquals(2, candidates1.size) // When exactly matched, there is an empty character - assertTrue(candidates1.filter(_.defString.contains("oldie")).head.defString.contains("deprecated")) + assertEquals(1, candidates1.size) // When exactly matched, there is an empty character + assertTrue(candidates1.filter(_.declString().contains("oldie")).head.declString().contains("deprecated")) val candidates2 = completer.complete("Stuff.that").candidates - assertEquals(2, candidates2.size) - assertTrue(candidates2.filter(_.defString.contains("that")).head.defString.contains("deprecated")) + assertEquals(1, candidates2.size) + assertTrue(candidates2.filter(_.declString().contains("that")).head.declString().contains("deprecated")) } @Test @@ -301,9 +301,9 @@ class CompletionTest { """object A { class Type; object Term }""" ) val candidates1 = completer.complete("A.T").candidates - assertEquals("Term", candidates1.map(_.defString).mkString(" ")) + assertEquals("Term", candidates1.map(_.name).mkString(" ")) val candidates2 = completer.complete("import A.T").candidates - assertEquals("Term Type", candidates2.map(_.defString).sorted.mkString(" ")) + assertEquals("Term Type", candidates2.map(_.name).sorted.mkString(" ")) } @Test @@ -348,11 +348,12 @@ object Test2 { checkExact(completer, "test.Test.withoutParens.charA")("charAt") } - def checkExact(completer: Completion, before: String, after: String = "", includeUniversal: Boolean = false)(expected: String*): Unit = { - val actual = - completer.complete(before, after).candidates - .filter(c => includeUniversal || !c.isUniversal) - .map(_.defString) + def checkExact(completer: Completion, before: String, after: String = "", includeUniversal: Boolean = false, + result: CompletionCandidate => String = _.name)(expected: String*): Unit = { + val candidates = completer.complete(before, after).candidates + .filter(c => includeUniversal || !c.isUniversal) + val actual = candidates.map(result) assertEquals(expected.sorted.mkString(" "), actual.toSeq.distinct.sorted.mkString(" ")) } + } diff --git a/versions.properties b/versions.properties index 971b4a002731..0bb7a75f549e 100644 --- a/versions.properties +++ b/versions.properties @@ -9,5 +9,5 @@ starr.version=2.13.6 scala-asm.version=9.1.0-scala-1 # jna.version must be updated together with jline-terminal-jna -jline.version=3.19.0 +jline.version=3.20.0 jna.version=5.3.1 From 1b2f9b3e43556e62ca397c87d2758855d175ec46 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Sun, 6 Jun 2021 01:05:36 +1000 Subject: [PATCH 231/769] Make API / Type / AST help compat with multi-line iinput --- .../scala/tools/nsc/interpreter/jline/Reader.scala | 2 ++ 1 file changed, 2 insertions(+) diff --git 
a/src/repl-frontend/scala/tools/nsc/interpreter/jline/Reader.scala b/src/repl-frontend/scala/tools/nsc/interpreter/jline/Reader.scala index 1b472935a9fe..aa83d492122b 100644 --- a/src/repl-frontend/scala/tools/nsc/interpreter/jline/Reader.scala +++ b/src/repl-frontend/scala/tools/nsc/interpreter/jline/Reader.scala @@ -170,6 +170,7 @@ object Reader { true } def show(text: String): Unit = { + reader.callWidget(LineReader.CLEAR) reader.getTerminal.writer.println() reader.getTerminal.writer.println(text) reader.callWidget(LineReader.REDRAW_LINE) @@ -354,6 +355,7 @@ class Completion(delegate: shell.Completion) extends shell.Completion with Compl case exacts => val declStrings = exacts.map(_.declString()).filterNot(_ == "") if (declStrings.nonEmpty) { + lineReader.callWidget(LineReader.CLEAR) lineReader.getTerminal.writer.println() for (declString <- declStrings) lineReader.getTerminal.writer.println(declString) From f3bce2f3fca0f0a75bfb9c5a74dd971ae538b0eb Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Sun, 6 Jun 2021 01:16:24 +1000 Subject: [PATCH 232/769] Don't introduce synthetic val res = in REPL completion --- src/repl/scala/tools/nsc/interpreter/IMain.scala | 4 +++- .../scala/tools/nsc/interpreter/PresentationCompilation.scala | 2 +- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/src/repl/scala/tools/nsc/interpreter/IMain.scala b/src/repl/scala/tools/nsc/interpreter/IMain.scala index b3e12067e26b..099220d7cf4c 100644 --- a/src/repl/scala/tools/nsc/interpreter/IMain.scala +++ b/src/repl/scala/tools/nsc/interpreter/IMain.scala @@ -775,7 +775,8 @@ class IMain(val settings: Settings, parentClassLoaderOverride: Option[ClassLoade } /** One line of code submitted by the user for interpretation */ - class Request(val line: String, origTrees: List[Tree], firstXmlPos: Position = NoPosition, generousImports: Boolean = false, synthetic: Boolean = false) extends ReplRequest { + class Request(val line: String, origTrees: List[Tree], firstXmlPos: Position = NoPosition, + generousImports: Boolean = false, synthetic: Boolean = false, storeResultInVal: Boolean = true) extends ReplRequest { def defines = defHandlers flatMap (_.definedSymbols) def definesTermNames: List[String] = defines collect { case s: TermSymbol => s.decodedName.toString } def imports = importedSymbols @@ -801,6 +802,7 @@ class IMain(val settings: Settings, parentClassLoaderOverride: Option[ClassLoade // Wrap last tree in a valdef to give user a nice handle for it (`resN`) val trees: List[Tree] = origTrees match { + case xs if !storeResultInVal => xs case init :+ tree => @tailrec def loop(scrut: Tree): Tree = scrut match { case _: Assign => tree diff --git a/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala b/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala index 04e1f790afb5..609fd0619345 100644 --- a/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala +++ b/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala @@ -60,7 +60,7 @@ trait PresentationCompilation { self: IMain => } val importer = global.mkImporter(pc) //println(s"pc: [[$line1]], <<${trees.size}>>") - val request = new Request(line1, trees map (t => importer.importTree(t)), generousImports = true) + val request = new Request(line1, trees map (t => importer.importTree(t)), generousImports = true, storeResultInVal = false) val origUnit = request.mkUnit val unit = new pc.CompilationUnit(origUnit.source) unit.body = pc.mkImporter(global).importTree(origUnit.body) From 
b79b16bb7606ede8cc15005eecf94f3739e36a02 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Sun, 6 Jun 2021 11:50:51 -0700 Subject: [PATCH 233/769] Test for fixed issue --- test/files/pos/t7745.scala | 38 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 38 insertions(+) create mode 100644 test/files/pos/t7745.scala diff --git a/test/files/pos/t7745.scala b/test/files/pos/t7745.scala new file mode 100644 index 000000000000..d1b0ed9b2c8f --- /dev/null +++ b/test/files/pos/t7745.scala @@ -0,0 +1,38 @@ + +package bug + +import scala.language.implicitConversions + +class Base[T] + +class BaseOps[T] { + type OpT[U] = Op[T, U] // Fails below + //type OpT[U] = List[U] // OK + //trait OpT[U] extends Op[T, U] // OK + + def op(tgt: OpTarget[OpT]) = tgt +} + +object Base { + implicit def baseOps[T](b: Base[T]): BaseOps[T] = new BaseOps[T] +} + +class Op[A, B] + +class OpTarget[TC[_]] + +object OpTarget { + implicit def apply[TC[_]](a: Any): OpTarget[TC] = new OpTarget[TC] +} + +object TestBase { + val baseOps = new BaseOps[String] + baseOps.op(23) // OK in all cases + + val base = new Base[String] + base.op(23) // In the failing case: + // found : Int(23) + // required: shapeless.OpTarget[[U]shapeless.Op[String,U]] + // base.op(23) + // ^ +} From a8ec10d02c81f3eeb8f6787ba2c041bfec5d6221 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 7 Jun 2021 15:54:47 +1000 Subject: [PATCH 234/769] Fix intermittent auth failure to artifactory from Jenkins In 41e376a, the build was updated to support publishing from Travis CI. However, on Jenkins, the old means of supplying the publish credentials to pr-validation snapshots was retained, it has a ~/.credentials file. So we provided two credentials for the same host/realm to SBT, and on Jenkins the DirectCredentials contains an empty password. Which one of these would SBT pick? ``` sbt 'setupPublishCore https://scala-ci.typesafe.com/artifactory/scala-pr-validation-snapshots/' 'show credentials' [info] compiler / credentials [info] List(DirectCredentials("Artifactory Realm", "scala-ci.typesafe.com", "scala-ci", ****), FileCredentials("/Users/jz/.credentials")) [info] scalap / credentials ... <10 more like this> [info] credentials [info] List(DirectCredentials("Artifactory Realm", "scala-ci.typesafe.com", "scala-ci", ****)) ``` The `ivySbt` task in SBT registers the credentials in order in a global map in Ivy (`CredentialStore`). So on Jenkins, the invalid `DirectCredentials` would be overwritten in the map by he `FileCredentials`. But the fact that this is global state in Ivy appears to be a source of cross talk between the configured credentials for different modules in the build. Even though the publish task is serialized through the ivy lock, this lock does not enclose the previous execution of the `ivySbt` which sets up the credentials in `CredentialStore`. In our build, notice that the root project does _not_ have the `FileCredentials` set. So if the `ivySBT` task for this project runs last, the global map will have the incorrect `DirectCredentials`. The fix in our build is easy, avoid configuring the `DirectCredentials` if the environment variables are absent. We can also standardize on using `Global/credentials := `. The principled fix in SBT would be to thread the credentials down to the HTTP client without using global state. It could also emit a warning if conflicting credentials are configured for a given host/realm. 
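For readers skimming the diff, here is a condensed, illustrative sketch of that guard (illustrative only; it condenses the build.sbt and ScriptCommands.scala changes in the diff below, and reuses this build's existing `PRIVATE_REPO_PASS` variable and Artifactory host/realm):

```scala
// build.sbt-style sketch: file-based credentials are added only when the file exists ...
Global / credentials ++= {
  val file = Path.userHome / ".credentials"
  if (file.exists && !file.isDirectory) List(Credentials(file)) else Nil
}

// ... and environment-derived credentials are added only when the variable is actually set,
// so an empty-password DirectCredentials never reaches Ivy's global CredentialStore.
Global / credentials ++= {
  sys.env.get("PRIVATE_REPO_PASS").filter(_.nonEmpty).toList.map { pass =>
    Credentials("Artifactory Realm", "scala-ci.typesafe.com", "scala-ci", pass)
  }
}
```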
--- build.sbt | 11 ++++++----- project/ScriptCommands.scala | 15 +++++++++++++-- 2 files changed, 19 insertions(+), 7 deletions(-) diff --git a/build.sbt b/build.sbt index ff6183de1c8b..af33ac8f0dec 100644 --- a/build.sbt +++ b/build.sbt @@ -54,12 +54,13 @@ val fatalWarnings = settingKey[Boolean]("whether or not warnings should be fatal // enable fatal warnings automatically on CI Global / fatalWarnings := insideCI.value +Global / credentials ++= { + val file = Path.userHome / ".credentials" + if (file.exists && !file.isDirectory) List(Credentials(file)) + else Nil +} + lazy val publishSettings : Seq[Setting[_]] = Seq( - credentials ++= { - val file = Path.userHome / ".credentials" - if (file.exists && !file.isDirectory) List(Credentials(file)) - else Nil - }, // Add a "default" Ivy configuration because sbt expects the Scala distribution to have one: ivyConfigurations += Configuration.of("Default", "default", "Default", true, Vector(Configurations.Runtime), true), publishMavenStyle := true diff --git a/project/ScriptCommands.scala b/project/ScriptCommands.scala index 82cc51f38561..973d23053218 100644 --- a/project/ScriptCommands.scala +++ b/project/ScriptCommands.scala @@ -107,7 +107,13 @@ object ScriptCommands { Global / baseVersionSuffix := "SPLIT", Global / resolvers += "scala-pr" at url, Global / publishTo := Some("sonatype-releases" at "https://oss.sonatype.org/service/local/staging/deploy/maven2"), - Global / credentials += Credentials("Sonatype Nexus Repository Manager", "oss.sonatype.org", env("SONA_USER"), env("SONA_PASS")) + Global / credentials ++= { + val user = env("SONA_USER") + val pass = env("SONA_PASS") + if (user != "" && pass != "") + List(Credentials("Sonatype Nexus Repository Manager", "oss.sonatype.org", user, pass)) + else Nil + } // pgpSigningKey and pgpPassphrase are set externally by travis / the bootstrap script, as the sbt-pgp plugin is not enabled by default ) ++ enableOptimizer } @@ -168,7 +174,12 @@ object ScriptCommands { Seq( Global / publishTo := Some("scala-pr-publish" at url2), - Global / credentials += Credentials("Artifactory Realm", "scala-ci.typesafe.com", "scala-ci", env("PRIVATE_REPO_PASS")) + Global / credentials ++= { + val pass = env("PRIVATE_REPO_PASS") + if (pass != "") + List(Credentials("Artifactory Realm", "scala-ci.typesafe.com", "scala-ci", pass)) + else Nil + } ) } From 0bcca094fac32f7e4e64de65c0a76e05f184e9ca Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 7 Jun 2021 15:54:47 +1000 Subject: [PATCH 235/769] [backport] Fix intermittent auth failure to artifactory from Jenkins In 41e376a, the build was updated to support publishing from Travis CI. However, on Jenkins, the old means of supplying the publish credentials to pr-validation snapshots was retained, it has a ~/.credentials file. So we provided two credentials for the same host/realm to SBT, and on Jenkins the DirectCredentials contains an empty password. Which one of these would SBT pick? ``` sbt 'setupPublishCore https://scala-ci.typesafe.com/artifactory/scala-pr-validation-snapshots/' 'show credentials' [info] compiler / credentials [info] List(DirectCredentials("Artifactory Realm", "scala-ci.typesafe.com", "scala-ci", ****), FileCredentials("/Users/jz/.credentials")) [info] scalap / credentials ... <10 more like this> [info] credentials [info] List(DirectCredentials("Artifactory Realm", "scala-ci.typesafe.com", "scala-ci", ****)) ``` The `ivySbt` task in SBT registers the credentials in order in a global map in Ivy (`CredentialStore`). 
So on Jenkins, the invalid `DirectCredentials` would be overwritten in the map by he `FileCredentials`. But the fact that this is global state in Ivy appears to be a source of cross talk between the configured credentials for different modules in the build. Even though the publish task is serialized through the ivy lock, this lock does not enclose the previous execution of the `ivySbt` which sets up the credentials in `CredentialStore`. In our build, notice that the root project does _not_ have the `FileCredentials` set. So if the `ivySBT` task for this project runs last, the global map will have the incorrect `DirectCredentials`. The fix in our build is easy, avoid configuring the `DirectCredentials` if the environment variables are absent. We can also standardize on using `Global/credentials := `. The principled fix in SBT would be to thread the credentials down to the HTTP client without using global state. It could also emit a warning if conflicting credentials are configured for a given host/realm. (cherry picked from commit a8ec10d02c81f3eeb8f6787ba2c041bfec5d6221) --- build.sbt | 6 ++++++ project/ScriptCommands.scala | 15 +++++++++++++-- 2 files changed, 19 insertions(+), 2 deletions(-) diff --git a/build.sbt b/build.sbt index 6024296ad517..e31bf14b7a51 100644 --- a/build.sbt +++ b/build.sbt @@ -56,6 +56,12 @@ val diffUtilsDep = "com.googlecode.java-diff-utils" % "diffutils" % " * real publishing should be done with sbt's standard `publish` task. */ lazy val publishDists = taskKey[Unit]("Publish to ./dists/maven-sbt.") +credentials in Global ++= { + val file = Path.userHome / ".credentials" + if (file.exists && !file.isDirectory) List(Credentials(file)) + else Nil +} + lazy val publishSettings : Seq[Setting[_]] = Seq( publishDists := { val artifacts = (packagedArtifacts in publish).value diff --git a/project/ScriptCommands.scala b/project/ScriptCommands.scala index 0b51f3b91c31..12cd37e34156 100644 --- a/project/ScriptCommands.scala +++ b/project/ScriptCommands.scala @@ -106,7 +106,13 @@ object ScriptCommands { baseVersionSuffix in Global := "SPLIT", resolvers in Global += "scala-pr" at url, publishTo in Global := Some("sonatype-releases" at "https://oss.sonatype.org/service/local/staging/deploy/maven2"), - credentials in Global += Credentials("Sonatype Nexus Repository Manager", "oss.sonatype.org", env("SONA_USER"), env("SONA_PASS")) + credentials in Global ++= { + val user = env("SONA_USER") + val pass = env("SONA_PASS") + if (user != "" && pass != "") + List(Credentials("Sonatype Nexus Repository Manager", "oss.sonatype.org", user, pass)) + else Nil + } // pgpSigningKey and pgpPassphrase are set externally by travis / the bootstrap script, as the sbt-pgp plugin is not enabled by default ) ++ enableOptimizer } @@ -159,7 +165,12 @@ object ScriptCommands { Seq( publishTo in Global := Some("scala-pr-publish" at url2), - credentials in Global += Credentials("Artifactory Realm", "scala-ci.typesafe.com", "scala-ci", env("PRIVATE_REPO_PASS")) + credentials in Global ++= { + val pass = env("PRIVATE_REPO_PASS") + if (pass != "") + List(Credentials("Artifactory Realm", "scala-ci.typesafe.com", "scala-ci", env("PRIVATE_REPO_PASS"))) + else Nil + } ) } From 531ca66c3710fd48faeeec991b534a9b47afdbc8 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Thu, 3 Jun 2021 09:03:39 +0100 Subject: [PATCH 236/769] Fix prefixAligns, avoid spurious outer test warnings on patdep types --- .../tools/nsc/transform/patmat/MatchTreeMaking.scala | 5 +++-- test/files/pos/t12392.scala | 1 + 
test/files/pos/t12398.scala | 11 +++++++++++ test/files/pos/t12398b.scala | 11 +++++++++++ 4 files changed, 26 insertions(+), 2 deletions(-) create mode 100644 test/files/pos/t12398.scala create mode 100644 test/files/pos/t12398b.scala diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala index 0c7646fb03b4..bda182568a2d 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala @@ -393,8 +393,9 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { case TypeRef(pre, _, _) if !pre.isStable => // e.g. _: Outer#Inner false case TypeRef(pre, sym, args) => - val testedBinderClass = testedBinder.info.upperBound.typeSymbol - // alternatively..... = testedBinder.info.baseClasses.find(_.isClass).getOrElse(NoSymbol) + val testedBinderClass = testedBinder.info.baseClasses.find { sym => + sym.isClass && !sym.isRefinementClass + }.getOrElse(NoSymbol) val testedBinderType = testedBinder.info.baseType(testedBinderClass) val testedPrefixIsExpectedTypePrefix = pre =:= testedBinderType.prefix diff --git a/test/files/pos/t12392.scala b/test/files/pos/t12392.scala index 78496e1aa392..056fd1ae2d17 100644 --- a/test/files/pos/t12392.scala +++ b/test/files/pos/t12392.scala @@ -1,3 +1,4 @@ +// scalac: -Werror import scala.reflect.api.Universe object Test { diff --git a/test/files/pos/t12398.scala b/test/files/pos/t12398.scala new file mode 100644 index 000000000000..ebd6bda4cf8e --- /dev/null +++ b/test/files/pos/t12398.scala @@ -0,0 +1,11 @@ +// scalac: -Werror +import scala.reflect.api.Universe + +object Test { + type SingletonUniverse = Universe with Singleton + + def foo[U <: SingletonUniverse](u: U)(typ: u.Type): List[u.Annotation] = typ match { + case t: u.AnnotatedTypeApi => t.annotations // was: "The outer reference in this type test cannot be checked at run time." + case _ => Nil + } +} diff --git a/test/files/pos/t12398b.scala b/test/files/pos/t12398b.scala new file mode 100644 index 000000000000..9337a6e8e0fd --- /dev/null +++ b/test/files/pos/t12398b.scala @@ -0,0 +1,11 @@ +// scalac: -Werror +import scala.reflect.api.Universe + +object Test { + type SingletonUniverse = Universe with Singleton + + def foo[U <: SingletonUniverse](u: U)(typ: U#Type): List[U#Annotation] = typ match { + case t: U#AnnotatedTypeApi => t.annotations // as a comparison, this wasn't emitting a warning + case _ => Nil + } +} From 0fc323b399e8ca57846481ccff8c3e4dbeaac824 Mon Sep 17 00:00:00 2001 From: Li Haoyi Date: Fri, 4 Jun 2021 18:25:28 +0800 Subject: [PATCH 237/769] Fix asymmetric failure behavior of Future#{zip,zipWith,traverse,sequence} by making them fail fast regardless of ordering Currently, given the following setup: ```scala val f1 = Future{Thread.sleep(10000)} val f2 = Future{Thread.sleep(2000); throw new Exception("Boom")} ``` The following two snippets exhibit different failure behavior: ```scala val fa = Await.result(f1.zip(f2), Duration.Inf) ``` ```scala val fb = Await.result(f2.zip(f1), Duration.Inf) ``` `fa` fails after 10000ms, while `fb` fails after 2000ms. Both fail with `java.lang.Exception: Boom`. When zipping two `Future`s together, if the left `Future` fails early, the zipped `Future` fails early. But if the right `Future` fails early, the zipped `Future` waits until the left `Future` completes before failing.
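For anyone who wants to reproduce the difference, here is a small throwaway harness (illustrative only, not part of this patch; it assumes the global `ExecutionContext` and blocks only for demonstration purposes):

```scala
import scala.concurrent.{Await, Future}
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
import scala.util.Try

def slowSuccess() = Future { Thread.sleep(10000) }
def fastFailure() = Future { Thread.sleep(2000); throw new Exception("Boom") }

// Measures how long it takes each ordering of zip to report the failure.
def timed(label: String)(body: => Any): Unit = {
  val start = System.nanoTime()
  val outcome = Try(body)
  println(s"$label -> $outcome after ~${(System.nanoTime() - start) / 1000000} ms")
}

// Before this change: ~10000 ms, because the right-hand failure is only observed
// once the left-hand Future completes. After this change: should be ~2000 ms.
timed("failure on the right")(Await.result(slowSuccess().zip(fastFailure()), Duration.Inf))

// Fails after ~2000 ms both before and after this change.
timed("failure on the left")(Await.result(fastFailure().zip(slowSuccess()), Duration.Inf))
```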
`traverse` and `sequence` are similarly implemented with `zipWith` and should exhibit the same behavior. This all arises because `zipWith` is implemented using `flatMap`, which is by definition asymmetric, since it waits for the left `Future` to complete before even considering the right `Future`. The current behavior makes the failure behavior of `Future`s highly unpredictable; in general nobody pays attention to the order of `Future`s when zipping them together, and thus whether a `zipWith`ed/`zip`ed/`traverse`d/`sequence`d `Future` fails early or not is entirely arbitrary. This PR replaces the implementation of `zipWith`, turning it from `flatMap`-based to `Promise`-based, so that when a `Future` fails early, regardless of whether it's the left or right `Future`, the resultant `Future` will fail immediately. Implementation-wise I'm using an `AtomicReference` and `compareAndSet`, which should give us the behavior we want without any locking. It may well be possible to achieve this with even less overhead, e.g. using only `volatile`s or even using no concurrency controls at all, but I couldn't come up with anything better. If anyone has a better solution I'll happily include it. This fix would apply to all of `zip`/`zipWith`/`traverse`/`sequence`, since they are all implemented on top of `zipWith`. While it is possible that someone could be relying on the left-biased nature of the current `zip`/`zipWith`/`traverse`/`sequence` implementation, it seems like something that's unlikely to be reliable enough to depend upon. In my experience people generally aren't aware that `zipWith`/`zip`/`traverse`/`sequence` are left-biased at all, and they don't generally know the total ordering of how long their `Future`s take to run. That means the status quo behavior would just result in some `Future` failures mysteriously taking longer to report for no clear reason. Notably, the biased nature of these operators is not documented in any of their scaladoc comments. While there is a non-zero chance that somebody could be intentionally or unintentionally depending on the biased nature of these combinators, there is a much greater chance that someone unaware of the current bias would be puzzled why their highly-concurrent system seems to be taking longer than expected in certain scenarios. It seems likely that this PR would fix more bugs than it would introduce. Note that this does not fix the left-biased fail-fast behavior of `flatMap` chains, or their equivalent `for`-comprehensions, as `flatMap`'s API is inherently left-biased.
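As a hypothetical illustration of that inherent bias (not part of this patch, reusing the `f1`/`f2` setup and implicit `ExecutionContext` from above): the continuation that looks at the right-hand `Future` only runs once the left-hand `Future` has completed successfully, so a right-hand failure cannot be observed early.

```scala
// Roughly what the old zip/zipWith did under the hood: f2's failure at ~2000 ms
// is only noticed once f1 completes at ~10000 ms.
val viaFlatMap = f1.flatMap(r1 => f2.map(r2 => (r1, r2)))

// The equivalent for-comprehension desugars to the same flatMap/map chain and is
// therefore just as left-biased.
val viaFor = for { r1 <- f1; r2 <- f2 } yield (r1, r2)
```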
But anyone who wants fail-fast behavior can convert sections of their `flatMap` chains into `.zip`s where possible, and where not possible that's generally because there is some true data dependency between the `flatMap`s --- project/MimaFilters.scala | 3 ++ src/library/scala/concurrent/Future.scala | 28 ++++++++------ .../scala/concurrent/impl/Promise.scala | 36 ++++++++++++++++++ test/files/jvm/future-spec/FutureTests.scala | 1 + test/junit/scala/concurrent/FutureTest.scala | 38 +++++++++++++++++++ 5 files changed, 95 insertions(+), 11 deletions(-) diff --git a/project/MimaFilters.scala b/project/MimaFilters.scala index ad847e7b0a31..8088df181f9d 100644 --- a/project/MimaFilters.scala +++ b/project/MimaFilters.scala @@ -33,6 +33,9 @@ object MimaFilters extends AutoPlugin { // #8835 ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.reflect.runtime.SynchronizedOps#SynchronizedBaseTypeSeq.scala$reflect$runtime$SynchronizedOps$SynchronizedBaseTypeSeq$$super$maxDepthOfElems"), + + // this is an internal class and adding a final override here should not be a problem + ProblemFilters.exclude[FinalMethodProblem]("scala.concurrent.impl.Promise#DefaultPromise.zipWith"), ) override val buildSettings = Seq( diff --git a/src/library/scala/concurrent/Future.scala b/src/library/scala/concurrent/Future.scala index 4439b6507f7d..3bcedc53a84a 100644 --- a/src/library/scala/concurrent/Future.scala +++ b/src/library/scala/concurrent/Future.scala @@ -383,10 +383,11 @@ trait Future[+T] extends Awaitable[T] { /** Zips the values of `this` and `that` future, and creates * a new future holding the tuple of their results. * - * If `this` future fails, the resulting future is failed - * with the throwable stored in `this`. - * Otherwise, if `that` future fails, the resulting future is failed - * with the throwable stored in `that`. + * If either input future fails, the resulting future is failed with the same + * throwable, without waiting for the other input future to complete. + * + * If the application of `f` throws a non-fatal throwable, the resulting future + * is failed with that throwable. * * @tparam U the type of the other `Future` * @param that the other `Future` @@ -399,12 +400,11 @@ trait Future[+T] extends Awaitable[T] { /** Zips the values of `this` and `that` future using a function `f`, * and creates a new future holding the result. * - * If `this` future fails, the resulting future is failed - * with the throwable stored in `this`. - * Otherwise, if `that` future fails, the resulting future is failed - * with the throwable stored in `that`. - * If the application of `f` throws a throwable, the resulting future - * is failed with that throwable if it is non-fatal. + * If either input future fails, the resulting future is failed with the same + * throwable, without waiting for the other input future to complete. + * + * If the application of `f` throws a non-fatal throwable, the resulting future + * is failed with that throwable. 
* * @tparam U the type of the other `Future` * @tparam R the type of the resulting `Future` @@ -413,8 +413,14 @@ trait Future[+T] extends Awaitable[T] { * @return a `Future` with the result of the application of `f` to the results of `this` and `that` * @group Transformations */ - def zipWith[U, R](that: Future[U])(f: (T, U) => R)(implicit executor: ExecutionContext): Future[R] = + def zipWith[U, R](that: Future[U])(f: (T, U) => R)(implicit executor: ExecutionContext): Future[R] = { + // This is typically overriden by the implementation in DefaultPromise, which provides + // symmetric fail-fast behavior regardless of which future fails first. + // + // TODO: remove this implementation and make Future#zipWith abstract + // when we're next willing to make a binary incompatible change flatMap(r1 => that.map(r2 => f(r1, r2)))(if (executor.isInstanceOf[BatchingExecutor]) executor else parasitic) + } /** Creates a new future which holds the result of this future if it was completed successfully, or, if not, * the result of the `that` future if `that` is completed successfully. diff --git a/src/library/scala/concurrent/impl/Promise.scala b/src/library/scala/concurrent/impl/Promise.scala index 2ec0ebe9a24e..e031e51bd011 100644 --- a/src/library/scala/concurrent/impl/Promise.scala +++ b/src/library/scala/concurrent/impl/Promise.scala @@ -130,6 +130,42 @@ private[concurrent] object Promise { override final def transformWith[S](f: Try[T] => Future[S])(implicit executor: ExecutionContext): Future[S] = dispatchOrAddCallbacks(get(), new Transformation[T, S](Xform_transformWith, f, executor)) + override final def zipWith[U, R](that: Future[U])(f: (T, U) => R)(implicit executor: ExecutionContext): Future[R] = { + val state = get() + if (state.isInstanceOf[Try[T]]) { + if (state.asInstanceOf[Try[T]].isFailure) this.asInstanceOf[Future[R]] + else { + val l = state.asInstanceOf[Success[T]].get + that.map(r => f(l, r)) + } + } else { + val buffer = new AtomicReference[Success[Any]]() + val zipped = new DefaultPromise[R]() + + val thisF: Try[T] => Unit = { + case left: Success[T] => + val right = buffer.getAndSet(left).asInstanceOf[Success[U]] + if (right ne null) + zipped.tryComplete(try Success(f(left.get, right.get)) catch { case e if NonFatal(e) => Failure(e) }) + case f => // Can only be Failure + zipped.tryComplete(f.asInstanceOf[Failure[R]]) + } + + val thatF: Try[U] => Unit = { + case right: Success[U] => + val left = buffer.getAndSet(right).asInstanceOf[Success[T]] + if (left ne null) + zipped.tryComplete(try Success(f(left.get, right.get)) catch { case e if NonFatal(e) => Failure(e) }) + case f => // Can only be Failure + zipped.tryComplete(f.asInstanceOf[Failure[R]]) + } + // Cheaper than this.onComplete since we already polled the state + this.dispatchOrAddCallbacks(state, new Transformation[T, Unit](Xform_onComplete, thisF, executor)) + that.onComplete(thatF) + zipped.future + } + } + override final def foreach[U](f: T => U)(implicit executor: ExecutionContext): Unit = { val state = get() if (!state.isInstanceOf[Failure[T]]) dispatchOrAddCallbacks(state, new Transformation[T, Unit](Xform_foreach, f, executor)) diff --git a/test/files/jvm/future-spec/FutureTests.scala b/test/files/jvm/future-spec/FutureTests.scala index f2c83a64aeeb..7181abd144c7 100644 --- a/test/files/jvm/future-spec/FutureTests.scala +++ b/test/files/jvm/future-spec/FutureTests.scala @@ -147,6 +147,7 @@ class FutureTests extends MinimalScalaTest { assert( ECNotUsed(ec => f.filter(_ => fail("filter should not have been 
called"))(ec)) eq f) assert( ECNotUsed(ec => f.collect({ case _ => fail("collect should not have been called")})(ec)) eq f) assert( ECNotUsed(ec => f.zipWith(f)({ (_,_) => fail("zipWith should not have been called")})(ec)) eq f) + } } diff --git a/test/junit/scala/concurrent/FutureTest.scala b/test/junit/scala/concurrent/FutureTest.scala index 45069e274170..8c3e3310f687 100644 --- a/test/junit/scala/concurrent/FutureTest.scala +++ b/test/junit/scala/concurrent/FutureTest.scala @@ -6,8 +6,46 @@ import org.junit.Test import scala.tools.testkit.AssertUtil._ import scala.util.Try +import duration.Duration.Inf class FutureTest { + @Test + def testZipWithFailFastBothWays(): Unit = { + import ExecutionContext.Implicits.global + + val p1 = Promise[Int]() + val p2 = Promise[Int]() + + // Make sure that the combined future fails early, after the earlier failure occurs, and does not + // wait for the later failure regardless of which one is on the left and which is on the right + p1.failure(new Exception("Boom Early")) + val f1 = p1.future + val f2 = p2.future + + val scala.util.Failure(fa) = Try(Await.result(f1.zip(f2), Inf)) + val scala.util.Failure(fb) = Try(Await.result(f2.zip(f1), Inf)) + + val scala.util.Failure(fc) = Try(Await.result(f1.zipWith(f2)((_, _)), Inf)) + val scala.util.Failure(fd) = Try(Await.result(f2.zipWith(f1)((_, _)), Inf)) + + val scala.util.Failure(fe) = Try(Await.result(Future.sequence(Seq(f1, f2)), Inf)) + val scala.util.Failure(ff) = Try(Await.result(Future.sequence(Seq(f2, f1)), Inf)) + + val scala.util.Failure(fg) = Try(Await.result(Future.traverse(Seq(0, 1))(Seq(f1, f2)(_)), Inf)) + val scala.util.Failure(fh) = Try(Await.result(Future.traverse(Seq(0, 1))(Seq(f1, f2)(_)), Inf)) + + // Make sure the early failure is always reported, regardless of whether it's on + // the left or right of the zip/zipWith/sequence/traverse + assert(fa.getMessage == "Boom Early") + assert(fb.getMessage == "Boom Early") + assert(fc.getMessage == "Boom Early") + assert(fd.getMessage == "Boom Early") + assert(fe.getMessage == "Boom Early") + assert(ff.getMessage == "Boom Early") + assert(fg.getMessage == "Boom Early") + assert(fh.getMessage == "Boom Early") + } + @Test def `bug/issues#10513 firstCompletedOf must not leak references`(): Unit = { val unfulfilled = Promise[AnyRef]() From 0a3e2074eda196b05bb68a34edf73505614200d3 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 8 Jun 2021 14:56:57 +1000 Subject: [PATCH 238/769] Repl type-hint should Print inferred DefDef/ValDef tpts --- .../interpreter/PresentationCompilation.scala | 20 +++++++++++++++++-- 1 file changed, 18 insertions(+), 2 deletions(-) diff --git a/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala b/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala index 609fd0619345..83de183d9a52 100644 --- a/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala +++ b/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala @@ -152,11 +152,27 @@ trait PresentationCompilation { self: IMain => override def print = { val tree = treeAt(inputRange) - val tpString = typeString(tree) match { + import compiler._ + object makeCodePrinterPrintInferredTypes extends Transformer { + private def printableTypeTree(tp: Type): TypeTree = { + val tree = TypeTree(tp) + tree.wasEmpty = false + tree + } + override def transform(tree: Tree): Tree = super.transform(tree) match { + case ValDef(mods, name, tt @ build.SyntacticEmptyTypeTree(), rhs) => + treeCopy.ValDef(tree, mods, name, 
printableTypeTree(tt.tpe), rhs) + case DefDef(mods, name, tparams, vparamss, tt @ build.SyntacticEmptyTypeTree(), rhs) => + treeCopy.DefDef(tree, mods, name, tparams, vparamss, printableTypeTree(tt.tpe), rhs) + case t => t + } + } + val tree1 = makeCodePrinterPrintInferredTypes.transform(tree) + val tpString = typeString(tree1) match { case "" => "" case s => " // : " + s } - treeString(tree) + tpString + treeString(tree1) + tpString } From c30d9b3c9c91f1797ff1cba426a129946fc7b87b Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 8 Jun 2021 15:08:29 +1000 Subject: [PATCH 239/769] Hide REPL wrapper details from type/tree hint output --- .../scala/tools/nsc/interpreter/jline/Reader.scala | 2 +- .../tools/nsc/interpreter/PresentationCompilation.scala | 8 +++++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/src/repl-frontend/scala/tools/nsc/interpreter/jline/Reader.scala b/src/repl-frontend/scala/tools/nsc/interpreter/jline/Reader.scala index aa83d492122b..bff410b8ded6 100644 --- a/src/repl-frontend/scala/tools/nsc/interpreter/jline/Reader.scala +++ b/src/repl-frontend/scala/tools/nsc/interpreter/jline/Reader.scala @@ -169,7 +169,7 @@ object Reader { } true } - def show(text: String): Unit = { + def show(text: String): Unit = if (text != "") { reader.callWidget(LineReader.CLEAR) reader.getTerminal.writer.println() reader.getTerminal.writer.println(text) diff --git a/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala b/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala index 83de183d9a52..a2128f52cf49 100644 --- a/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala +++ b/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala @@ -143,6 +143,7 @@ trait PresentationCompilation { self: IMain => def typeString(tree: compiler.Tree): String = { tree.tpe match { case null | compiler.NoType | compiler.ErrorType => "" + case tp if compiler.nme.isReplWrapperName(tp.typeSymbol.name) => "" case tp => compiler.exitingTyper(tp.toString) } } @@ -161,11 +162,16 @@ trait PresentationCompilation { self: IMain => } override def transform(tree: Tree): Tree = super.transform(tree) match { case ValDef(mods, name, tt @ build.SyntacticEmptyTypeTree(), rhs) => - treeCopy.ValDef(tree, mods, name, printableTypeTree(tt.tpe), rhs) + if (tree.symbol != null && tree.symbol != NoSymbol && nme.isReplWrapperName(tree.symbol.owner.name)) { + treeCopy.ValDef(tree, mods &~ (Flag.PRIVATE | Flag.LOCAL), name.dropLocal, printableTypeTree(tt.tpe), rhs) + } else { + treeCopy.ValDef(tree, mods, name, printableTypeTree(tt.tpe), rhs) + } case DefDef(mods, name, tparams, vparamss, tt @ build.SyntacticEmptyTypeTree(), rhs) => treeCopy.DefDef(tree, mods, name, tparams, vparamss, printableTypeTree(tt.tpe), rhs) case t => t } + } val tree1 = makeCodePrinterPrintInferredTypes.transform(tree) val tpString = typeString(tree1) match { From 9f8633a0fd99e0fa625219357b7363adaa0a1930 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Mon, 7 Jun 2021 10:39:41 -0700 Subject: [PATCH 240/769] Avoid inlined varargs after named arg rewrite If a temp val has been created to hold varargs, always use it. A single constant arg would induce inlining and creation of a fresh varargs, ignoring the unused temp val. 
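For illustration, a minimal sketch of the kind of call this affects; the desugared form in the comments is approximate and the `x$n` names are invented for the example:

```scala
object NamedVarargsSketch {
  def f(a: Int, b: String*) = "first"

  // Named arguments given out of order are rewritten into a block of
  // temporaries so that left-to-right evaluation order is preserved, roughly:
  //   { val x$1 = Seq("sl19"); val x$2 = 28; f(x$2, x$1: _*) }   // approximate
  // Before this fix, a single constant vararg could be inlined as a fresh
  // varargs sequence at the call site, leaving the temporary unused and
  // producing a "local val x$1 ... is never used" lint warning.
  def res = f(b = "sl19", a = 28)
}
```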
--- .../tools/nsc/typechecker/NamesDefaults.scala | 24 +++++++++---------- test/files/pos/t11964.scala | 19 +++++++++++++++ test/files/run/names-defaults.check | 9 ------- 3 files changed, 30 insertions(+), 22 deletions(-) create mode 100644 test/files/pos/t11964.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala index 50117fde232f..67a7107ac084 100644 --- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala +++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala @@ -302,18 +302,15 @@ trait NamesDefaults { self: Analyzer => case _ => val byName = isByNameParamType(paramTpe) val repeated = isScalaRepeatedParamType(paramTpe) - val argTpe = ( - if (repeated) arg match { + // TODO In 83c9c764b, we tried to a stable type here to fix scala/bug#7234. But the resulting TypeTree over a + // singleton type without an original TypeTree fails to retypecheck after a resetAttrs (scala/bug#7516), + // which is important for (at least) macros. + val argTpe = + arg match { + case _ if !repeated => arg.tpe case WildcardStarArg(expr) => expr.tpe - case _ => seqType(arg.tpe) + case _ => seqType(arg.tpe.widen) // avoid constant type } - else { - // TODO In 83c9c764b, we tried to a stable type here to fix scala/bug#7234. But the resulting TypeTree over a - // singleton type without an original TypeTree fails to retypecheck after a resetAttrs (scala/bug#7516), - // which is important for (at least) macros. - arg.tpe - } - ) val s = context.owner.newValue(freshTermName(nme.NAMEDARG_PREFIX)(typer.fresh), arg.pos, newFlags = ARTIFACT) setInfo { val tp = if (byName) functionType(Nil, argTpe) else argTpe uncheckedBounds(tp) @@ -330,10 +327,11 @@ trait NamesDefaults { self: Analyzer => res } else { new ChangeOwnerTraverser(context.owner, sym) traverse arg // fixes #4502 - if (repeated) arg match { + arg match { + case _ if !repeated => arg case WildcardStarArg(expr) => expr - case _ => blockTyper typed gen.mkSeqApply(resetAttrs(arg)) - } else arg + case _ => blockTyper.typed(gen.mkSeqApply(resetAttrs(arg))) + } } Some(atPos(body.pos)(ValDef(sym, body).setType(NoType))) } diff --git a/test/files/pos/t11964.scala b/test/files/pos/t11964.scala new file mode 100644 index 000000000000..4f0bd8f73726 --- /dev/null +++ b/test/files/pos/t11964.scala @@ -0,0 +1,19 @@ +// scalac: -Werror -Xlint + +object Hmm { + def zxc(b: Int*)(implicit x: Int = 3) = "" + b + x + def res = zxc(4) +} + +object Test { + def foo(a: Any, b: Any = null, c: Any = null)(cs: String*) = ??? + def res = foo("", c = "")("X") +} + +object OP { + def f(a: Int, b: String*) = "first" + def res = f(b = "sl19", a = 28) // looks like the issue is only with single arg supplied to varargs. 
+ def or = f(b = ("x"::"y"::Nil):_*, a = 42) // 2.13 syntax only + //def and = f(b = ("x"::"y"::Nil):_*) // broken under 2.13, which disallows default + varargs + def and = List(elems = ("x"::"y"::Nil):_*) +} diff --git a/test/files/run/names-defaults.check b/test/files/run/names-defaults.check index 8b6d99ec2981..7e38494250da 100644 --- a/test/files/run/names-defaults.check +++ b/test/files/run/names-defaults.check @@ -13,9 +13,6 @@ names-defaults.scala:371: warning: the parameter name x is deprecated: use s ins names-defaults.scala:35: warning: local var var2 in value is never used var var2 = 0 ^ -names-defaults.scala:108: warning: local val x$34 in value is never used - println(t7.f(b = "sl19", a = 28)) // first - ^ names-defaults.scala:279: warning: local val u in method foo is never used class A2489 { def foo(): Unit = { def bar(a: Int = 1) = a; bar(); val u = 0 } } ^ @@ -25,12 +22,6 @@ names-defaults.scala:280: warning: local val v in method foo is never used names-defaults.scala:280: warning: local val u in method foo is never used class A2489x2 { def foo(): Unit = { val v = 10; def bar(a: Int = 1, b: Int = 2) = a; bar(); val u = 0 } } ^ -names-defaults.scala:380: warning: local val x$104 in value is never used - println(t3697.a(3)()) - ^ -names-defaults.scala:385: warning: local val x$112 in value is never used - println(t3697.b(b = 1, a = 2, c = 3)) - ^ names-defaults.scala:269: warning: multiline expressions might require enclosing parentheses; a value can be silently discarded when Unit is expected spawn(b = { val ttt = 1; ttt }, a = 0) ^ From e3c31e037cda9c2d286e3bdd532137c66d7d1afc Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 9 Jun 2021 13:01:59 +1000 Subject: [PATCH 241/769] Avoid possible NPE after cancelled compilation I noticed this when cancelling the compile: ``` [warn] Canceling execution... 
[error] ## Exception when compiling 707 sources to /Users/jz/code/scala/build/quick/classes/library [error] java.lang.NullPointerException [error] scala.tools.nsc.backend.jvm.GenBCode$BCodePhase.close(GenBCode.scala:108) [error] scala.tools.nsc.backend.jvm.GenBCode$BCodePhase.$anonfun$run$1(GenBCode.scala:84) [error] scala.tools.nsc.backend.jvm.GenBCode$BCodePhase.run(GenBCode.scala:78) [error] scala.tools.nsc.Global$Run.compileUnitsInternal(Global.scala:1514) [error] scala.tools.nsc.Global$Run.compileUnits(Global.scala:1498) [error] scala.tools.nsc.Global$Run.compileSources(Global.scala:1491) [error] scala.tools.nsc.Global$Run.compile(Global.scala:1620) [error] xsbt.CachedCompiler0.run(CompilerInterface.scala:153) [error] xsbt.CachedCompiler0.run(CompilerInterface.scala:125) [error] xsbt.CompilerInterface.run(CompilerInterface.scala:39) [error] sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ``` --- src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala index a4a01fd60946..1e8fc8dc45c9 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala @@ -105,8 +105,8 @@ abstract class GenBCode extends SubComponent { } private def close(): Unit = { - postProcessor.classfileWriter.close() - generatedClassHandler.close() + Option(postProcessor.classfileWriter).foreach(_.close()) + Option(generatedClassHandler).foreach(_.close()) } } } From a5d03cd43f07423aac381de8bc11232cc463bf93 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 10 Jun 2021 15:45:08 +1000 Subject: [PATCH 242/769] Delay package object decls entering to the package object phase --- src/compiler/scala/tools/nsc/Global.scala | 16 ++++++++++++---- .../scala/tools/nsc/typechecker/Analyzer.scala | 8 ++++++++ .../scala/tools/nsc/interactive/Global.scala | 7 +++++++ .../scala/reflect/internal/SymbolTable.scala | 5 ++++- 4 files changed, 31 insertions(+), 5 deletions(-) diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index bea3b0678099..5dbea6505185 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -80,7 +80,15 @@ class Global(var currentSettings: Settings, reporter0: Reporter) import definitions.findNamedMember def findMemberFromRoot(fullName: Name): Symbol = rootMirror.findMemberFromRoot(fullName) - + override def deferredOpenPackageModule(container: Symbol, dest: Symbol): Unit = { + // Some compiler runs (e.g. 
Toolbox and the PC) just initialise Global and then discard the Run + // such that the scala package object decls never get entered into the scala package + if ((curRun eq null) || !isGlobalInitialized || isPastPackageObjects) { + super.openPackageModule(container, dest) + } else { + analyzer.packageObjects.deferredOpen(dest) = container + } + } // alternate constructors ------------------------------------------ override def settings = currentSettings @@ -1017,6 +1025,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) ) override def isPastTyper = isPast(currentRun.typerPhase) def isBeforeErasure = isBefore(currentRun.erasurePhase) + def isPastPackageObjects = isPast(currentRun.packageobjectsPhase) def isPast(phase: Phase) = ( (curRun ne null) && isGlobalInitialized // defense against init order issues @@ -1338,7 +1347,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) */ val parserPhase = phaseNamed("parser") val namerPhase = phaseNamed("namer") - // val packageobjectsPhase = phaseNamed("packageobjects") + val packageobjectsPhase = phaseNamed("packageobjects") val typerPhase = phaseNamed("typer") // val inlineclassesPhase = phaseNamed("inlineclasses") // val superaccessorsPhase = phaseNamed("superaccessors") @@ -1649,8 +1658,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) compileLate(new CompilationUnit(scripted(getSourceFile(file)))) } - /** Compile abstract file until `globalPhase`, but at least to phase "namer". - */ + /** Compile the unit until `globalPhase`, but at least to phase "typer". */ def compileLate(unit: CompilationUnit): Unit = { addUnit(unit) diff --git a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala index a48dad7c960c..1fd2fde5894e 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala @@ -52,6 +52,7 @@ trait Analyzer extends AnyRef object packageObjects extends { val global: Analyzer.this.global.type = Analyzer.this.global } with SubComponent { + val deferredOpen = perRunCaches.newMap[Symbol, Symbol]() val phaseName = "packageobjects" val runsAfter = List[String]() val runsRightAfter= Some("namer") @@ -64,6 +65,9 @@ trait Analyzer extends AnyRef override def traverse(tree: Tree): Unit = tree match { case ModuleDef(_, _, _) => if (tree.symbol.name == nme.PACKAGEkw) { + // we've actually got a source file + deferredOpen.remove(tree.symbol.owner) + openPackageModule(tree.symbol, tree.symbol.owner) } case ClassDef(_, _, _, _) => () // make it fast @@ -73,6 +77,10 @@ trait Analyzer extends AnyRef def apply(unit: CompilationUnit): Unit = { openPackageObjectsTraverser(unit.body) + deferredOpen.foreach { + case (dest, container) => + openPackageModule(container, dest) + } } } } diff --git a/src/interactive/scala/tools/nsc/interactive/Global.scala b/src/interactive/scala/tools/nsc/interactive/Global.scala index c99fe6637aff..bb434dd7a0bf 100644 --- a/src/interactive/scala/tools/nsc/interactive/Global.scala +++ b/src/interactive/scala/tools/nsc/interactive/Global.scala @@ -1355,6 +1355,13 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") } } + override def isPastPackageObjects = { + (if (currentTyperRun == null) NoCompilationUnit else currentTyperRun.currentUnit) match { + case unit: RichCompilationUnit => unit.isParsed + case _ => super.isPastPackageObjects + } + } + def newTyperRun(): Unit = { currentTyperRun = new TyperRun } diff --git 
a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala index 3113062c5b51..ec882b71d690 100644 --- a/src/reflect/scala/reflect/internal/SymbolTable.scala +++ b/src/reflect/scala/reflect/internal/SymbolTable.scala @@ -339,6 +339,9 @@ abstract class SymbolTable extends macros.Universe } } + def deferredOpenPackageModule(container: Symbol, dest: Symbol): Unit = { + openPackageModule(container, dest) + } def openPackageModule(container: Symbol, dest: Symbol): Unit = { // unlink existing symbols in the package for (member <- container.info.decls.iterator) { @@ -396,7 +399,7 @@ abstract class SymbolTable extends macros.Universe case _ => false } if (pkgModule.isModule && !fromSource) { - openPackageModule(pkgModule, pkgClass) + deferredOpenPackageModule(pkgModule, pkgClass) } } From c7466b50a2089cfe85540db77c00ffcbc93bf27a Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Thu, 10 Jun 2021 09:04:28 -0700 Subject: [PATCH 243/769] [backport] Handle star in import selector --- .../scala/tools/nsc/ast/parser/Parsers.scala | 2 +- test/files/neg/import-syntax.check | 4 ++++ test/files/neg/import-syntax.scala | 22 +++++++++++++++++++ 3 files changed, 27 insertions(+), 1 deletion(-) create mode 100644 test/files/neg/import-syntax.check create mode 100644 test/files/neg/import-syntax.scala diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 5532d9328354..94403fadc206 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -2648,7 +2648,7 @@ self => } def wildcardOrIdent() = { - if (in.token == USCORE) { in.nextToken() ; nme.WILDCARD } + if (in.token == USCORE || currentRun.isScala3 && isRawStar) { in.nextToken() ; nme.WILDCARD } else ident() } diff --git a/test/files/neg/import-syntax.check b/test/files/neg/import-syntax.check new file mode 100644 index 000000000000..e8bb8d5636e9 --- /dev/null +++ b/test/files/neg/import-syntax.check @@ -0,0 +1,4 @@ +import-syntax.scala:13: error: not found: value should fail + `should fail`() + ^ +one error found diff --git a/test/files/neg/import-syntax.scala b/test/files/neg/import-syntax.scala new file mode 100644 index 000000000000..74fd2bbd3a7e --- /dev/null +++ b/test/files/neg/import-syntax.scala @@ -0,0 +1,22 @@ +// scalac: -Xsource:3 + +class D { + def *(y: Int): Int = y + def unrelated(y: Int): Int = y +} + +// TODO +object nope { + val d = new D + import d.{* => huh} + import d.{_ => also_no} + `should fail`() +} + +// OK +object rename { + val d = new D + import d.{unrelated => f, *} + def x = f(42) + def y = *(27) +} From 5e64bc8798fc69a99820e3cb2b941bd680fd8574 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Thu, 10 Jun 2021 09:40:19 -0700 Subject: [PATCH 244/769] Wildcard import cannot be renamed --- src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 1 + test/files/neg/import-syntax.check | 11 +++++++---- test/files/neg/import-syntax.scala | 3 +-- 3 files changed, 9 insertions(+), 6 deletions(-) diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 94403fadc206..7d8b4f50a6be 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -2669,6 +2669,7 @@ self => if (in.token == ARROW || (currentRun.isScala3 && isRawIdent && in.name == nme.as)) { in.nextToken() renameOffset = in.offset + if (name == nme.WILDCARD) 
syntaxError(renameOffset, "Wildcard import cannot be renamed") wildcardOrIdent() } else if (name == nme.WILDCARD) null diff --git a/test/files/neg/import-syntax.check b/test/files/neg/import-syntax.check index e8bb8d5636e9..231b64ce44ae 100644 --- a/test/files/neg/import-syntax.check +++ b/test/files/neg/import-syntax.check @@ -1,4 +1,7 @@ -import-syntax.scala:13: error: not found: value should fail - `should fail`() - ^ -one error found +import-syntax.scala:10: error: Wildcard import cannot be renamed + import d.{* => huh} + ^ +import-syntax.scala:11: error: Wildcard import cannot be renamed + import d.{_ => also_no} + ^ +two errors found diff --git a/test/files/neg/import-syntax.scala b/test/files/neg/import-syntax.scala index 74fd2bbd3a7e..72f90f232d8d 100644 --- a/test/files/neg/import-syntax.scala +++ b/test/files/neg/import-syntax.scala @@ -5,7 +5,6 @@ class D { def unrelated(y: Int): Int = y } -// TODO object nope { val d = new D import d.{* => huh} @@ -13,7 +12,7 @@ object nope { `should fail`() } -// OK +// OK, except previous syntax errors bail early object rename { val d = new D import d.{unrelated => f, *} From d3d4a3d7314919c3c9b69a9fb70cd9b3ce5d6c7b Mon Sep 17 00:00:00 2001 From: VladKopanev Date: Sat, 29 May 2021 20:14:57 +0300 Subject: [PATCH 245/769] Limit productElementName to productArity --- .../nsc/typechecker/SyntheticMethods.scala | 2 +- test/files/run/productElementName-oob.check | 12 ++++++++++ test/files/run/productElementName-oob.scala | 22 +++++++++++++++++++ 3 files changed, 35 insertions(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala index 575324df0f76..fcc5560ad149 100644 --- a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala +++ b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala @@ -126,7 +126,7 @@ trait SyntheticMethods extends ast.TreeDSL { createSwitchMethod(name, accessors.indices, returnType)(idx => caseFn(accessors(idx))) def productElementNameMethod = { - val constrParamAccessors = clazz.constrParamAccessors + val constrParamAccessors = clazz.constrParamAccessors.take(arity) createSwitchMethod(nme.productElementName, constrParamAccessors.indices, StringTpe)(idx => LIT(constrParamAccessors(idx).name.dropLocal.decode)) } diff --git a/test/files/run/productElementName-oob.check b/test/files/run/productElementName-oob.check index 1d73c804feb4..ef4fb8c2115f 100644 --- a/test/files/run/productElementName-oob.check +++ b/test/files/run/productElementName-oob.check @@ -9,3 +9,15 @@ scala.Product.productElementName scala.Product.productElementName$ CaseObject$.productElementName Test$.delayedEndpoint$Test$1 + +java.lang.IndexOutOfBoundsException: 1 +scala.runtime.Statics.ioobe +ImplicitParamsCaseClass.productElementName +Test$.delayedEndpoint$Test$1 +Test$delayedInit$body.apply + +java.lang.IndexOutOfBoundsException: 1 +scala.runtime.Statics.ioobe +CurriedCaseClass.productElementName +Test$.delayedEndpoint$Test$1 +Test$delayedInit$body.apply diff --git a/test/files/run/productElementName-oob.scala b/test/files/run/productElementName-oob.scala index 52702a4a5165..6a4cf44e5df4 100644 --- a/test/files/run/productElementName-oob.scala +++ b/test/files/run/productElementName-oob.scala @@ -1,5 +1,7 @@ case class CaseClass(a: String, b: Int) case object CaseObject +case class ImplicitParamsCaseClass[A: Ordering](a: A) +case class CurriedCaseClass(i: Int)(s: String) object Test extends App { @@ -21,5 +23,25 @@ object Test 
extends App { e.getStackTrace.take(4).foreach(s => println(s.toString.takeWhile(_ != '('))) } + println() + + try { + ImplicitParamsCaseClass(42).productElementName(1) + } catch { + case e: IndexOutOfBoundsException => + println(e) + e.getStackTrace.take(4).foreach(s => println(s.toString.takeWhile(_ != '('))) + } + + println() + + try { + CurriedCaseClass(42)("").productElementName(1) + } catch { + case e: IndexOutOfBoundsException => + println(e) + e.getStackTrace.take(4).foreach(s => println(s.toString.takeWhile(_ != '('))) + } + } From ce38b958ea369f9f28158261202a7540d6575a8d Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Sat, 29 May 2021 15:03:02 -0700 Subject: [PATCH 246/769] Fix exception message from productElementName Tweak the test and eliminate check file. --- .../nsc/typechecker/SyntheticMethods.scala | 9 +-- src/library/scala/Product.scala | 2 +- test/files/run/productElementName-oob.check | 23 -------- test/files/run/productElementName-oob.scala | 55 ++++++------------- 4 files changed, 21 insertions(+), 68 deletions(-) delete mode 100644 test/files/run/productElementName-oob.check diff --git a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala index fcc5560ad149..4097d6c3510b 100644 --- a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala +++ b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala @@ -126,8 +126,8 @@ trait SyntheticMethods extends ast.TreeDSL { createSwitchMethod(name, accessors.indices, returnType)(idx => caseFn(accessors(idx))) def productElementNameMethod = { - val constrParamAccessors = clazz.constrParamAccessors.take(arity) - createSwitchMethod(nme.productElementName, constrParamAccessors.indices, StringTpe)(idx => LIT(constrParamAccessors(idx).name.dropLocal.decode)) + val elementAccessors = clazz.constrParamAccessors.take(arity) + createSwitchMethod(nme.productElementName, elementAccessors.indices, StringTpe)(idx => LIT(elementAccessors(idx).name.dropLocal.decode)) } var syntheticCanEqual = false @@ -283,10 +283,7 @@ trait SyntheticMethods extends ast.TreeDSL { case sym => (sym, () => productElementNameMethod) :: Nil } - List( - productMethods, - elementName - ).flatten + productMethods ::: elementName } def hashcodeImplementation(sym: Symbol): Tree = { diff --git a/src/library/scala/Product.scala b/src/library/scala/Product.scala index 240a4d43f5c2..96a2277d736e 100644 --- a/src/library/scala/Product.scala +++ b/src/library/scala/Product.scala @@ -59,7 +59,7 @@ trait Product extends Any with Equals { */ def productElementName(n: Int): String = if (n >= 0 && n < productArity) "" - else throw new IndexOutOfBoundsException(s"$n is out of bounds (min 0, max ${productArity-1}") + else throw new IndexOutOfBoundsException(s"$n is out of bounds (min 0, max ${productArity-1})") /** An iterator over the names of all the elements of this product. 
*/ diff --git a/test/files/run/productElementName-oob.check b/test/files/run/productElementName-oob.check deleted file mode 100644 index ef4fb8c2115f..000000000000 --- a/test/files/run/productElementName-oob.check +++ /dev/null @@ -1,23 +0,0 @@ -java.lang.IndexOutOfBoundsException: 99 -scala.runtime.Statics.ioobe -CaseClass.productElementName -Test$.delayedEndpoint$Test$1 -Test$delayedInit$body.apply - -java.lang.IndexOutOfBoundsException: 99 is out of bounds (min 0, max -1 -scala.Product.productElementName -scala.Product.productElementName$ -CaseObject$.productElementName -Test$.delayedEndpoint$Test$1 - -java.lang.IndexOutOfBoundsException: 1 -scala.runtime.Statics.ioobe -ImplicitParamsCaseClass.productElementName -Test$.delayedEndpoint$Test$1 -Test$delayedInit$body.apply - -java.lang.IndexOutOfBoundsException: 1 -scala.runtime.Statics.ioobe -CurriedCaseClass.productElementName -Test$.delayedEndpoint$Test$1 -Test$delayedInit$body.apply diff --git a/test/files/run/productElementName-oob.scala b/test/files/run/productElementName-oob.scala index 6a4cf44e5df4..89f24e18069c 100644 --- a/test/files/run/productElementName-oob.scala +++ b/test/files/run/productElementName-oob.scala @@ -1,47 +1,26 @@ -case class CaseClass(a: String, b: Int) +// scalac: -Xsource:3 +import scala.tools.testkit.AssertUtil.assertThrown +import scala.util.chaining.* + +case class CaseClass[A: Ordering](a: String, b: Int)(c: A) case object CaseObject -case class ImplicitParamsCaseClass[A: Ordering](a: A) -case class CurriedCaseClass(i: Int)(s: String) object Test extends App { - try { - CaseClass("foo", 123).productElementName(99) - } catch { - case e: IndexOutOfBoundsException => - println(e) - e.getStackTrace.take(4).foreach(s => println(s.toString.takeWhile(_ != '('))) - } - - println() + def check(t: Throwable)(msg: String)(ms: String*): Boolean = + (t.getMessage == msg).tap(if (_) () else println(s"expected [$msg], got [${t.getMessage}]")) + && + ms.forall(m => t.getStackTrace.exists(f => m == s"${f.getClassName}.${f.getMethodName}")) - try { - CaseObject.productElementName(99) - } catch { - case e: IndexOutOfBoundsException => - println(e) - e.getStackTrace.take(4).foreach(s => println(s.toString.takeWhile(_ != '('))) + //java.lang.IndexOutOfBoundsException: 99 + assertThrown[IndexOutOfBoundsException](check(_)("99")("scala.runtime.Statics.ioobe", "CaseClass.productElementName")) { + CaseClass("foo", 123)(42).productElementName(99) } - - println() - - try { - ImplicitParamsCaseClass(42).productElementName(1) - } catch { - case e: IndexOutOfBoundsException => - println(e) - e.getStackTrace.take(4).foreach(s => println(s.toString.takeWhile(_ != '('))) + assertThrown[IndexOutOfBoundsException](_ => true) { + CaseClass("foo", 123)(42).productElementName(2) } - - println() - - try { - CurriedCaseClass(42)("").productElementName(1) - } catch { - case e: IndexOutOfBoundsException => - println(e) - e.getStackTrace.take(4).foreach(s => println(s.toString.takeWhile(_ != '('))) + //java.lang.IndexOutOfBoundsException: 99 is out of bounds (min 0, max -1 + assertThrown[IndexOutOfBoundsException](check(_)(s"99 is out of bounds (min 0, max -1)")("scala.Product.productElementName", "CaseObject$.productElementName")) { + CaseObject.productElementName(99) } - } - From 8295e7099f81aa000ac8c655c7fb314819676c4b Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Thu, 10 Jun 2021 13:55:46 -0700 Subject: [PATCH 247/769] Consolidate productElementName test --- test/files/run/productElementName-oob.scala | 26 ------- 
test/files/run/productElementName.scala | 79 ++++++++++++++------- 2 files changed, 54 insertions(+), 51 deletions(-) delete mode 100644 test/files/run/productElementName-oob.scala diff --git a/test/files/run/productElementName-oob.scala b/test/files/run/productElementName-oob.scala deleted file mode 100644 index 89f24e18069c..000000000000 --- a/test/files/run/productElementName-oob.scala +++ /dev/null @@ -1,26 +0,0 @@ -// scalac: -Xsource:3 -import scala.tools.testkit.AssertUtil.assertThrown -import scala.util.chaining.* - -case class CaseClass[A: Ordering](a: String, b: Int)(c: A) -case object CaseObject - -object Test extends App { - - def check(t: Throwable)(msg: String)(ms: String*): Boolean = - (t.getMessage == msg).tap(if (_) () else println(s"expected [$msg], got [${t.getMessage}]")) - && - ms.forall(m => t.getStackTrace.exists(f => m == s"${f.getClassName}.${f.getMethodName}")) - - //java.lang.IndexOutOfBoundsException: 99 - assertThrown[IndexOutOfBoundsException](check(_)("99")("scala.runtime.Statics.ioobe", "CaseClass.productElementName")) { - CaseClass("foo", 123)(42).productElementName(99) - } - assertThrown[IndexOutOfBoundsException](_ => true) { - CaseClass("foo", 123)(42).productElementName(2) - } - //java.lang.IndexOutOfBoundsException: 99 is out of bounds (min 0, max -1 - assertThrown[IndexOutOfBoundsException](check(_)(s"99 is out of bounds (min 0, max -1)")("scala.Product.productElementName", "CaseObject$.productElementName")) { - CaseObject.productElementName(99) - } -} diff --git a/test/files/run/productElementName.scala b/test/files/run/productElementName.scala index ff9a2e4dac33..18dcaad0935a 100644 --- a/test/files/run/productElementName.scala +++ b/test/files/run/productElementName.scala @@ -1,3 +1,7 @@ +// scalac: -Xsource:3 +import scala.tools.testkit.AssertUtil.assertThrown +import scala.util.chaining.* +import org.junit.Assert.assertEquals case class User(name: String, age: Int) @@ -14,15 +18,12 @@ case class Symbols(:: : String, || : Int) case class MultipleParamLists(a: String, b: Int)(c: Boolean) case class AuxiliaryConstructor(a: String, b: Int) { - def this(x: String) = { - this(x, 123) - } + def this(x: String) = this(x, 123) } case class OverloadedApply(a: String, b: Int) object OverloadedApply { - def apply(x: String): OverloadedApply = - new OverloadedApply(x, 123) + def apply(x: String): OverloadedApply = new OverloadedApply(x, 123) } case class DefinesProductElementName(a: String, b: Int) { @@ -46,32 +47,60 @@ case class InheritsProductElementName_Override_SelfType(a: String, b: Int) exten case class PrivateMembers(a: Int, private val b: Int, c: Int, private val d: Int, e: Int, private val f: Int) +case class ImplicitParameter[A: Ordering](a: String, b: Int)(c: A) + +case object CaseObject + object Test extends App { - def pretty(p: Product): String = - p.productElementNames.zip(p.productIterator) - .map { case (name, value) => s"$name=$value" } - .mkString(p.productPrefix + "(", ", ", ")") - - println(pretty(User("Susan", 42))) - println(pretty(ユーザ("Susan", 42))) - println(pretty(U$er("Susan", 42))) - println(pretty(`type`("Susan", 42))) - println(pretty(`contains spaces`("Susan", 42))) - println(pretty(Symbols("Susan", 42))) - println(pretty(MultipleParamLists("Susan", 42)(true))) - println(pretty(AuxiliaryConstructor("Susan", 42))) - println(pretty(OverloadedApply("Susan"))) - println(pretty(DefinesProductElementName("Susan", 42))) + def verify(p: Product, checkName: Boolean = true): Unit = { + val iterated = 
p.productElementNames.zip(p.productIterator) + .map { case (name, value) => s"$name=$value" } + .mkString(p.productPrefix + "(", ", ", ")") + val indexed = (0 until p.productArity) + .map(i => s"${p.productElementName(i)}=${p.productElement(i)}") + .mkString(p.productPrefix + "(", ", ", ")") + assertEquals(iterated, indexed) + if (checkName) assertThrown[IndexOutOfBoundsException](_ => true)(p.productElementName(p.productArity + 1)) + println(iterated) + } + + verify(User("Susan", 42)) + verify(ユーザ("Susan", 42)) + verify(U$er("Susan", 42)) + verify(`type`("Susan", 42)) + verify(`contains spaces`("Susan", 42)) + verify(Symbols("Susan", 42)) + verify(MultipleParamLists("Susan", 42)(true)) + verify(AuxiliaryConstructor("Susan", 42)) + verify(OverloadedApply("Susan")) + verify(DefinesProductElementName("Susan", 42), checkName = false) // uses the synthetic, not the one defined in the trait - println(pretty(InheritsProductElementName("Susan", 42))) + verify(InheritsProductElementName("Susan", 42)) // uses the override defined in the trait - println(pretty(InheritsProductElementName_Override("Susan", 42))) + verify(InheritsProductElementName_Override("Susan", 42), checkName = false) // uses the synthetic, not the one defined in the trait - println(pretty(InheritsProductElementName_Override_SelfType("Susan", 42))) + verify(InheritsProductElementName_Override_SelfType("Susan", 42)) - println(pretty(PrivateMembers(10, 20, 30, 40, 50, 60))) -} + verify(PrivateMembers(10, 20, 30, 40, 50, 60)) + // message check and probe for characteristic stack frames + def check(t: Throwable)(msg: String)(ms: String*): Boolean = + (t.getMessage == msg).tap(if (_) () else println(s"expected [$msg], got [${t.getMessage}]")) + && + ms.forall(m => t.getStackTrace.exists(f => m == s"${f.getClassName}.${f.getMethodName}")) + + //java.lang.IndexOutOfBoundsException: 99 + assertThrown[IndexOutOfBoundsException](check(_)("99")("scala.runtime.Statics.ioobe", "ImplicitParameter.productElementName")) { + ImplicitParameter("foo", 123)(42).productElementName(99) + } + assertThrown[IndexOutOfBoundsException](_ => true) { + ImplicitParameter("foo", 123)(42).productElementName(2) + } + //java.lang.IndexOutOfBoundsException: 99 is out of bounds (min 0, max -1 [sic] + assertThrown[IndexOutOfBoundsException](check(_)(s"99 is out of bounds (min 0, max -1)")("scala.Product.productElementName", "CaseObject$.productElementName")) { + CaseObject.productElementName(99) + } +} From 519fbd3290e7a12a1a4dca554430e1df1407481e Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 11 Jun 2021 08:14:10 +1000 Subject: [PATCH 248/769] Tests that require lazier loading of package.class --- ...nner-class-in-ancestor-simpler-still.scala | 25 ++++++++++++++ ...with-inner-class-in-ancestor-simpler.scala | 26 +++++++++++++++ ...-object-with-inner-class-in-ancestor.scala | 33 +++++++++++++++++++ 3 files changed, 84 insertions(+) create mode 100644 test/files/run/package-object-with-inner-class-in-ancestor-simpler-still.scala create mode 100644 test/files/run/package-object-with-inner-class-in-ancestor-simpler.scala create mode 100644 test/files/run/package-object-with-inner-class-in-ancestor.scala diff --git a/test/files/run/package-object-with-inner-class-in-ancestor-simpler-still.scala b/test/files/run/package-object-with-inner-class-in-ancestor-simpler-still.scala new file mode 100644 index 000000000000..9d467f714044 --- /dev/null +++ b/test/files/run/package-object-with-inner-class-in-ancestor-simpler-still.scala @@ -0,0 +1,25 @@ +import 
scala.reflect.io.Path +import scala.tools.partest._ +import java.io.File + +object Test extends StoreReporterDirectTest { + def A = "package b; class A" + def pkg = "package object b extends A" + + override def extraSettings = s"-cp ${sys.props("partest.lib")}${File.pathSeparator}$testOutput" + + def show(): Unit = { + compiles(A, pkg) + delete(testOutput / "b" / "A.class") + compiles(A) + } + + def compiles(codes: String*) = { + val global = newCompiler() + withRun(global)(_ compileSources newSources(codes: _*)) + assert(!global.reporter.hasErrors, storeReporter.infos.mkString("\n")) + } + + def delete(paths: Path*) = paths.foreach(p => assert(p.delete(), s"$p didn't delete")) + def code = "" +} diff --git a/test/files/run/package-object-with-inner-class-in-ancestor-simpler.scala b/test/files/run/package-object-with-inner-class-in-ancestor-simpler.scala new file mode 100644 index 000000000000..123de8d847b1 --- /dev/null +++ b/test/files/run/package-object-with-inner-class-in-ancestor-simpler.scala @@ -0,0 +1,26 @@ +import scala.reflect.io.Path +import scala.tools.partest._ +import java.io.File + +object Test extends StoreReporterDirectTest { + def A = "package b; class A" + def pkg = "package object b extends A" + def M = "package b; class M" + + override def extraSettings = s"-cp ${sys.props("partest.lib")}${File.pathSeparator}$testOutput" + + def show(): Unit = { + compiles(A, pkg, M) + delete(testOutput / "b" / "A.class") + compiles(M, A) + } + + def compiles(codes: String*) = { + val global = newCompiler() + withRun(global)(_ compileSources newSources(codes: _*)) + assert(!global.reporter.hasErrors, storeReporter.infos.mkString("\n")) + } + + def delete(paths: Path*) = paths.foreach(p => assert(p.delete(), s"$p didn't delete")) + def code = "" +} diff --git a/test/files/run/package-object-with-inner-class-in-ancestor.scala b/test/files/run/package-object-with-inner-class-in-ancestor.scala new file mode 100644 index 000000000000..03e1c561de0d --- /dev/null +++ b/test/files/run/package-object-with-inner-class-in-ancestor.scala @@ -0,0 +1,33 @@ +import scala.reflect.io.Path +import scala.tools.partest._ +import java.io.File + +object Test extends StoreReporterDirectTest { + class V1 { + def O = "package b; object O { def o = \"\" }" + def A = "package b; class A { class C { O.o } }" + def pkg = "package object b extends A" + } + class V2 extends V1 { + override def O = "package b; object O { def o = 42 }" + } + + override def extraSettings = s"-cp ${sys.props("partest.lib")}${File.pathSeparator}$testOutput" + + def show(): Unit = { + val v1 = new V1 + compiles(v1.O, v1.A, v1.pkg) + delete(testOutput / "b" / "A.class", testOutput / "b" / "A$C.class") + val v2 = new V2 + compiles(v2.O, v2.A) + } + + def compiles(codes: String*) = { + val global = newCompiler() + withRun(global)(_ compileSources newSources(codes: _*)) + assert(!global.reporter.hasErrors, storeReporter.infos.mkString("\n")) + } + + def delete(paths: Path*) = paths.foreach(p => assert(p.delete(), s"$p didn't delete")) + def code = "" +} From 078f85eddb10f330fea0c13272e11ac7f7943488 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 11 Jun 2021 11:03:24 +1000 Subject: [PATCH 249/769] Add test for progression in compiler determism --- test/junit/scala/tools/nsc/DeterminismTest.scala | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/test/junit/scala/tools/nsc/DeterminismTest.scala b/test/junit/scala/tools/nsc/DeterminismTest.scala index fa8fd9c9e966..57eda2d5d72b 100644 --- 
a/test/junit/scala/tools/nsc/DeterminismTest.scala +++ b/test/junit/scala/tools/nsc/DeterminismTest.scala @@ -330,6 +330,14 @@ class DeterminismTest { test(List(code)) } + @Test def testPackageObjectUserLand(): Unit = { + def code = List[SourceFile]( + source("package.scala", "package userland; object `package` { type Throwy = java.lang.Throwable }"), + source("th.scala", "package userland; class th[T <: Throwy](cause: T = null)") + ) + test(code :: Nil) + } + def source(name: String, code: String): SourceFile = new BatchSourceFile(name, code) } From b7c6d59c080bd194416ebf59a6a4f77c6a15bef6 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 11 Jun 2021 11:44:38 +1000 Subject: [PATCH 250/769] progression test: error stale reference inherited package object member --- .../files/run/package-object-stale-decl.scala | 40 +++++++++++++++++++ 1 file changed, 40 insertions(+) create mode 100644 test/files/run/package-object-stale-decl.scala diff --git a/test/files/run/package-object-stale-decl.scala b/test/files/run/package-object-stale-decl.scala new file mode 100644 index 000000000000..bbf1ba7cda16 --- /dev/null +++ b/test/files/run/package-object-stale-decl.scala @@ -0,0 +1,40 @@ +import scala.reflect.io.Path +import scala.tools.partest._ +import java.io.File + +object Test extends StoreReporterDirectTest { + class V1 { + def pkg = "package object b extends B" + def B = "package b; class B { def stale = 42 }" + def A = "package b; class A { stale }" + } + class V2 extends V1 { + override def B = "package b; class B { }" + } + + override def extraSettings = s"-cp ${sys.props("partest.lib")}${File.pathSeparator}$testOutput" + + def show(): Unit = { + val v1 = new V1 + val v2 = new V2 + compiles(v1.A, v1.B, v1.pkg)() + delete(testOutput / "b" / "A.class") + compiles(v2.B, v2.A)(Some("not found: value stale")) + } + + def compiles(codes: String*)(expectedError: Option[String] = None) = { + val global = newCompiler() + withRun(global)(_ compileSources newSources(codes: _*)) + val reporterOutput = storeReporter.infos.map(x => x.pos.showError(x.msg)).mkString("\n") + expectedError match { + case None => + assert(!global.reporter.hasErrors, reporterOutput) + case Some(text) => + assert(global.reporter.hasErrors, "expected compile failure, got success") + assert(reporterOutput.contains(text), reporterOutput) + } + } + + def delete(paths: Path*) = paths.foreach(p => assert(p.delete(), s"$p didn't delete")) + def code = "" +} From baa9ff8a1184eb8f0536b67f5e5ac17af003ca6d Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 11 Jun 2021 11:46:09 +1000 Subject: [PATCH 251/769] Remove workaround for scala/bug#5954 It is solved directly now. --- src/compiler/scala/tools/nsc/typechecker/Namers.scala | 11 ----------- 1 file changed, 11 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index bdda512b6dbd..384c836ee1b3 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -467,17 +467,6 @@ trait Namers extends MethodSynthesis { val existingModule = context.scope lookupModule tree.name if (existingModule.isModule && !existingModule.hasPackageFlag && inCurrentScope(existingModule) && (currentRun.canRedefine(existingModule) || existingModule.isSynthetic)) { - // This code accounts for the way the package objects found in the classpath are opened up - // early by the completer of the package itself. 
If the `packageobjects` phase then finds - // the same package object in sources, we have to clean the slate and remove package object - // members from the package class. - // - // TODO scala/bug#4695 Pursue the approach in https://github.com/scala/scala/pull/2789 that avoids - // opening up the package object on the classpath at all if one exists in source. - if (existingModule.isPackageObject) { - val packageScope = existingModule.enclosingPackageClass.rawInfo.decls - packageScope.foreach(mem => if (mem.owner != existingModule.enclosingPackageClass) packageScope unlink mem) - } updatePosFlags(existingModule, tree.pos, moduleFlags) setPrivateWithin(tree, existingModule) existingModule.moduleClass andAlso (setPrivateWithin(tree, _)) From c78db9906ba04b629154a2129d7632289f25e9e2 Mon Sep 17 00:00:00 2001 From: "Magnolia.K" Date: Mon, 7 Jun 2021 23:43:18 +0900 Subject: [PATCH 252/769] Fixed Syntax Summary Reflect the following modifications to match the behavior of the actual code. - Unicode_Sm and Unicode_So included in opchar - Enumerates the characters available in opchar - Fixed upper and lower descriptions - Removed \u007F from printableChar - Fixed an error in Unicode category names(Ml -> Lm) Also removed unnecessary comment outs. https://github.com/scala/bug/issues/12260 --- spec/01-lexical-syntax.md | 2 +- spec/13-syntax-summary.md | 15 ++++++++++----- 2 files changed, 11 insertions(+), 6 deletions(-) diff --git a/spec/01-lexical-syntax.md b/spec/01-lexical-syntax.md index 718950b171a1..c703b49c0ef3 100644 --- a/spec/01-lexical-syntax.md +++ b/spec/01-lexical-syntax.md @@ -19,7 +19,7 @@ classes (Unicode general category given in parentheses): 1. Whitespace characters. `\u0020 | \u0009 | \u000D | \u000A`. 1. Letters, which include lower case letters (`Ll`), upper case letters (`Lu`), - title case letters (`Lt`), other letters (`Lo`), modifier letters (`Ml`), + title case letters (`Lt`), other letters (`Lo`), modifier letters (`Lm`), letter numerals (`Nl`) and the two characters `\u0024 ‘$’` and `\u005F ‘_’`. 1. Digits `‘0’ | … | ‘9’`. 1. Parentheses `‘(’ | ‘)’ | ‘[’ | ‘]’ | ‘{’ | ‘}’ `. 
diff --git a/spec/13-syntax-summary.md b/spec/13-syntax-summary.md index aec631beb45f..1f54d346a3b2 100644 --- a/spec/13-syntax-summary.md +++ b/spec/13-syntax-summary.md @@ -14,15 +14,20 @@ The lexical syntax of Scala is given by the following grammar in EBNF form: ```ebnf whiteSpace ::= ‘\u0020’ | ‘\u0009’ | ‘\u000D’ | ‘\u000A’ -upper ::= ‘A’ | … | ‘Z’ | ‘$’ // and any character in Unicode category Lu, Lt or Nl, and any character in Lo and Ml that don't have contributory property Other_Lowercase -lower ::= ‘a’ | … | ‘z’ | ‘_’ // and any character in Unicode category Ll, and and any character in Lo or Ml that has contributory property Other_Lowercase +upper ::= ‘A’ | … | ‘Z’ | ‘$’ and any character in Unicode categories Lu, Lt or Nl, + and any character in Unicode categories Lo and Lm that don't have + contributory property Other_Lowercase +lower ::= ‘a’ | … | ‘z’ | ‘_’ and any character in Unicode category Ll, + and any character in Unicode categories Lo or Lm that has contributory + property Other_Lowercase letter ::= upper | lower digit ::= ‘0’ | … | ‘9’ paren ::= ‘(’ | ‘)’ | ‘[’ | ‘]’ | ‘{’ | ‘}’ delim ::= ‘`’ | ‘'’ | ‘"’ | ‘.’ | ‘;’ | ‘,’ -opchar ::= // printableChar not matched by (whiteSpace | upper | lower | - // letter | digit | paren | delim | Unicode_Sm | Unicode_So) -printableChar ::= // all characters in [\u0020, \u007F] inclusive +opchar ::= ‘!’ | ‘#’ | ‘%’ | ‘&’ | ‘*’ | ‘+’ | ‘-’ | ‘/’ | ‘:’ | + ‘<’ | ‘=’ | ‘>’ | ‘?’ | ‘@’ | ‘\’ | ‘^’ | ‘|’ | ‘~’ + and any character in Unicode categories Sm or So +printableChar ::= all characters in [\u0020, \u007E] inclusive UnicodeEscape ::= ‘\’ ‘u’ {‘u’} hexDigit hexDigit hexDigit hexDigit hexDigit ::= ‘0’ | … | ‘9’ | ‘A’ | … | ‘F’ | ‘a’ | … | ‘f’ charEscapeSeq ::= ‘\’ (‘b’ | ‘t’ | ‘n’ | ‘f’ | ‘r’ | ‘"’ | ‘'’ | ‘\’) From 622c87be886add4f91a12bae8ad1382a4f506314 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Mon, 14 Jun 2021 08:50:22 -0700 Subject: [PATCH 253/769] sbt 1.5.4 (was 1.5.3) --- project/build.properties | 2 +- scripts/common | 2 +- src/intellij/scala.ipr.SAMPLE | 36 +++++++++++++------------- test/jcstress/project/build.properties | 2 +- 4 files changed, 21 insertions(+), 21 deletions(-) diff --git a/project/build.properties b/project/build.properties index 67d27a1dfe00..9edb75b77c28 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.5.3 +sbt.version=1.5.4 diff --git a/scripts/common b/scripts/common index 8f6c3aa3bef8..447ac660b6bd 100644 --- a/scripts/common +++ b/scripts/common @@ -11,7 +11,7 @@ else fi SBT_CMD=${SBT_CMD-sbt} -SBT_CMD="$SBT_CMD -sbt-version 1.5.3" +SBT_CMD="$SBT_CMD -sbt-version 1.5.4" # repo to publish builds integrationRepoUrl=${integrationRepoUrl-"https://scala-ci.typesafe.com/artifactory/scala-integration/"} diff --git a/src/intellij/scala.ipr.SAMPLE b/src/intellij/scala.ipr.SAMPLE index 797c804d9fcf..eabf6729ecde 100644 --- a/src/intellij/scala.ipr.SAMPLE +++ b/src/intellij/scala.ipr.SAMPLE @@ -325,13 +325,13 @@ - + - + - - + + @@ -355,7 +355,7 @@ - + @@ -369,8 +369,8 @@ - - + + @@ -382,13 +382,13 @@ - + - + @@ -402,16 +402,16 @@ - + - + - + - + @@ -435,18 +435,18 @@ - - + + - + - + - + diff --git a/test/jcstress/project/build.properties b/test/jcstress/project/build.properties index 67d27a1dfe00..9edb75b77c28 100644 --- a/test/jcstress/project/build.properties +++ b/test/jcstress/project/build.properties @@ -1 +1 @@ -sbt.version=1.5.3 +sbt.version=1.5.4 From d1984ea7eedfb343918c2ac7476dd9c01dc5d579 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Mon, 14 Jun 
2021 13:53:28 -0700 Subject: [PATCH 254/769] Retry sealed traits for relatedness --- .../tools/nsc/typechecker/Checkable.scala | 52 ++++++++++--------- test/files/neg/t12414.check | 6 +++ test/files/neg/t12414.scala | 15 ++++++ test/files/neg/t12414b.check | 6 +++ test/files/neg/t12414b/a_1.scala | 6 +++ test/files/neg/t12414b/b_2.scala | 9 ++++ 6 files changed, 70 insertions(+), 24 deletions(-) create mode 100644 test/files/neg/t12414.check create mode 100644 test/files/neg/t12414.scala create mode 100644 test/files/neg/t12414b.check create mode 100644 test/files/neg/t12414b/a_1.scala create mode 100644 test/files/neg/t12414b/b_2.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala index c3bb3f65fbd4..ed210ff3b83b 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala @@ -76,7 +76,6 @@ trait Checkable { import global._ import definitions._ - import CheckabilityChecker.{ isNeverSubType, isNeverSubClass } /** The applied type of class 'to' after inferring anything * possible from the knowledge that 'to' must also be of the @@ -155,7 +154,7 @@ trait Checkable { scrut <:< pattTpWild } - private class CheckabilityChecker(val X: Type, val P: Type) { + private class CheckabilityChecker(val X: Type, val P: Type, isRecheck: Boolean = false) { def Xsym = X.typeSymbol def Psym = P.typeSymbol def PErased = { @@ -166,7 +165,6 @@ trait Checkable { } def XR = if (Xsym == AnyClass) PErased else propagateKnownTypes(X, Psym) - // sadly the spec says (new java.lang.Boolean(true)).isInstanceOf[scala.Boolean] def P1 = scrutConformsToPatternType(X, P) def P2 = !Psym.isPrimitiveValueClass && isNeverSubType(X, P) @@ -215,11 +213,7 @@ trait Checkable { case TypeRef(_, sym, _) if sym.isAbstractType => "abstract type " + sym.name case tp => "non-variable type argument " + tp } - } - /** X, P, [P1], etc. are all explained at the top of the file. - */ - private object CheckabilityChecker { /** Are these symbols classes with no subclass relationship? */ def areUnrelatedClasses(sym1: Symbol, sym2: Symbol) = ( sym1.isClass @@ -242,23 +236,21 @@ trait Checkable { * - neither A nor B is a trait (i.e. both are actual classes, not eligible for mixin) * - both A and B are sealed/final, and every possible pairing of their children is irreconcilable * - * TODO: the last two conditions of the last possibility (that the symbols are not of + * The last two conditions of the last possibility (that the symbols are not of * classes being compiled in the current run) are because this currently runs too early, * and .children returns Nil for sealed classes because their children will not be - * populated until typer. It was too difficult to move things around for the moment, - * so I will consult with moors about the optimal time to be doing this. + * populated until typer. As a workaround, in this case, this check is performed a second + * time at the end of typer. 
#6537, #12414 */ def areIrreconcilableAsParents(sym1: Symbol, sym2: Symbol): Boolean = areUnrelatedClasses(sym1, sym2) && ( isEffectivelyFinal(sym1) // initialization important || isEffectivelyFinal(sym2) || !sym1.isTrait && !sym2.isTrait - || isSealedOrFinal(sym1) && isSealedOrFinal(sym2) && allChildrenAreIrreconcilable(sym1, sym2) && !currentRun.compiles(sym1) && !currentRun.compiles(sym2) + || isSealedOrFinal(sym1) && isSealedOrFinal(sym2) && allChildrenAreIrreconcilable(sym1, sym2) && (isRecheck || !currentRun.compiles(sym1) && !currentRun.compiles(sym2)) ) private def isSealedOrFinal(sym: Symbol) = sym.isSealed || sym.isFinal - private def isEffectivelyFinal(sym: Symbol): Boolean = ( - // initialization important - sym.initialize.isEffectivelyFinalOrNotOverridden - ) + // initialization important + private def isEffectivelyFinal(sym: Symbol): Boolean = sym.initialize.isEffectivelyFinalOrNotOverridden def isNeverSubClass(sym1: Symbol, sym2: Symbol) = areIrreconcilableAsParents(sym1, sym2) @@ -278,7 +270,7 @@ trait Checkable { case _ => false } - // Important to dealias at any entry point (this is the only one at this writing.) + // Important to dealias at any entry point (this is the only one at this writing but cf isNeverSubClass.) def isNeverSubType(tp1: Type, tp2: Type): Boolean = /*logResult(s"isNeverSubType($tp1, $tp2)")*/((tp1.dealias, tp2.dealias) match { case (TypeRef(_, sym1, args1), TypeRef(_, sym2, args2)) => isNeverSubClass(sym1, sym2) || { @@ -311,13 +303,11 @@ trait Checkable { * * Instead of the canRemedy flag, annotate uncheckable types that have become checkable because of the availability of a class tag? */ - def checkCheckable(tree: Tree, P0: Type, X0: Type, inPattern: Boolean, canRemedy: Boolean = false): Unit = { - if (uncheckedOk(P0)) return - def where = if (inPattern) "pattern " else "" - - if(P0.typeSymbol == SingletonClass) + def checkCheckable(tree: Tree, P0: Type, X0: Type, inPattern: Boolean, canRemedy: Boolean = false): Unit = if (!uncheckedOk(P0)) { + if (P0.typeSymbol == SingletonClass) context.warning(tree.pos, s"fruitless type test: every non-null value will be a Singleton dynamically", WarningCategory.Other) else { + def where = if (inPattern) "pattern " else "" // singleton types not considered here, dealias the pattern for SI-XXXX val P = P0.dealiasWiden val X = X0.widen @@ -341,10 +331,12 @@ trait Checkable { if (checker.result == RuntimeCheckable) log(checker.summaryString) - if (checker.neverMatches) { - val addendum = if (checker.neverSubClass) "" else " (but still might match its erasure)" + def neverMatchesWarning(result: CheckabilityChecker) = { + val addendum = if (result.neverSubClass) "" else " (but still might match its erasure)" context.warning(tree.pos, s"fruitless type test: a value of type $X cannot also be a $PString$addendum", WarningCategory.Other) } + if (checker.neverMatches) + neverMatchesWarning(checker) else if (checker.isUncheckable) { val msg = ( if (checker.uncheckableType =:= P) s"abstract type $where$PString" @@ -352,13 +344,25 @@ trait Checkable { ) context.warning(tree.pos, s"$msg is unchecked since it is eliminated by erasure", WarningCategory.Unchecked) } + else if (checker.result == RuntimeCheckable) { + // register deferred checking for sealed types in current run + @`inline` def Xsym = X.typeSymbol + @`inline` def Psym = P.typeSymbol + @`inline` def isSealedOrFinal(sym: Symbol) = sym.isSealed || sym.isFinal + def recheckFruitless(): Unit = { + val rechecker = new CheckabilityChecker(X, P, isRecheck = true) + if 
(rechecker.neverMatches) neverMatchesWarning(rechecker) + } + if (isSealedOrFinal(Xsym) && isSealedOrFinal(Psym) && (currentRun.compiles(Xsym) || currentRun.compiles(Psym))) + context.unit.toCheck += (() => recheckFruitless()) + } } } } } } -private[typechecker] final class Checkability(val value: Int) extends AnyVal { } +private[typechecker] final class Checkability(val value: Int) extends AnyVal private[typechecker] object Checkability { val StaticallyTrue = new Checkability(0) val StaticallyFalse = new Checkability(1) diff --git a/test/files/neg/t12414.check b/test/files/neg/t12414.check new file mode 100644 index 000000000000..e94e68fb179c --- /dev/null +++ b/test/files/neg/t12414.check @@ -0,0 +1,6 @@ +t12414.scala:12: warning: fruitless type test: a value of type Trait1 cannot also be a Trait2 + case y: Trait2 => + ^ +error: No warnings can be incurred under -Werror. +1 warning +1 error diff --git a/test/files/neg/t12414.scala b/test/files/neg/t12414.scala new file mode 100644 index 000000000000..649fbb23e5b4 --- /dev/null +++ b/test/files/neg/t12414.scala @@ -0,0 +1,15 @@ +// scalac: -Werror + +sealed trait Trait1 +sealed trait Trait2 + +class Class1 extends Trait1 +class Class2 extends Trait2 + +object Test extends App { + def test(x: Trait1): Unit = + x match { + case y: Trait2 => + case _ => + } +} diff --git a/test/files/neg/t12414b.check b/test/files/neg/t12414b.check new file mode 100644 index 000000000000..82da8bfc3fe3 --- /dev/null +++ b/test/files/neg/t12414b.check @@ -0,0 +1,6 @@ +b_2.scala:6: warning: fruitless type test: a value of type Trait1 cannot also be a Trait2 + case y: Trait2 => + ^ +error: No warnings can be incurred under -Werror. +1 warning +1 error diff --git a/test/files/neg/t12414b/a_1.scala b/test/files/neg/t12414b/a_1.scala new file mode 100644 index 000000000000..cdb91902eb37 --- /dev/null +++ b/test/files/neg/t12414b/a_1.scala @@ -0,0 +1,6 @@ + +sealed trait Trait1 +sealed trait Trait2 + +class Class1 extends Trait1 +class Class2 extends Trait2 diff --git a/test/files/neg/t12414b/b_2.scala b/test/files/neg/t12414b/b_2.scala new file mode 100644 index 000000000000..87f5694346eb --- /dev/null +++ b/test/files/neg/t12414b/b_2.scala @@ -0,0 +1,9 @@ +// scalac: -Werror + +object Test extends App { + def test(x: Trait1): Unit = + x match { + case y: Trait2 => + case _ => + } +} From 85947755b43bb03536395abe06345292ba027259 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Tue, 15 Jun 2021 09:26:59 -0700 Subject: [PATCH 255/769] Strip parens as needed for varargs expr --- src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 4 ++-- test/files/pos/varargs-future.scala | 1 + 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index c3712f7b562f..73a08788ebd2 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -1810,7 +1810,7 @@ self => val expr = reduceExprStack(base, loop(prefixExpr())) if (followingIsScala3Vararg()) atPos(expr.pos.start) { - Typed(expr, atPos(in.skipToken()) { Ident(tpnme.WILDCARD_STAR) }) + Typed(stripParens(expr), atPos(in.skipToken()) { Ident(tpnme.WILDCARD_STAR) }) } else expr } @@ -2171,7 +2171,7 @@ self => } def checkWildStar: Tree = top match { case Ident(nme.WILDCARD) if isSequenceOK && isRawStar => peekingAhead ( - if (isCloseDelim) atPos(top.pos.start, in.prev.offset)(Star(stripParens(top))) + if (isCloseDelim) atPos(top.pos.start, 
in.prev.offset)(Star(top)) else EmptyTree ) case Ident(name) if isSequenceOK && followingIsScala3Vararg() => diff --git a/test/files/pos/varargs-future.scala b/test/files/pos/varargs-future.scala index e8c9057e564b..7b8ddde63567 100644 --- a/test/files/pos/varargs-future.scala +++ b/test/files/pos/varargs-future.scala @@ -6,6 +6,7 @@ class Test { val s: Seq[Int] = Seq(1, 2, 3) foo(s*) + foo((s ++ s)*) // not very useful, but supported by Scala 3 (and matches what works with `: _*` syntax) foo( From d2b8e8824005bcab242bd0347eb1bbcc71fe40ce Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Tue, 15 Jun 2021 11:45:19 -0700 Subject: [PATCH 256/769] Simplify pattern3 --- .../scala/tools/nsc/ast/parser/Parsers.scala | 42 ++++++------------- test/files/neg/t5702-neg-bad-brace.check | 13 +++--- test/files/neg/t5702-neg-bad-brace.scala | 9 +--- test/files/pos/varargs-future.scala | 20 +++++++++ 4 files changed, 38 insertions(+), 46 deletions(-) diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 73a08788ebd2..ff9b8747f17c 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -307,25 +307,6 @@ self => } } - /** Perform an operation while peeking ahead. - * Pushback if the operation yields an empty tree or blows to pieces. - */ - @inline def peekingAhead(tree: => Tree): Tree = { - @inline def peekahead() = { - in.prev copyFrom in - in.nextToken() - } - @inline def pushback() = { - in.next copyFrom in - in copyFrom in.prev - } - peekahead() - // try it, in case it is recoverable - val res = try tree catch { case e: Exception => pushback() ; throw e } - if (res.isEmpty) pushback() - res - } - class ParserTreeBuilder extends TreeBuilder { val global: self.global.type = self.global def unit = parser.unit @@ -2169,18 +2150,19 @@ self => case COMMA => !isXML && in.isTrailingComma(RPAREN) case _ => false } - def checkWildStar: Tree = top match { - case Ident(nme.WILDCARD) if isSequenceOK && isRawStar => peekingAhead ( - if (isCloseDelim) atPos(top.pos.start, in.prev.offset)(Star(top)) - else EmptyTree - ) - case Ident(name) if isSequenceOK && followingIsScala3Vararg() => - atPos(top.pos.start) { - Bind(name, atPos(in.skipToken()) { Star(Ident(nme.WILDCARD)) }) + def checkWildStar: Tree = + if (isSequenceOK) { + top match { + case Ident(nme.WILDCARD) if isRawStar && lookingAhead(isCloseDelim) => + atPos(top.pos.start, in.skipToken()) { Star(top) } + case Ident(name) if followingIsScala3Vararg() => + atPos(top.pos.start) { + Bind(name, atPos(in.skipToken()) { Star(Ident(nme.WILDCARD)) }) + } + case _ => EmptyTree } - case _ => - EmptyTree - } + } + else EmptyTree @tailrec def loop(top: Tree): Tree = reducePatternStack(base, top) match { case next if isIdent && !isRawBar => pushOpInfo(next) ; loop(simplePattern(() => badPattern3())) diff --git a/test/files/neg/t5702-neg-bad-brace.check b/test/files/neg/t5702-neg-bad-brace.check index 25b0d96b6cf4..bdd68e43f892 100644 --- a/test/files/neg/t5702-neg-bad-brace.check +++ b/test/files/neg/t5702-neg-bad-brace.check @@ -1,10 +1,7 @@ -t5702-neg-bad-brace.scala:14: error: Unmatched closing brace '}' ignored here +t5702-neg-bad-brace.scala:7: error: Unmatched closing brace '}' ignored here case List(1, _*} => ^ -t5702-neg-bad-brace.scala:14: error: illegal start of simple pattern - case List(1, _*} => - ^ -t5702-neg-bad-brace.scala:15: error: ')' expected but '}' found. 
- } - ^ -3 errors +t5702-neg-bad-brace.scala:10: error: eof expected but '}' found. +} +^ +2 errors diff --git a/test/files/neg/t5702-neg-bad-brace.scala b/test/files/neg/t5702-neg-bad-brace.scala index 16a341cf8c17..49f55a37b2b2 100644 --- a/test/files/neg/t5702-neg-bad-brace.scala +++ b/test/files/neg/t5702-neg-bad-brace.scala @@ -1,16 +1,9 @@ object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { val is = List(1,2,3) - is match { -// the erroneous brace is ignored, so we can't halt on it. -// maybe brace healing can detect overlapping unmatched (...} -// In this case, the fix emits an extra error: -// t5702-neg-bad-brace.scala:10: error: Unmatched closing brace '}' ignored here -// t5702-neg-bad-brace.scala:10: error: illegal start of simple pattern (i.e., =>) -// t5702-neg-bad-brace.scala:11: error: ')' expected but '}' found. case List(1, _*} => } } diff --git a/test/files/pos/varargs-future.scala b/test/files/pos/varargs-future.scala index 7b8ddde63567..8b8c414b47b0 100644 --- a/test/files/pos/varargs-future.scala +++ b/test/files/pos/varargs-future.scala @@ -20,4 +20,24 @@ class Test { s match { case Seq(x, rest*) => println(rest) } + + // regression tests for comparison + s match { + case Seq(elems @ _*) => println(elems) + } + + s match { + case Seq(x, rest @ _*) => println(rest) + } + + // more parens + s match { + case Seq((xs) @ _*) => xs + } + + /* also disallowed in Scala 3 + s match { + case Seq((xs)*) => xs + } + */ } From 6837de2979a66ad2856c84b65efbef5a4f9be6e4 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Wed, 16 Jun 2021 08:25:50 -0700 Subject: [PATCH 257/769] Backport strip parens on future varargs --- .../scala/tools/nsc/ast/parser/Parsers.scala | 27 +++---------------- test/files/neg/t5702-neg-bad-brace.check | 13 ++++----- test/files/neg/t5702-neg-bad-brace.scala | 8 +----- test/files/pos/varargs-future.scala | 21 +++++++++++++++ 4 files changed, 30 insertions(+), 39 deletions(-) diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 7d8b4f50a6be..012ee9cacb31 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -307,25 +307,6 @@ self => try body finally in copyFrom saved } - /** Perform an operation while peeking ahead. - * Pushback if the operation yields an empty tree or blows to pieces. 
- */ - @inline def peekingAhead(tree: =>Tree): Tree = { - @inline def peekahead() = { - in.prev copyFrom in - in.nextToken() - } - @inline def pushback() = { - in.next copyFrom in - in copyFrom in.prev - } - peekahead() - // try it, in case it is recoverable - val res = try tree catch { case e: Exception => pushback() ; throw e } - if (res.isEmpty) pushback() - res - } - class ParserTreeBuilder extends TreeBuilder { val global: self.global.type = self.global def unit = parser.unit @@ -1736,7 +1717,7 @@ self => val expr = reduceExprStack(base, loop(prefixExpr())) if (followingIsScala3Vararg()) atPos(expr.pos.start) { - Typed(expr, atPos(in.skipToken()) { Ident(tpnme.WILDCARD_STAR) }) + Typed(stripParens(expr), atPos(in.skipToken()) { Ident(tpnme.WILDCARD_STAR) }) } else expr } @@ -2090,10 +2071,8 @@ self => case _ => false } def checkWildStar: Tree = top match { - case Ident(nme.WILDCARD) if isSequenceOK && isRawStar => peekingAhead ( - if (isCloseDelim) atPos(top.pos.start, in.prev.offset)(Star(stripParens(top))) - else EmptyTree - ) + case Ident(nme.WILDCARD) if isSequenceOK && isRawStar && lookingAhead(isCloseDelim) => + atPos(top.pos.start, in.skipToken()) { Star(top) } case Ident(name) if isSequenceOK && followingIsScala3Vararg() => atPos(top.pos.start) { Bind(name, atPos(in.skipToken()) { Star(Ident(nme.WILDCARD)) }) diff --git a/test/files/neg/t5702-neg-bad-brace.check b/test/files/neg/t5702-neg-bad-brace.check index 503f7d95edc1..dce59017d9b5 100644 --- a/test/files/neg/t5702-neg-bad-brace.check +++ b/test/files/neg/t5702-neg-bad-brace.check @@ -1,10 +1,7 @@ -t5702-neg-bad-brace.scala:14: error: Unmatched closing brace '}' ignored here +t5702-neg-bad-brace.scala:8: error: Unmatched closing brace '}' ignored here case List(1, _*} => ^ -t5702-neg-bad-brace.scala:14: error: illegal start of simple pattern - case List(1, _*} => - ^ -t5702-neg-bad-brace.scala:15: error: ')' expected but '}' found. - } - ^ -three errors found +t5702-neg-bad-brace.scala:11: error: eof expected but '}' found. +} +^ +two errors found diff --git a/test/files/neg/t5702-neg-bad-brace.scala b/test/files/neg/t5702-neg-bad-brace.scala index 16a341cf8c17..c69436ed6b8b 100644 --- a/test/files/neg/t5702-neg-bad-brace.scala +++ b/test/files/neg/t5702-neg-bad-brace.scala @@ -1,16 +1,10 @@ object Test { - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { val is = List(1,2,3) is match { -// the erroneous brace is ignored, so we can't halt on it. -// maybe brace healing can detect overlapping unmatched (...} -// In this case, the fix emits an extra error: -// t5702-neg-bad-brace.scala:10: error: Unmatched closing brace '}' ignored here -// t5702-neg-bad-brace.scala:10: error: illegal start of simple pattern (i.e., =>) -// t5702-neg-bad-brace.scala:11: error: ')' expected but '}' found. 
case List(1, _*} => } } diff --git a/test/files/pos/varargs-future.scala b/test/files/pos/varargs-future.scala index e8c9057e564b..8b8c414b47b0 100644 --- a/test/files/pos/varargs-future.scala +++ b/test/files/pos/varargs-future.scala @@ -6,6 +6,7 @@ class Test { val s: Seq[Int] = Seq(1, 2, 3) foo(s*) + foo((s ++ s)*) // not very useful, but supported by Scala 3 (and matches what works with `: _*` syntax) foo( @@ -19,4 +20,24 @@ class Test { s match { case Seq(x, rest*) => println(rest) } + + // regression tests for comparison + s match { + case Seq(elems @ _*) => println(elems) + } + + s match { + case Seq(x, rest @ _*) => println(rest) + } + + // more parens + s match { + case Seq((xs) @ _*) => xs + } + + /* also disallowed in Scala 3 + s match { + case Seq((xs)*) => xs + } + */ } From 3dd6aa3496ff83b8bb4237e2eef46d25788baca3 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Sat, 19 Jun 2021 14:56:55 +1000 Subject: [PATCH 258/769] Speed up BSP import of project into IDE by skipping JMH source gen JMH has source generators that wrap the user-written benchmarks in synthetic code that wraps it will timing loops. By default, BSP import runs all source generators. This is useful for things like protobuf bindings, where the user wants to code against the generated APIs. But the JMH generated code isn't useful to see in the IDE and by skipping it we can import the project without needing to trigger compilation of scala-library / bench. --- build.sbt | 1 + 1 file changed, 1 insertion(+) diff --git a/build.sbt b/build.sbt index af33ac8f0dec..82208895c7ad 100644 --- a/build.sbt +++ b/build.sbt @@ -673,6 +673,7 @@ lazy val bench = project.in(file("test") / "benchmarks") else "org.scala-lang" % "scala-compiler" % benchmarkScalaVersion :: Nil }, scalacOptions ++= Seq("-feature", "-opt:l:inline", "-opt-inline-from:scala/**", "-opt-warnings"), + Jmh / bspEnabled := false // Skips JMH source generators during IDE import to avoid needing to compile scala-library during the import ).settings(inConfig(JmhPlugin.JmhKeys.Jmh)(scalabuild.JitWatchFilePlugin.jitwatchSettings)) From 31d8d058fa346e599c13f9d075e32e94e9d56073 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 17 Jun 2021 09:44:20 +1000 Subject: [PATCH 259/769] Include -release arg in cache key for ct.sym classpath element The compiler has a per-classloader cache that backs the classpath lookups of individual instances of `Global`. Elements in the cache are used by `Global` instances that are concurrent (think parallel compilation of sub-projects) or are sequential within a small timeout. In #9557, this was extended to the classpath entry that backs the `scalac -release` compiler option ([JEP-247](https://openjdk.java.net/jeps/247) support for viewing the Java base library "as of" an older JDK version. This change was buggy -- it did not include the selected release in the cache key, which could lead to a compiler that specifies `-release X` seeing the results of another compiler using `-release Y`. This behaviour was tested by a JDK-9+ conditional test (`MultiReleaseJarTest`) which unfortunately is not part of our CI on the 2.12.x branch, so the regression went unnoticed. While in this area, I followed up on a TODO comment in the same test and discovered another bug in handling of multi-release JARs. Again, this bug could manifest when different values of `-release` were used in a build. 
It would manifest as an `IllegalArgumentException` in `ResuableDataReader` when it used the size of the non-versioned classfile when sizing buffers for the versioned classfile. --- .../nsc/classpath/DirectoryClassPath.scala | 4 ++-- src/reflect/scala/reflect/io/ZipArchive.scala | 13 +++++++---- .../nsc/classpath/MultiReleaseJarTest.scala | 23 ++++++++++++++----- 3 files changed, 27 insertions(+), 13 deletions(-) diff --git a/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala b/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala index e35c3aa22353..523aece292fd 100644 --- a/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala +++ b/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala @@ -130,7 +130,7 @@ trait JFileDirectoryLookup[FileEntryType <: ClassRepresentation] extends Directo object JrtClassPath { import java.nio.file._, java.net.URI private val jrtClassPathCache = new FileBasedCache[Unit, JrtClassPath]() - private val ctSymClassPathCache = new FileBasedCache[Unit, CtSymClassPath]() + private val ctSymClassPathCache = new FileBasedCache[String, CtSymClassPath]() def apply(release: Option[String], closeableRegistry: CloseableRegistry): Option[ClassPath] = { import scala.util.Properties._ if (!isJavaAtLeast("9")) None @@ -149,7 +149,7 @@ object JrtClassPath { val ctSym = Paths.get(javaHome).resolve("lib").resolve("ct.sym") if (Files.notExists(ctSym)) None else { - val classPath = ctSymClassPathCache.getOrCreate((), ctSym :: Nil, () => new CtSymClassPath(ctSym, v.toInt), closeableRegistry, true) + val classPath = ctSymClassPathCache.getOrCreate(v, ctSym :: Nil, () => new CtSymClassPath(ctSym, v.toInt), closeableRegistry, true) Some(classPath) } } catch { diff --git a/src/reflect/scala/reflect/io/ZipArchive.scala b/src/reflect/scala/reflect/io/ZipArchive.scala index 55fa3d84a236..2ed2bda0affe 100644 --- a/src/reflect/scala/reflect/io/ZipArchive.scala +++ b/src/reflect/scala/reflect/io/ZipArchive.scala @@ -220,7 +220,7 @@ final class FileZipArchive(file: JFile, release: Option[String]) extends ZipArch override def close(): Unit = { zipFilePool.release(zipFile) } } } - override def sizeOption: Option[Int] = Some(size) // could be stale + override def sizeOption: Option[Int] = Some(size) } private[this] val dirs = new java.util.HashMap[String, DirEntry]() @@ -236,16 +236,19 @@ final class FileZipArchive(file: JFile, release: Option[String]) extends ZipArch if (!zipEntry.getName.startsWith("META-INF/versions/")) { if (!zipEntry.isDirectory) { val dir = getDir(dirs, zipEntry) + val mrEntry = if (release.isDefined) { + zipFile.getEntry(zipEntry.getName) + } else zipEntry val f = if (ZipArchive.closeZipFile) new LazyEntry( zipEntry.getName, - zipEntry.getTime, - zipEntry.getSize.toInt) + mrEntry.getTime, + mrEntry.getSize.toInt) else new LeakyEntry(zipEntry.getName, - zipEntry.getTime, - zipEntry.getSize.toInt) + mrEntry.getTime, + mrEntry.getSize.toInt) dir.entries(f.name) = f } diff --git a/test/junit/scala/tools/nsc/classpath/MultiReleaseJarTest.scala b/test/junit/scala/tools/nsc/classpath/MultiReleaseJarTest.scala index 75d4c2d30756..96d118847ec7 100644 --- a/test/junit/scala/tools/nsc/classpath/MultiReleaseJarTest.scala +++ b/test/junit/scala/tools/nsc/classpath/MultiReleaseJarTest.scala @@ -4,10 +4,9 @@ import java.io.ByteArrayOutputStream import java.nio.file.{FileSystems, Files, Path} import java.util.jar.Attributes import java.util.jar.Attributes.Name - import org.junit.{Assert, Test} -import scala.tools.nsc.{Global, Settings} +import 
scala.tools.nsc.{CloseableRegistry, Global, Settings} import scala.tools.testing.BytecodeTesting import scala.util.Properties @@ -22,6 +21,7 @@ class MultiReleaseJarTest extends BytecodeTesting { // TODO test fails if both Global runs look at the same JAR on disk. Caching problem in our classpath implementation? // val temp2 = temp1 val temp2 = Files.createTempFile("mr-jar-test-", ".jar") + val cleanup = new CloseableRegistry try { def code(newApi: String) = s"package p1; abstract class Versioned { def oldApi: Int; $newApi }" @@ -39,6 +39,7 @@ class MultiReleaseJarTest extends BytecodeTesting { settings.usejavacp.value = true settings.classpath.value = jarPath.toAbsolutePath.toString val g = new Global(settings) + cleanup.registerClosable(g) settings.release.value = release new g.Run val decls = g.rootMirror.staticClass("p1.Versioned").info.decls.filterNot(_.isConstructor).map(_.name.toString).toList.sorted @@ -47,28 +48,38 @@ class MultiReleaseJarTest extends BytecodeTesting { Assert.assertEquals(List("newApi", "oldApi"), declsOfC(temp1, "9")) Assert.assertEquals(List("oldApi"), declsOfC(temp2, "8")) - } finally + } finally { + cleanup.close() List(temp1, temp2).foreach(Files.deleteIfExists) + } } @Test def ctSymTest(): Unit = { if (!Properties.isJavaAtLeast("9")) { println("skipping mrJar() on old JDK"); return} // TODO test that the compiler warns that --release is unsupported. + val cleanup = new CloseableRegistry def lookup(className: String, release: String): Boolean = { val settings = new Settings() settings.usejavacp.value = true val g = new Global(settings) + cleanup.registerClosable(g) import g._ settings.release.value = release new Run rootMirror.getClassIfDefined(TypeName(className)) != NoSymbol } - Assert.assertTrue(lookup("java.lang.invoke.LambdaMetafactory", "8")) - Assert.assertFalse(lookup("java.lang.invoke.LambdaMetafactory", "7")) - Assert.assertTrue(lookup("java.lang.invoke.LambdaMetafactory", "9")) + try { + Assert.assertTrue(lookup("java.lang.invoke.LambdaMetafactory", "8")) + Assert.assertFalse(lookup("java.lang.invoke.LambdaMetafactory", "7")) + Assert.assertTrue(lookup("java.lang.invoke.LambdaMetafactory", "9")) + } finally { + cleanup.close() + } } + + private def createManifest = { val manifest = new java.util.jar.Manifest() manifest.getMainAttributes.put(Name.MANIFEST_VERSION, "1.0") From fdc20efc2067a5cd15ffff205191dc2583140f8c Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 22 Jun 2021 15:14:03 +1000 Subject: [PATCH 260/769] Heed unused import warnings --- .../tools/nsc/classpath/ZipAndJarFileLookupFactory.scala | 5 +---- src/scaladoc/scala/tools/nsc/ScalaDoc.scala | 1 - 2 files changed, 1 insertion(+), 5 deletions(-) diff --git a/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala index 5f374119daef..39854624ecdd 100644 --- a/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala +++ b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala @@ -14,18 +14,15 @@ package scala.tools.nsc.classpath import java.io.{Closeable, File} import java.net.URL -import java.nio.file.{Files, InvalidPathException} +import java.nio.file.Files import java.nio.file.attribute.{BasicFileAttributes, FileTime} -import java.nio.file.spi.FileSystemProvider import java.util.{Timer, TimerTask} import java.util.concurrent.atomic.AtomicInteger -import java.util.zip.ZipError import scala.annotation.tailrec import scala.reflect.io.{AbstractFile, FileZipArchive, 
ManifestResources} import scala.tools.nsc.util.{ClassPath, ClassRepresentation} import scala.tools.nsc.{CloseableRegistry, Settings} import FileUtils._ -import scala.reflect.internal.FatalError import scala.tools.nsc.io.Jar /** diff --git a/src/scaladoc/scala/tools/nsc/ScalaDoc.scala b/src/scaladoc/scala/tools/nsc/ScalaDoc.scala index 880478044c70..04ca245e20d9 100644 --- a/src/scaladoc/scala/tools/nsc/ScalaDoc.scala +++ b/src/scaladoc/scala/tools/nsc/ScalaDoc.scala @@ -15,7 +15,6 @@ package scala.tools.nsc import scala.tools.nsc.doc.DocFactory import scala.tools.nsc.reporters.ConsoleReporter import scala.tools.nsc.settings.DefaultPathFactory -import scala.reflect.internal.Reporter import scala.reflect.internal.util.{ FakePos, NoPosition, Position } /** The main class for scaladoc, a front-end for the Scala compiler From aceaa5c305abb1e5dc33068a25c3b806beb810e1 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 22 Jun 2021 15:09:05 +1000 Subject: [PATCH 261/769] Avoid IllegalArgumentException in JDK17+ for lambda deser. --- src/library/scala/runtime/LambdaDeserializer.scala | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/library/scala/runtime/LambdaDeserializer.scala b/src/library/scala/runtime/LambdaDeserializer.scala index ec283193a786..dc54c349eda6 100644 --- a/src/library/scala/runtime/LambdaDeserializer.scala +++ b/src/library/scala/runtime/LambdaDeserializer.scala @@ -101,8 +101,7 @@ object LambdaDeserializer { /* instantiatedMethodType = */ instantiated, /* flags = */ flags.asInstanceOf[AnyRef], /* markerInterfaceCount = */ 1.asInstanceOf[AnyRef], - /* markerInterfaces[0] = */ markerInterface, - /* bridgeCount = */ 0.asInstanceOf[AnyRef] + /* markerInterfaces[0] = */ markerInterface ) } From b7dc31fb2720eeee4fdc906d05aade2ff3b2bbbf Mon Sep 17 00:00:00 2001 From: Alec Theriault Date: Thu, 25 Mar 2021 22:40:37 -0700 Subject: [PATCH 262/769] Use `StringConcatFactory` for string concatenation on JDK 9+ JEP 280, released in JDK 9, proposes a new way to compile string concatenation using `invokedynamic` and `StringConcatFactory`. This new approach generates less bytecode, doesn't have to incur the overhead of `StringBuilder` allocations, and allows users to pick swap the concatenation technique at runtime. This changes the codegen when the target is at least Java 9 to leverage `invokedynamic` and `StringConcatFactory`. On Java 8, the old `StringBuilder` approach is still used. 
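(Editorial note, not part of this patch.) The stand-alone Scala sketch below, which assumes a JDK 9+ runtime, does by hand what the JVM's bootstrap method does at the `invokedynamic makeConcatWithConstants` call sites this commit emits: it asks `StringConcatFactory` for a concatenator whose recipe marks each argument slot with the character U+0001 (the `TagArg` placeholder used by the code generator), then invokes the resulting call site. The object name and recipe are illustrative only.

    import java.lang.invoke.{MethodHandles, MethodType, StringConcatFactory}

    object ConcatSketch {
      def main(args: Array[String]): Unit = {
        val lookup = MethodHandles.lookup()
        // Recipe: literal text, with U+0001 marking where each argument is spliced in.
        val callSite = StringConcatFactory.makeConcatWithConstants(
          lookup,
          "makeConcatWithConstants",
          MethodType.methodType(classOf[String], classOf[Int], classOf[Object]),
          "x = \u0001, y = \u0001")
        // invokeWithArguments adapts the boxed arguments, so no signature-polymorphic call is needed.
        val result = callSite.dynamicInvoker().invokeWithArguments(Int.box(1), "two")
        println(result) // prints: x = 1, y = two
      }
    }
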
--- .../nsc/backend/jvm/BCodeBodyBuilder.scala | 114 ++++++++++++++---- .../nsc/backend/jvm/BCodeIdiomatic.scala | 39 +++++- test/files/run/StringConcat.check | Bin 0 -> 5587 bytes test/files/run/StringConcat.scala | 86 +++++++++++++ 4 files changed, 209 insertions(+), 30 deletions(-) create mode 100644 test/files/run/StringConcat.check create mode 100644 test/files/run/StringConcat.scala diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala index a40c04e6a527..753407346a14 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala @@ -33,7 +33,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { import bTypes._ import coreBTypes._ import definitions._ - import genBCode.postProcessor.backendUtils.addIndyLambdaImplMethod + import genBCode.postProcessor.backendUtils.{addIndyLambdaImplMethod, classfileVersion} import genBCode.postProcessor.callGraph.{inlineAnnotatedCallsites, noInlineAnnotatedCallsites} /* @@ -990,44 +990,110 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { } } + /* Generate string concatenation + * + * On JDK 8: create and append using `StringBuilder` + * On JDK 9+: use `invokedynamic` with `StringConcatFactory` + */ def genStringConcat(tree: Tree): BType = { lineNumber(tree) liftStringConcat(tree) match { - // Optimization for expressions of the form "" + x. We can avoid the StringBuilder. + // Optimization for expressions of the form "" + x case List(Literal(Constant("")), arg) => genLoad(arg, ObjectRef) genCallMethod(String_valueOf, InvokeStyle.Static, arg.pos) case concatenations => - val approxBuilderSize = concatenations.map { - case Literal(Constant(s: String)) => s.length - case Literal(c @ Constant(value)) if c.isNonUnitAnyVal => String.valueOf(c).length - case _ => - // could add some guess based on types of primitive args. - // or, we could stringify all the args onto the stack, compute the exact size of - // the StringBuilder. - // or, just let https://openjdk.java.net/jeps/280 (or a re-implementation thereof in our 2.13.x stdlib) do all the hard work at link time - 0 - }.sum - bc.genStartConcat(tree.pos, approxBuilderSize) - def isEmptyString(t: Tree) = t match { - case Literal(Constant("")) => true - case _ => false - } - for (elem <- concatenations if !isEmptyString(elem)) { - val loadedElem = elem match { + + val concatArguments = concatenations.view + .filter { + case Literal(Constant("")) => false // empty strings are no-ops in concatenation + case _ => true + } + .map { case Apply(boxOp, value :: Nil) if currentRun.runDefinitions.isBox(boxOp.symbol) => // Eliminate boxing of primitive values. Boxing is introduced by erasure because // there's only a single synthetic `+` method "added" to the string class. 
value + case other => other + } + .toList + + // `StringConcatFactory` only got added in JDK 9, so use `StringBuilder` for lower + if (classfileVersion.get < asm.Opcodes.V9) { + + // Estimate capacity needed for the string builder + val approxBuilderSize = concatArguments.view.map { + case Literal(Constant(s: String)) => s.length + case Literal(c @ Constant(_)) if c.isNonUnitAnyVal => String.valueOf(c).length + case _ => 0 + }.sum + bc.genNewStringBuilder(tree.pos, approxBuilderSize) + + for (elem <- concatArguments) { + val elemType = tpeTK(elem) + genLoad(elem, elemType) + bc.genStringBuilderAppend(elemType, elem.pos) + } + bc.genStringBuilderEnd(tree.pos) + } else { + + /* `StringConcatFactory#makeConcatWithConstants` accepts max 200 argument slots. If + * the string concatenation is longer (unlikely), we spill into multiple calls + */ + val MaxIndySlots = 200 + val TagArg = '\u0001' // indicates a hole (in the recipe string) for an argument + val TagConst = '\u0002' // indicates a hole (in the recipe string) for a constant + + val recipe = new StringBuilder() + val argTypes = Seq.newBuilder[asm.Type] + val constVals = Seq.newBuilder[String] + var totalArgSlots = 0 + var countConcats = 1 // ie. 1 + how many times we spilled + + for (elem <- concatArguments) { + val tpe = tpeTK(elem) + val elemSlots = tpe.size + + // Unlikely spill case + if (totalArgSlots + elemSlots >= MaxIndySlots) { + bc.genIndyStringConcat(recipe.toString, argTypes.result(), constVals.result()) + countConcats += 1 + totalArgSlots = 0 + recipe.setLength(0) + argTypes.clear() + constVals.clear() + } - case _ => elem + elem match { + case Literal(Constant(s: String)) => + if (s.contains(TagArg) || s.contains(TagConst)) { + totalArgSlots += elemSlots + recipe.append(TagConst) + constVals += s + } else { + recipe.append(s) + } + + case other => + totalArgSlots += elemSlots + recipe.append(TagArg) + val tpe = tpeTK(elem) + argTypes += tpe.toASMType + genLoad(elem, tpe) + } + } + bc.genIndyStringConcat(recipe.toString, argTypes.result(), constVals.result()) + + // If we spilled, generate one final concat + if (countConcats > 1) { + bc.genIndyStringConcat( + TagArg.toString * countConcats, + Seq.fill(countConcats)(StringRef.toASMType), + Seq.empty + ) } - val elemType = tpeTK(loadedElem) - genLoad(loadedElem, elemType) - bc.genConcat(elemType, loadedElem.pos) } - bc.genEndConcat(tree.pos) } StringRef } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala index 86c0b83671c4..92de2aca3b9a 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala @@ -175,10 +175,11 @@ abstract class BCodeIdiomatic { } // end of method genPrimitiveShift() - /* + /* Creates a new `StringBuilder` instance with the requested capacity + * * can-multi-thread */ - final def genStartConcat(pos: Position, size: Int): Unit = { + final def genNewStringBuilder(pos: Position, size: Int): Unit = { jmethod.visitTypeInsn(Opcodes.NEW, JavaStringBuilderClassName) jmethod.visitInsn(Opcodes.DUP) jmethod.visitLdcInsn(Integer.valueOf(size)) @@ -191,10 +192,11 @@ abstract class BCodeIdiomatic { ) } - /* + /* Issue a call to `StringBuilder#append` for the right element type + * * can-multi-thread */ - def genConcat(elemType: BType, pos: Position): Unit = { + final def genStringBuilderAppend(elemType: BType, pos: Position): Unit = { val paramType: BType = elemType match { case ct: ClassBType if 
ct.isSubtypeOf(StringRef).get => StringRef case ct: ClassBType if ct.isSubtypeOf(jlStringBufferRef).get => jlStringBufferRef @@ -211,13 +213,38 @@ abstract class BCodeIdiomatic { invokevirtual(JavaStringBuilderClassName, "append", bt.descriptor, pos) } - /* + /* Extract the built `String` from the `StringBuilder` + *: * can-multi-thread */ - final def genEndConcat(pos: Position): Unit = { + final def genStringBuilderEnd(pos: Position): Unit = { invokevirtual(JavaStringBuilderClassName, "toString", "()Ljava/lang/String;", pos) } + /* Concatenate top N arguments on the stack with `StringConcatFactory#makeConcatWithConstants` + * (only works for JDK 9+) + * + * can-multi-thread + */ + final def genIndyStringConcat( + recipe: String, + argTypes: Seq[asm.Type], + constants: Seq[String] + ): Unit = { + jmethod.visitInvokeDynamicInsn( + "makeConcatWithConstants", + asm.Type.getMethodDescriptor(StringRef.toASMType, argTypes:_*), + new asm.Handle( + asm.Opcodes.H_INVOKESTATIC, + "java/lang/invoke/StringConcatFactory", + "makeConcatWithConstants", + "(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;Ljava/lang/String;[Ljava/lang/Object;)Ljava/lang/invoke/CallSite;", + false + ), + (recipe +: constants):_* + ) + } + /* * Emits one or more conversion instructions based on the types given as arguments. * diff --git a/test/files/run/StringConcat.check b/test/files/run/StringConcat.check new file mode 100644 index 0000000000000000000000000000000000000000..10eaa9a20d1b98d974875029c1d16d893b13f4e1 GIT binary patch literal 5587 zcmeHKOHbQC5H3CXSB$J45~&!4kF{g9i(`7BnJR!WndA!XgU3;{}f2{S1QkM9yHFWGm>^ z17j{QF(XMTElWluC12qD9PU?%1i9jQx_~8RRFE{?0iV+yczwqkIi3SNhHORQ+wBxO z(@fwl;EFTc0A+d;-`ALz;R2`Ka_$9(=l7h@c z4YehIqzc33>_f!-5OA5tF%>WYMz>}r4Rz{y^CHUbn)FDH;c7+1ls@HDu{MDRvLOyj zxTA1AgM$okb+}I7?K)(=rfku%BlJs?S5BCW;f7^6jpqkpe=r!60Bo^4-^sxGflg0m21r7VmB+NY;1wE8@A!;2CIFl=TWTd z#a`|f4 Date: Thu, 24 Jun 2021 08:29:40 -0700 Subject: [PATCH 263/769] Annotations on generated accessors --- src/compiler/scala/tools/nsc/javac/JavaParsers.scala | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala index 7dcfacdb3c2e..12906d0ece7b 100644 --- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala +++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala @@ -840,7 +840,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { var generateCanonicalCtor = true var generateAccessors = header .view - .map { case ValDef(_, name, tpt, _) => name -> tpt } + .map { case ValDef(mods, name, tpt, _) => (name, (tpt, mods.annotations)) } .toMap for (DefDef(_, name, List(), List(params), _, _) <- body) { if (name == nme.CONSTRUCTOR && params.size == header.size) { @@ -856,8 +856,8 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { // Generate canonical constructor and accessors, if not already manually specified val accessors = generateAccessors - .map { case (name, tpt) => - DefDef(Modifiers(Flags.JAVA), name, List(), List(), tpt.duplicate, blankExpr) + .map { case (name, (tpt, annots)) => + DefDef(Modifiers(Flags.JAVA) withAnnotations annots, name, List(), List(), tpt.duplicate, blankExpr) } .toList val canonicalCtor = Option.when(generateCanonicalCtor) { From 86fe04b22f8eeb2474264ef7f1bbf0bc54c5a562 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Sun, 27 Jun 2021 14:06:15 -0700 Subject: [PATCH 264/769] Tweak test to wait for expiring thread The 
test for exception handling waits for thread to die, perhaps to ensure all actions are complete. Previously, throwing terminated the thread, but in JDK 17, the thread's exception handler is the pool's. The test is updated to wait some long period for the thread to expire of natural causes. Wonder why the replicants in Blade Runner didn't change their thinking from "retirement" and "termination" to "Hey, I've got a pretty healthy keepAlive." --- .../scala/tools/testkit/AssertUtil.scala | 20 +++-- test/files/jvm/scala-concurrent-tck.scala | 89 +++++++++---------- 2 files changed, 56 insertions(+), 53 deletions(-) diff --git a/src/testkit/scala/tools/testkit/AssertUtil.scala b/src/testkit/scala/tools/testkit/AssertUtil.scala index 4b7083d83e2c..e969376a71d9 100644 --- a/src/testkit/scala/tools/testkit/AssertUtil.scala +++ b/src/testkit/scala/tools/testkit/AssertUtil.scala @@ -233,6 +233,12 @@ object AssertUtil { * takes a long time, so long as we can verify progress. */ def waitForIt(terminated: => Boolean, progress: Progress = Fast, label: => String = "test"): Unit = { + def value: Option[Boolean] = if (terminated) Some(true) else None + assertTrue(waitFor(value, progress, label)) + } + /** Wait for a value or eventually throw. + */ + def waitFor[A](value: => Option[A], progress: Progress = Fast, label: => String = "test"): A = { val limit = 5 var n = 1 var (dormancy, factor) = progress match { @@ -240,14 +246,13 @@ object AssertUtil { case Fast => (250L, 4) } var period = 0L + var result: Option[A] = None var done = false - var ended = false while (!done && n < limit) { try { - ended = terminated - if (ended) { - done = true - } else { + result = value + done = result.nonEmpty + if (!done) { //println(s"Wait for test condition: $label") Thread.sleep(dormancy) period += dormancy @@ -258,7 +263,10 @@ object AssertUtil { n += 1 dormancy *= factor } - assertTrue(s"Expired after dormancy period $period waiting for termination condition $label", ended) + result match { + case Some(v) => v + case _ => fail(s"Expired after dormancy period $period waiting for termination condition $label") + } } /** How frequently to check a termination condition. 
*/ diff --git a/test/files/jvm/scala-concurrent-tck.scala b/test/files/jvm/scala-concurrent-tck.scala index bcbd977e01cf..323eaa6937d7 100644 --- a/test/files/jvm/scala-concurrent-tck.scala +++ b/test/files/jvm/scala-concurrent-tck.scala @@ -9,25 +9,27 @@ import scala.concurrent.{ Awaitable, blocking } -import scala.util.{ Try, Success, Failure } -import scala.concurrent.duration.Duration -import scala.concurrent.duration._ -import scala.reflect.{ classTag, ClassTag } -import scala.tools.testkit.AssertUtil.assertThrows import scala.annotation.tailrec +import scala.concurrent.duration._ +import scala.reflect.{classTag, ClassTag} +import scala.tools.testkit.AssertUtil.{Fast, Slow, assertThrows, waitFor, waitForIt} +import scala.util.{Try, Success, Failure} +import scala.util.chaining._ +import java.util.concurrent.CountDownLatch +import java.util.concurrent.TimeUnit.{MILLISECONDS => Milliseconds, SECONDS => Seconds} trait TestBase { - import scala.tools.testkit.AssertUtil.{Fast, Slow, waitForIt} + trait Done { def apply(proof: => Boolean): Unit } + def once(body: Done => Unit): Unit = { - import java.util.concurrent.{ LinkedBlockingQueue, TimeUnit } - import TimeUnit.{MILLISECONDS => Milliseconds} + import java.util.concurrent.LinkedBlockingQueue val q = new LinkedBlockingQueue[Try[Boolean]] body(new Done { def apply(proof: => Boolean): Unit = q offer Try(proof) }) var tried: Try[Boolean] = null - def check = { tried = q.poll(5000, Milliseconds) ; tried != null } + def check = { tried = q.poll(5000L, Milliseconds) ; tried != null } waitForIt(check, progress = Slow, label = "concurrent-tck") assert(tried.isSuccess) assert(tried.get) @@ -37,22 +39,17 @@ trait TestBase { def test[T](name: String)(body: => T): T = { println(s"starting $name") - val r = body - println(s"finished $name") - r + body.tap(_ => println(s"finished $name")) } def await[A](value: Awaitable[A]): A = { - var a: A = null.asInstanceOf[A] - def check = { + def check: Option[A] = Try(Await.result(value, Duration(500, "ms"))) match { - case Success(x) => a = x ; true - case Failure(_: TimeoutException) => false + case Success(x) => Some(x) + case Failure(_: TimeoutException) => None case Failure(t) => throw t } - } - waitForIt(check, progress = Fast, label = "concurrent-tck test result") - a + waitFor(check, progress = Fast, label = "concurrent-tck test result") } } @@ -989,36 +986,34 @@ class CustomExecutionContext extends TestBase { assert(count >= 1) } - def testUncaughtExceptionReporting(): Unit = once { - done => - import java.util.concurrent.TimeUnit.SECONDS - val example = new InterruptedException() - val latch = new java.util.concurrent.CountDownLatch(1) - @volatile var thread: Thread = null - @volatile var reported: Throwable = null - val ec = ExecutionContext.fromExecutorService(null, t => { - reported = t - latch.countDown() - }) + def testUncaughtExceptionReporting(): Unit = once { done => + val example = new InterruptedException + val latch = new CountDownLatch(1) + @volatile var thread: Thread = null + @volatile var reported: Throwable = null + val ec = ExecutionContext.fromExecutorService(null, t => { + reported = t + latch.countDown() + }) - @tailrec def waitForThreadDeath(turns: Int): Boolean = - if (turns <= 0) false - else if ((thread ne null) && thread.isAlive == false) true - else { - Thread.sleep(10) - waitForThreadDeath(turns - 1) - } + @tailrec def waitForThreadDeath(turns: Int): Boolean = + turns > 0 && (thread != null && !thread.isAlive || { Thread.sleep(10L) ; waitForThreadDeath(turns - 1) }) - try { 
- ec.execute(() => { - thread = Thread.currentThread - throw example - }) - latch.await(2, SECONDS) - done(waitForThreadDeath(turns = 100) && (reported eq example)) - } finally { - ec.shutdown() - } + def truthfully(b: Boolean): Option[Boolean] = if (b) Some(true) else None + + // jdk17 thread receives pool exception handler, so wait for thread to die slow and painful expired keepalive + def threadIsDead = + waitFor(truthfully(waitForThreadDeath(turns = 100)), progress = Slow, label = "concurrent-tck-thread-death") + + try { + ec.execute(() => { + thread = Thread.currentThread + throw example + }) + latch.await(2, Seconds) + done(threadIsDead && (reported eq example)) + } + finally ec.shutdown() } test("testUncaughtExceptionReporting")(testUncaughtExceptionReporting()) From 4ed923e607e2706f18df6989565a7ca0469928d4 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Mon, 28 Jun 2021 15:03:36 +0200 Subject: [PATCH 265/769] Always generate Record constructor, unlink later if a matching one exists --- .../scala/tools/nsc/javac/JavaParsers.scala | 34 ++++++------------- .../scala/tools/nsc/typechecker/Namers.scala | 8 ++++- .../scala/reflect/internal/Definitions.scala | 1 + .../reflect/runtime/JavaUniverseForce.scala | 1 + test/files/pos/t11908/R2.java | 4 +-- 5 files changed, 22 insertions(+), 26 deletions(-) diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala index 12906d0ece7b..d14aacad9e68 100644 --- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala +++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala @@ -837,22 +837,12 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { val (statics, body) = typeBody(RECORD) // Records generate a canonical constructor and accessors, unless they are manually specified - var generateCanonicalCtor = true var generateAccessors = header .view .map { case ValDef(mods, name, tpt, _) => (name, (tpt, mods.annotations)) } .toMap - for (DefDef(_, name, List(), List(params), _, _) <- body) { - if (name == nme.CONSTRUCTOR && params.size == header.size) { - val ctorParamsAreCanonical = params.lazyZip(header).forall { - case (ValDef(_, _, tpt1, _), ValDef(_, _, tpt2, _)) => tpt1 equalsStructure tpt2 - case _ => false - } - if (ctorParamsAreCanonical) generateCanonicalCtor = false - } else if (generateAccessors.contains(name) && params.isEmpty) { - generateAccessors -= name - } - } + for (DefDef(_, name, List(), List(params), _, _) <- body if generateAccessors.contains(name) && params.isEmpty) + generateAccessors -= name // Generate canonical constructor and accessors, if not already manually specified val accessors = generateAccessors @@ -860,23 +850,21 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { DefDef(Modifiers(Flags.JAVA) withAnnotations annots, name, List(), List(), tpt.duplicate, blankExpr) } .toList - val canonicalCtor = Option.when(generateCanonicalCtor) { - DefDef( - mods, - nme.CONSTRUCTOR, - List(), - List(header.map(_.duplicate)), - TypeTree(), - blankExpr - ) - } + val canonicalCtor = DefDef( + mods | Flags.SYNTHETIC, + nme.CONSTRUCTOR, + List(), + List(header.map(_.duplicate)), + TypeTree(), + blankExpr + ) addCompanionObject(statics, atPos(pos) { ClassDef( mods | Flags.FINAL, name, tparams, - makeTemplate(superclass :: interfaces, canonicalCtor.toList ++ accessors ++ body) + makeTemplate(superclass :: interfaces, canonicalCtor :: accessors ::: body) ) }) } diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala 
b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 825bcd50b04f..0d25d8ed12a1 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -1487,7 +1487,13 @@ trait Namers extends MethodSynthesis { } val methSig = deskolemizedPolySig(vparamSymssOrEmptyParamsFromOverride, resTp) - pluginsTypeSig(methSig, typer, ddef, resTpGiven) + val unlink = methOwner.isJava && meth.isSynthetic && meth.isConstructor && methOwner.superClass == JavaRecordClass && + methOwner.info.decl(meth.name).alternatives.exists(c => c != meth && c.tpe.matches(methSig)) + if (unlink) { + methOwner.info.decls.unlink(meth) + ErrorType + } else + pluginsTypeSig(methSig, typer, ddef, resTpGiven) } /** diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index f6a8615e44d2..35cb296a1bbc 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -411,6 +411,7 @@ trait Definitions extends api.StandardDefinitions { lazy val JavaEnumClass = requiredClass[java.lang.Enum[_]] lazy val JavaUtilMap = requiredClass[java.util.Map[_, _]] lazy val JavaUtilHashMap = requiredClass[java.util.HashMap[_, _]] + lazy val JavaRecordClass = getClassIfDefined("java.lang.Record") lazy val ByNameParamClass = specialPolyClass(tpnme.BYNAME_PARAM_CLASS_NAME, COVARIANT)(_ => AnyTpe) lazy val JavaRepeatedParamClass = specialPolyClass(tpnme.JAVA_REPEATED_PARAM_CLASS_NAME, COVARIANT)(tparam => arrayType(tparam.tpe)) diff --git a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala index be33ed5a6651..0e01853468ea 100644 --- a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala +++ b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala @@ -290,6 +290,7 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => definitions.JavaEnumClass definitions.JavaUtilMap definitions.JavaUtilHashMap + definitions.JavaRecordClass definitions.ByNameParamClass definitions.JavaRepeatedParamClass definitions.RepeatedParamClass diff --git a/test/files/pos/t11908/R2.java b/test/files/pos/t11908/R2.java index 52fb72b26e5e..62bf5ff6c22c 100644 --- a/test/files/pos/t11908/R2.java +++ b/test/files/pos/t11908/R2.java @@ -6,9 +6,9 @@ public int getInt() { } // Canonical constructor - public R(int i, String s) { + public R(int i, java.lang.String s) { this.i = i; this.s = s.intern(); } } -} \ No newline at end of file +} From e987b5a74c9a8eb8b5bf4fb3c43fd8c593e121a5 Mon Sep 17 00:00:00 2001 From: Mathias Date: Tue, 29 Jun 2021 22:23:18 +0200 Subject: [PATCH 266/769] [Library] Remove allocation overhead in Iterator#collect --- src/library/scala/collection/Iterator.scala | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala index 572dc4078f63..40f697c3fe82 100644 --- a/src/library/scala/collection/Iterator.scala +++ b/src/library/scala/collection/Iterator.scala @@ -498,7 +498,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite */ def withFilter(p: A => Boolean): Iterator[A] = filter(p) - def collect[B](pf: PartialFunction[A, B]): Iterator[B] = new AbstractIterator[B] { + def collect[B](pf: PartialFunction[A, B]): Iterator[B] = new AbstractIterator[B] with (A => B) { // Manually buffer to avoid extra layer of wrapping with buffered private[this] var hd: B = _ 
@@ -508,12 +508,14 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite // BE REALLY CAREFUL TO KEEP COMMENTS AND NUMBERS IN SYNC! private[this] var status = 0/*Seek*/ + def apply(value: A): B = Statics.pfMarker.asInstanceOf[B] + def hasNext = { val marker = Statics.pfMarker while (status == 0/*Seek*/) { if (self.hasNext) { val x = self.next() - val v = pf.applyOrElse(x, ((x: A) => marker).asInstanceOf[A => B]) + val v = pf.applyOrElse(x, this) if (marker ne v.asInstanceOf[AnyRef]) { hd = v status = 1/*Found*/ From 8a15b1c0ce77402d330b692e828e3f8f681b8e92 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Wed, 30 Jun 2021 10:04:24 +0100 Subject: [PATCH 267/769] Test & change package module deferred opening setup --- src/compiler/scala/tools/nsc/Global.scala | 16 +++----- .../tools/nsc/typechecker/Analyzer.scala | 7 +--- .../scala/tools/reflect/ReflectGlobal.scala | 2 + .../scala/tools/nsc/interactive/Global.scala | 16 ++++---- .../scala/reflect/internal/SymbolTable.scala | 7 +--- test/files/run/package-object-toolbox.scala | 40 +++++++++++++++++++ 6 files changed, 61 insertions(+), 27 deletions(-) create mode 100644 test/files/run/package-object-toolbox.scala diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 5dbea6505185..1fd77e0fe4a6 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -80,15 +80,12 @@ class Global(var currentSettings: Settings, reporter0: Reporter) import definitions.findNamedMember def findMemberFromRoot(fullName: Name): Symbol = rootMirror.findMemberFromRoot(fullName) - override def deferredOpenPackageModule(container: Symbol, dest: Symbol): Unit = { - // Some compiler runs (e.g. Toolbox and the PC) just initialise Global and then discard the Run - // such that the scala package object decls never get entered into the scala package - if ((curRun eq null) || !isGlobalInitialized || isPastPackageObjects) { - super.openPackageModule(container, dest) - } else { - analyzer.packageObjects.deferredOpen(dest) = container - } + + override def openPackageModule(pkgClass: Symbol, force: Boolean): Unit = { + if (force || isPast(currentRun.namerPhase)) super.openPackageModule(pkgClass, true) + else analyzer.packageObjects.deferredOpen.add(pkgClass) } + // alternate constructors ------------------------------------------ override def settings = currentSettings @@ -1025,7 +1022,6 @@ class Global(var currentSettings: Settings, reporter0: Reporter) ) override def isPastTyper = isPast(currentRun.typerPhase) def isBeforeErasure = isBefore(currentRun.erasurePhase) - def isPastPackageObjects = isPast(currentRun.packageobjectsPhase) def isPast(phase: Phase) = ( (curRun ne null) && isGlobalInitialized // defense against init order issues @@ -1347,7 +1343,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) */ val parserPhase = phaseNamed("parser") val namerPhase = phaseNamed("namer") - val packageobjectsPhase = phaseNamed("packageobjects") + // val packageobjectsPhase = phaseNamed("packageobjects") val typerPhase = phaseNamed("typer") // val inlineclassesPhase = phaseNamed("inlineclasses") // val superaccessorsPhase = phaseNamed("superaccessors") diff --git a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala index 1fd2fde5894e..65e669a7743e 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala @@ 
-52,7 +52,7 @@ trait Analyzer extends AnyRef object packageObjects extends { val global: Analyzer.this.global.type = Analyzer.this.global } with SubComponent { - val deferredOpen = perRunCaches.newMap[Symbol, Symbol]() + val deferredOpen = perRunCaches.newSet[Symbol]() val phaseName = "packageobjects" val runsAfter = List[String]() val runsRightAfter= Some("namer") @@ -77,10 +77,7 @@ trait Analyzer extends AnyRef def apply(unit: CompilationUnit): Unit = { openPackageObjectsTraverser(unit.body) - deferredOpen.foreach { - case (dest, container) => - openPackageModule(container, dest) - } + deferredOpen.foreach(openPackageModule(_)) } } } diff --git a/src/compiler/scala/tools/reflect/ReflectGlobal.scala b/src/compiler/scala/tools/reflect/ReflectGlobal.scala index 91443b448ee1..4b4de7e96d78 100644 --- a/src/compiler/scala/tools/reflect/ReflectGlobal.scala +++ b/src/compiler/scala/tools/reflect/ReflectGlobal.scala @@ -66,5 +66,7 @@ class ReflectGlobal(currentSettings: Settings, reporter: Reporter, override val override implicit val MirrorTag: ClassTag[Mirror] = ClassTag[Mirror](classOf[Mirror]) override type RuntimeClass = java.lang.Class[_] override implicit val RuntimeClassTag: ClassTag[RuntimeClass] = ClassTag[RuntimeClass](classOf[RuntimeClass]) + + override def openPackageModule(pkgClass: Symbol, force: Boolean): Unit = super.openPackageModule(pkgClass, true) } diff --git a/src/interactive/scala/tools/nsc/interactive/Global.scala b/src/interactive/scala/tools/nsc/interactive/Global.scala index bb434dd7a0bf..909d9198b611 100644 --- a/src/interactive/scala/tools/nsc/interactive/Global.scala +++ b/src/interactive/scala/tools/nsc/interactive/Global.scala @@ -398,6 +398,15 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") val platform: Global.this.platform.type = Global.this.platform } with BrowsingLoaders + override def openPackageModule(pkgClass: Symbol, force: Boolean): Unit = { + val isPastNamer = force || currentTyperRun == null || (currentTyperRun.currentUnit match { + case unit: RichCompilationUnit => unit.isParsed + case _ => true + }) + if (isPastNamer) super.openPackageModule(pkgClass, true) + else analyzer.packageObjects.deferredOpen.add(pkgClass) + } + // ----------------- Polling --------------------------------------- case class WorkEvent(atNode: Int, atMillis: Long) @@ -1355,13 +1364,6 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") } } - override def isPastPackageObjects = { - (if (currentTyperRun == null) NoCompilationUnit else currentTyperRun.currentUnit) match { - case unit: RichCompilationUnit => unit.isParsed - case _ => super.isPastPackageObjects - } - } - def newTyperRun(): Unit = { currentTyperRun = new TyperRun } diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala index ec882b71d690..e7b9466ffa95 100644 --- a/src/reflect/scala/reflect/internal/SymbolTable.scala +++ b/src/reflect/scala/reflect/internal/SymbolTable.scala @@ -339,9 +339,6 @@ abstract class SymbolTable extends macros.Universe } } - def deferredOpenPackageModule(container: Symbol, dest: Symbol): Unit = { - openPackageModule(container, dest) - } def openPackageModule(container: Symbol, dest: Symbol): Unit = { // unlink existing symbols in the package for (member <- container.info.decls.iterator) { @@ -391,7 +388,7 @@ abstract class SymbolTable extends macros.Universe } /** if there's a `package` member object in `pkgClass`, enter its members into it. 
*/ - def openPackageModule(pkgClass: Symbol): Unit = { + def openPackageModule(pkgClass: Symbol, force: Boolean = false): Unit = { val pkgModule = pkgClass.packageObject def fromSource = pkgModule.rawInfo match { @@ -399,7 +396,7 @@ abstract class SymbolTable extends macros.Universe case _ => false } if (pkgModule.isModule && !fromSource) { - deferredOpenPackageModule(pkgModule, pkgClass) + openPackageModule(pkgModule, pkgClass) } } diff --git a/test/files/run/package-object-toolbox.scala b/test/files/run/package-object-toolbox.scala new file mode 100644 index 000000000000..d84a7e3c2668 --- /dev/null +++ b/test/files/run/package-object-toolbox.scala @@ -0,0 +1,40 @@ +import java.io.File +import java.net.URLClassLoader + +import scala.reflect.io.Path +import scala.reflect.runtime.{ universe => ru } +import scala.tools.partest._ +import scala.tools.reflect.ToolBox + +import org.junit.Assert._ + +object Test extends StoreReporterDirectTest { + val cp = List(sys.props("partest.lib"), testOutput.path) + override def extraSettings = s"-cp ${cp.mkString(File.pathSeparator)}" + + def show(): Unit = { + compiles("package object pkg { def foo = 1 }") + val loader = new URLClassLoader(cp.map(new File(_).toURI.toURL).toArray) + val mirror = ru.runtimeMirror(loader) + + val toolbox = mirror.mkToolBox() + val result1 = toolbox.eval(toolbox.parse("pkg.foo")) + assertEquals(1, result1) + + val obj = toolbox.eval(toolbox.parse("pkg.`package`")) + val pkg = mirror.staticPackage("pkg") + val sym = pkg.info.decl(ru.TermName("foo")).asMethod + val meth = mirror.reflect(obj).reflectMethod(sym) + val res2 = meth.apply() + assertEquals(1, res2) + } + + def compiles(codes: String*) = { + val global = newCompiler() + withRun(global)(_ compileSources newSources(codes: _*)) + assert(!global.reporter.hasErrors, storeReporter.infos.mkString("\n")) + } + + def delete(paths: Path*) = paths.foreach(p => assert(p.delete(), s"$p didn't delete")) + def code = "" +} From 1314cf3d86ba0f70d19308b96ae637adbe1e04fa Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Mon, 9 Jul 2018 11:56:00 -0700 Subject: [PATCH 268/769] [backport] Make test suite work on JDK 11/16 Cherry picks of parts of: 36a43c0314f939c60307a0a5dbe0350746531e3f 8599598443fcc9c1692725aa75877072454f664c bf4c304a71672d375e5f7d238e679b1826cc7489 64ec65670ab9245d2bb460955cb5ec64078ebfab b5367804b0ac742df4d3f364dbc48ef3ce469ac4 0db7e9b650765392b57941d1981477f98075f091 00513cdc3ab19add9f2afb780a0e5eac1b4a4080 4dcc5c002700dceb2fb229a67c6c5dd6c887d55b d99234abfda8c29f57357194db0686f70eae3553 --- build.sbt | 24 ++- .../nsc/backend/jvm/opt/BytecodeUtils.scala | 2 +- test/files/jvm/annotations.check | 69 ++++++ test/files/jvm/annotations/Test_2.scala | 4 +- test/files/jvm/javaReflection.check | 188 +++++++++++++++++ test/files/jvm/javaReflection/Test.scala | 19 +- test/files/jvm/t3003.check | 3 + test/files/jvm/t8786-sig.scala | 12 +- test/files/jvm/throws-annot.check | 23 ++ .../jvm/value-class-in-jannotation.scala | 10 - .../jvm/value-class-in-jannotation/Res.java | 10 + .../jvm/value-class-in-jannotation/Test.scala | 9 + test/files/neg/checksensible.check | 120 ++++++----- test/files/neg/checksensible.scala | 2 +- .../java-import-non-existing-selector.check | 4 + test/files/neg/macro-invalidret/Impls_1.scala | 2 +- test/files/neg/t9529.check | 2 +- test/files/neg/t9529.scala | 6 +- .../files/presentation/infix-completion.check | 199 +----------------- .../infix-completion/src/Snippet.scala | 6 +- .../presentation/infix-completion2.check | 199 +----------------- 
.../infix-completion2/src/Snippet.scala | 6 +- test/files/run/classfile-format-52.scala | 2 +- test/files/run/getClassTest-new.scala | 12 +- test/files/run/getClassTest-old.scala | 10 +- test/files/run/global-showdef.scala | 2 +- test/files/run/junitForwarders/C_1.scala | 6 +- test/files/run/lambda-serialization-gc.scala | 6 +- test/files/run/numbereq.scala | 28 +-- .../run/reflection-magicsymbols-invoke.check | 5 + test/files/run/richs.scala | 10 +- test/files/run/t10471.scala | 2 +- test/files/run/t1167.check | 5 + test/files/run/t1167.scala | 4 +- test/files/run/t2318.scala | 4 +- test/files/run/t3425b/Base_1.scala | 2 +- test/files/run/t3613.scala | 17 +- test/files/run/t4148.scala | 2 +- test/files/run/t5256h.scala | 2 +- test/files/run/t6130.scala | 2 +- test/files/run/t6240-universe-code-gen.scala | 2 +- test/files/run/t6344.check | 12 ++ test/files/run/t6411a.scala | 7 +- test/files/run/t6669.scala | 10 +- test/files/run/t7455.check | 2 + .../run/t7741a/GroovyInterface$1Dump.java | 2 +- test/files/run/t8015-ffc.scala | 2 +- test/files/run/t9030.scala | 10 +- test/files/run/t9097.scala | 2 +- test/files/run/t9437b.scala | 2 +- test/files/run/t9529.check | 21 +- test/files/run/t9529/Test_1.scala | 2 +- .../nsc/backend/jvm/opt/BoxUnboxTest.scala | 6 +- .../nsc/backend/jvm/opt/CallGraphTest.scala | 11 +- .../backend/jvm/opt/MethodLevelOptsTest.scala | 4 +- .../scala/tools/testing/AssertUtil.scala | 16 ++ .../scala/tools/testing/BytecodeTesting.scala | 2 +- test/junit/scala/tools/testing/Resource.java | 13 ++ test/osgi/src/ScalaOsgiHelper.scala | 6 +- .../tools/nsc/scaladoc/HtmlFactoryTest.scala | 4 +- .../tools/nsc/scaladoc/SettingsUtil.scala | 15 +- 61 files changed, 627 insertions(+), 564 deletions(-) delete mode 100644 test/files/jvm/value-class-in-jannotation.scala create mode 100644 test/files/jvm/value-class-in-jannotation/Res.java create mode 100644 test/files/jvm/value-class-in-jannotation/Test.scala create mode 100644 test/junit/scala/tools/testing/Resource.java diff --git a/build.sbt b/build.sbt index e31bf14b7a51..70128525bba0 100644 --- a/build.sbt +++ b/build.sbt @@ -669,6 +669,13 @@ lazy val bench = project.in(file("test") / "benchmarks") scalacOptions ++= Seq("-feature", "-opt:l:inline", "-opt-inline-from:scala/**", "-opt-warnings"), ).settings(inConfig(JmhPlugin.JmhKeys.Jmh)(scalabuild.JitWatchFilePlugin.jitwatchSettings)) +// Jigsaw: reflective access between modules (`setAccessible(true)`) requires an `opens` directive. +// This is enforced by error (not just by warning) since JDK 16. In our tests we use reflective access +// from the unnamed package (the classpath) to JDK modules in testing utilities like `assertNotReachable`. 
+// `add-exports=jdk.jdeps/com.sun.tools.javap` is tests that use `:javap` in the REPL, see scala/bug#12378 +val addOpensForTesting = "-XX:+IgnoreUnrecognizedVMOptions" +: "--add-exports=jdk.jdeps/com.sun.tools.javap=ALL-UNNAMED" +: + Seq("java.util.concurrent.atomic", "java.lang", "java.lang.reflect", "java.net").map(p => s"--add-opens=java.base/$p=ALL-UNNAMED") + lazy val junit = project.in(file("test") / "junit") .dependsOn(library, reflect, compiler, partest, scaladoc) .settings(clearSourceAndResourceDirectories) @@ -677,7 +684,7 @@ lazy val junit = project.in(file("test") / "junit") .settings(disablePublishing) .settings( fork in Test := true, - javaOptions in Test += "-Xss1M", + javaOptions in Test ++= "-Xss1M" +: addOpensForTesting, (forkOptions in Test) := (forkOptions in Test).value.withWorkingDirectory((baseDirectory in ThisBuild).value), (forkOptions in Test in testOnly) := (forkOptions in Test in testOnly).value.withWorkingDirectory((baseDirectory in ThisBuild).value), libraryDependencies ++= Seq(junitDep, junitInterfaceDep, jolDep), @@ -695,7 +702,7 @@ lazy val scalacheck = project.in(file("test") / "scalacheck") .settings( // enable forking to workaround https://github.com/sbt/sbt/issues/4009 fork in Test := true, - javaOptions in Test += "-Xss1M", + javaOptions in Test ++= "-Xss1M" +: addOpensForTesting, testOptions ++= { if ((fork in Test).value) Nil else List(Tests.Cleanup { loader => @@ -712,11 +719,11 @@ lazy val scalacheck = project.in(file("test") / "scalacheck") lazy val osgiTestFelix = osgiTestProject( project.in(file(".") / "target" / "osgiTestFelix"), - "org.apache.felix" % "org.apache.felix.framework" % "5.0.1") + "org.apache.felix" % "org.apache.felix.framework" % "5.6.10") lazy val osgiTestEclipse = osgiTestProject( project.in(file(".") / "target" / "osgiTestEclipse"), - "org.eclipse.tycho" % "org.eclipse.osgi" % "3.10.100.v20150521-1310") + "org.eclipse.tycho" % "org.eclipse.osgi" % "3.13.0.v20180226-1711") def osgiTestProject(p: Project, framework: ModuleID) = p .dependsOn(library, reflect, compiler) @@ -728,7 +735,7 @@ def osgiTestProject(p: Project, framework: ModuleID) = p fork in Test := true, parallelExecution in Test := false, libraryDependencies ++= { - val paxExamVersion = "4.5.0" // Last version which supports Java 6 + val paxExamVersion = "4.11.0" // Last version which supports Java 9+ Seq( junitDep, junitInterfaceDep, @@ -744,8 +751,9 @@ def osgiTestProject(p: Project, framework: ModuleID) = p ) }, Keys.test in Test := (Keys.test in Test).dependsOn(packageBin in Compile).value, + Keys.testOnly in Test := (Keys.testOnly in Test).dependsOn(packageBin in Compile).evaluated, testOptions += Tests.Argument(TestFrameworks.JUnit, "-a", "-v", "-q"), - javaOptions in Test += "-Dscala.bundle.dir=" + (buildDirectory in ThisBuild).value / "osgi", + javaOptions in Test ++= ("-Dscala.bundle.dir=" + (buildDirectory in ThisBuild).value / "osgi") +: addOpensForTesting, (forkOptions in Test in test) := (forkOptions in Test in test).value.withWorkingDirectory((baseDirectory in ThisBuild).value), unmanagedSourceDirectories in Test := List((baseDirectory in ThisBuild).value / "test" / "osgi" / "src"), unmanagedResourceDirectories in Compile := (unmanagedSourceDirectories in Test).value, @@ -797,10 +805,10 @@ lazy val test = project // enable this in 2.13, when tests pass //scalacOptions in Compile += "-Yvalidate-pos:parser,typer", scalacOptions in Compile -= "-Ywarn-unused:imports", - javaOptions in IntegrationTest ++= List("-Xmx2G", "-Dpartest.exec.in.process=true", 
"-Dfile.encoding=UTF-8", "-Duser.language=en", "-Duser.country=US"), + javaOptions in IntegrationTest ++= List("-Xmx2G", "-Dpartest.exec.in.process=true", "-Dfile.encoding=UTF-8", "-Duser.language=en", "-Duser.country=US") ++ addOpensForTesting, testOptions in IntegrationTest += Tests.Argument("-Dfile.encoding=UTF-8", "-Duser.language=en", "-Duser.country=US"), testFrameworks += new TestFramework("scala.tools.partest.sbt.Framework"), - testOptions in IntegrationTest += Tests.Argument("-Dpartest.java_opts=-Xmx1024M -Xms64M"), + testOptions in IntegrationTest += Tests.Argument(s"""-Dpartest.java_opts=-Xmx1024M -Xms64M ${addOpensForTesting.mkString(" ")}"""), testOptions in IntegrationTest += Tests.Argument("-Dpartest.scalac_opts=" + (scalacOptions in Compile).value.mkString(" ")), (forkOptions in IntegrationTest) := (forkOptions in IntegrationTest).value.withWorkingDirectory((baseDirectory in ThisBuild).value), testOptions in IntegrationTest += { diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala index d6fd2d123261..835c7f36ebdd 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala @@ -14,7 +14,7 @@ package scala.tools.nsc package backend.jvm package opt -import scala.annotation.{switch, tailrec} +import scala.annotation.{tailrec, switch} import scala.collection.JavaConverters._ import scala.reflect.internal.util.Collections._ import scala.tools.asm.Opcodes._ diff --git a/test/files/jvm/annotations.check b/test/files/jvm/annotations.check index a8600108124f..968288205b5f 100644 --- a/test/files/jvm/annotations.check +++ b/test/files/jvm/annotations.check @@ -1,3 +1,4 @@ +#partest java8 Test_2.scala:8: warning: class remote in package scala is deprecated (since 2.12.0): extend java.rmi.Remote instead and add @throws[java.rmi.RemoteException] to public methods def foo: Unit = () ^ @@ -65,3 +66,71 @@ public void Test4$Foo12.name_$eq(java.lang.String) 99 dylan 2 +#partest !java8 +Test_2.scala:8: warning: class remote in package scala is deprecated (since 2.12.0): extend java.rmi.Remote instead and add @throws[java.rmi.RemoteException] to public methods + def foo: Unit = () + ^ +class java.rmi.RemoteException +class java.io.IOException +@java.lang.Deprecated(forRemoval=false, since="") +@test.SourceAnnotation_1(mails={scala@lists.epfl.ch,scala-lounge@lists.epfl.ch}, value=http://scala-lang.org) +class Test4$Foo1 + +@test.SourceAnnotation_1(mails={you@bloodsuckers.com}, value=http://bloodsuckers.com) +class Test4$Foo2 + +@test.SourceAnnotation_1(mails={bill.gates@bloodsuckers.com}, value=http://bloodsuckers.com) +class Test4$Foo3 + +@test.SourceAnnotation_1(mails={bill.gates@bloodsuckers.com}, value=file:///dev/null) +private final int Test4$Foo4.x + +@test.SourceAnnotation_1(mails={bill.gates@bloodsuckers.com}, value=file:///dev/zero) +public int Test4$Foo5.bar() + +@test.SourceAnnotation_1(mails={bill.gates@bloodsuckers.com}, value=primary constructor) +public Test4$Foo6(java.lang.String) + +@test.SourceAnnotation_1(mails={bill.gates@bloodsuckers.com}, value=secondary constructor) +public Test4$Foo7() + +@test.SourceAnnotation_1(mails={bill.gates@bloodsuckers.com}, value=constructor val) +public Test4$Foo8(int) + +@test.SourceAnnotation_1(mails={bill.gates@bloodsuckers.com}, value=http://eppli.com) +private int Test4$Foo9.z + +@test.SourceAnnotation_1(mails={bill.gates@bloodsuckers.com}, 
value=http://eppli.com) +private int Test4$Foo9.z2 + +@test.SourceAnnotation_1(mails={bill.gates@bloodsuckers.com}, value=http://eppli.com) +private int Test4$Foo9.z3 + +@test.SourceAnnotation_1(mails={bill.gates@bloodsuckers.com}, value=http://eppli.com) +public int Test4$Foo9.getZ() + +@test.SourceAnnotation_1(mails={bill.gates@bloodsuckers.com}, value=http://eppli.com) +public int Test4$Foo9.getZ2() + +@test.SourceAnnotation_1(mails={bill.gates@bloodsuckers.com}, value=http://eppli.com) +public int Test4$Foo9.getZ3() + +@test.SourceAnnotation_1(mails={bill.gates@bloodsuckers.com}, value=http://apple.com) +public int Test4$Foo9.x() + +@test.SourceAnnotation_1(mails={bill.gates@bloodsuckers.com}, value=http://uppla.com) +public void Test4$Foo9.setY(int) + +@test.SourceAnnotation_1(mails={bill.gates@bloodsuckers.com}, value=on param 1) +public Test4$Foo10(java.lang.String) + +@test.SourceAnnotation_1(mails={bill.gates@bloodsuckers.com}, value=on param 2) +private final java.lang.String Test4$Foo11.name + +@test.SourceAnnotation_1(mails={bill.gates@bloodsuckers.com}, value=on param 3) +public void Test4$Foo12.name_$eq(java.lang.String) + +0 +99 +dylan +2 diff --git a/test/files/jvm/annotations/Test_2.scala b/test/files/jvm/annotations/Test_2.scala index d46bae58d525..f016215c2daf 100644 --- a/test/files/jvm/annotations/Test_2.scala +++ b/test/files/jvm/annotations/Test_2.scala @@ -1,5 +1,5 @@ // scalac: -deprecation - +import scala.tools.partest.Util.ArrayDeep import scala.language.{ higherKinds, reflectiveCalls } object Test1 { @@ -181,7 +181,7 @@ object Test5 { getClass().getMethod("setCount", classOf[Integer]) def get = getter.invoke(this).asInstanceOf[Integer].intValue - def set(n: Int) = setter.invoke(this, new Integer(n)) + def set(n: Int) = setter.invoke(this, Integer.valueOf(n)) } def run { val count = new Count diff --git a/test/files/jvm/javaReflection.check b/test/files/jvm/javaReflection.check index aa11e860ef83..9835b950c967 100644 --- a/test/files/jvm/javaReflection.check +++ b/test/files/jvm/javaReflection.check @@ -1,3 +1,4 @@ +#partest java8 A / A (canon) / A (simple) - declared cls: List(class A$B, interface A$C, class A$D$) - enclosing : null (declaring cls) / null (cls) / null (constr) / null (meth) @@ -166,3 +167,190 @@ T / T (canon) / T (simple) - declared cls: List() - enclosing : null (declaring cls) / null (cls) / null (constr) / null (meth) - properties : false (local) / false (member) +#partest !java8 +A / A (canon) / A (simple) +- declared cls: List(class A$B, interface A$C, class A$D$) +- enclosing : null (declaring cls) / null (cls) / null (constr) / null (meth) +- properties : false (local) / false (member) +A$$anon$2 / null (canon) / (simple) +- declared cls: List() +- enclosing : null (declaring cls) / class A (cls) / null (constr) / null (meth) +- properties : false (local) / false (member) +assert not class A$$anon$2 is anonymous +assert not class A$$anon$2 +A$$anon$3 / null (canon) / (simple) +- declared cls: List() +- enclosing : null (declaring cls) / class A (cls) / null (constr) / public java.lang.Object A.f() (meth) +- properties : false (local) / false (member) +assert not class A$$anon$3 is anonymous +assert not class A$$anon$3 +A$$anon$4 / null (canon) / (simple) +- declared cls: List() +- enclosing : null (declaring cls) / class A (cls) / null (constr) / public java.lang.Object A.f() (meth) +- properties : false (local) / false (member) +assert not class A$$anon$4 is anonymous +assert not class A$$anon$4 +A$$anon$5 / null (canon) / (simple) +- 
declared cls: List() +- enclosing : null (declaring cls) / class A (cls) / null (constr) / null (meth) +- properties : false (local) / false (member) +assert not class A$$anon$5 is anonymous +assert not class A$$anon$5 +A$$anon$6 / null (canon) / (simple) +- declared cls: List() +- enclosing : null (declaring cls) / class A (cls) / null (constr) / null (meth) +- properties : false (local) / false (member) +assert not class A$$anon$6 is anonymous +assert not class A$$anon$6 +A$$anon$7 / null (canon) / (simple) +- declared cls: List() +- enclosing : null (declaring cls) / class A (cls) / public A(int) (constr) / null (meth) +- properties : false (local) / false (member) +assert not class A$$anon$7 is anonymous +assert not class A$$anon$7 +A$B / A.B (canon) / B (simple) +- declared cls: List() +- enclosing : class A (declaring cls) / class A (cls) / null (constr) / null (meth) +- properties : false (local) / true (member) +A$C / A.C (canon) / C (simple) +- declared cls: List() +- enclosing : class A (declaring cls) / class A (cls) / null (constr) / null (meth) +- properties : false (local) / true (member) +A$D$ / A.D$ (canon) / D$ (simple) +- declared cls: List(class A$D$B, interface A$D$C, class A$D$D$) +- enclosing : class A (declaring cls) / class A (cls) / null (constr) / null (meth) +- properties : false (local) / true (member) +A$D$$anon$1 / null (canon) / (simple) +- declared cls: List() +- enclosing : null (declaring cls) / class A$D$ (cls) / null (constr) / null (meth) +- properties : false (local) / false (member) +assert not class A$D$$anon$1 is anonymous +assert not class A$D$$anon$1 +A$D$B / A.D$.B (canon) / B (simple) +- declared cls: List() +- enclosing : class A$D$ (declaring cls) / class A$D$ (cls) / null (constr) / null (meth) +- properties : false (local) / true (member) +A$D$C / A.D$.C (canon) / C (simple) +- declared cls: List() +- enclosing : class A$D$ (declaring cls) / class A$D$ (cls) / null (constr) / null (meth) +- properties : false (local) / true (member) +A$D$D$ / A.D$.D$ (canon) / D$ (simple) +- declared cls: List() +- enclosing : class A$D$ (declaring cls) / class A$D$ (cls) / null (constr) / null (meth) +- properties : false (local) / true (member) +A$D$KB$1 / null (canon) / KB$1 (simple) +- declared cls: List() +- enclosing : null (declaring cls) / class A$D$ (cls) / null (constr) / public void A$D$.f() (meth) +- properties : true (local) / false (member) +A$E$1 / null (canon) / E$1 (simple) +- declared cls: List() +- enclosing : null (declaring cls) / class A (cls) / null (constr) / public java.lang.Object A.f() (meth) +- properties : true (local) / false (member) +A$F$1 / null (canon) / F$1 (simple) +- declared cls: List() +- enclosing : null (declaring cls) / class A (cls) / null (constr) / public java.lang.Object A.f() (meth) +- properties : true (local) / false (member) +A$G$1$ / null (canon) / G$1$ (simple) +- declared cls: List() +- enclosing : null (declaring cls) / class A (cls) / null (constr) / public java.lang.Object A.f() (meth) +- properties : true (local) / false (member) +A$H$1 / null (canon) / H$1 (simple) +- declared cls: List() +- enclosing : null (declaring cls) / class A (cls) / null (constr) / public java.lang.Object A.f() (meth) +- properties : true (local) / false (member) +A$I$1 / null (canon) / I$1 (simple) +- declared cls: List() +- enclosing : null (declaring cls) / class A (cls) / null (constr) / public java.lang.Object A.f() (meth) +- properties : true (local) / false (member) +A$J$1$ / null (canon) / J$1$ (simple) +- declared 
cls: List() +- enclosing : null (declaring cls) / class A (cls) / null (constr) / public java.lang.Object A.f() (meth) +- properties : true (local) / false (member) +A$K$1 / null (canon) / K$1 (simple) +- declared cls: List() +- enclosing : null (declaring cls) / class A (cls) / null (constr) / null (meth) +- properties : true (local) / false (member) +A$L$1 / null (canon) / L$1 (simple) +- declared cls: List() +- enclosing : null (declaring cls) / class A (cls) / null (constr) / null (meth) +- properties : true (local) / false (member) +A$M$1$ / null (canon) / M$1$ (simple) +- declared cls: List() +- enclosing : null (declaring cls) / class A (cls) / null (constr) / null (meth) +- properties : true (local) / false (member) +A$N$1 / null (canon) / N$1 (simple) +- declared cls: List() +- enclosing : null (declaring cls) / class A (cls) / null (constr) / null (meth) +- properties : true (local) / false (member) +A$O$1 / null (canon) / O$1 (simple) +- declared cls: List() +- enclosing : null (declaring cls) / class A (cls) / null (constr) / null (meth) +- properties : true (local) / false (member) +A$P$1$ / null (canon) / P$1$ (simple) +- declared cls: List() +- enclosing : null (declaring cls) / class A (cls) / null (constr) / null (meth) +- properties : true (local) / false (member) +A$Q$1 / null (canon) / Q$1 (simple) +- declared cls: List() +- enclosing : null (declaring cls) / class A (cls) / public A(int) (constr) / null (meth) +- properties : true (local) / false (member) +A$R$1 / null (canon) / R$1 (simple) +- declared cls: List() +- enclosing : null (declaring cls) / class A (cls) / public A(int) (constr) / null (meth) +- properties : true (local) / false (member) +A$S$1$ / null (canon) / S$1$ (simple) +- declared cls: List() +- enclosing : null (declaring cls) / class A (cls) / public A(int) (constr) / null (meth) +- properties : true (local) / false (member) +AO / AO (canon) / AO (simple) +- declared cls: List(class AO$B, interface AO$C, class AO$D$) +- enclosing : null (declaring cls) / null (cls) / null (constr) / null (meth) +- properties : false (local) / false (member) +AO$ / AO$ (canon) / AO$ (simple) +- declared cls: List() +- enclosing : null (declaring cls) / null (cls) / null (constr) / null (meth) +- properties : false (local) / false (member) +AO$$anon$8 / null (canon) / (simple) +- declared cls: List() +- enclosing : null (declaring cls) / class AO$ (cls) / null (constr) / null (meth) +- properties : false (local) / false (member) +assert not class AO$$anon$8 is anonymous +assert not class AO$$anon$8 +AO$B / AO.B (canon) / B (simple) +- declared cls: List() +- enclosing : class AO (declaring cls) / class AO (cls) / null (constr) / null (meth) +- properties : false (local) / true (member) +AO$C / AO.C (canon) / C (simple) +- declared cls: List() +- enclosing : class AO (declaring cls) / class AO (cls) / null (constr) / null (meth) +- properties : false (local) / true (member) +AO$D$ / AO.D$ (canon) / D$ (simple) +- declared cls: List() +- enclosing : class AO (declaring cls) / class AO (cls) / null (constr) / null (meth) +- properties : false (local) / true (member) +AT / AT (canon) / AT (simple) +- declared cls: List(class AT$B, interface AT$C, class AT$D$) +- enclosing : null (declaring cls) / null (cls) / null (constr) / null (meth) +- properties : false (local) / false (member) +AT$$anon$9 / null (canon) / (simple) +- declared cls: List() +- enclosing : null (declaring cls) / interface AT (cls) / null (constr) / null (meth) +- properties : false (local) / false 
(member) +assert not class AT$$anon$9 is anonymous +assert not class AT$$anon$9 +AT$B / AT.B (canon) / B (simple) +- declared cls: List() +- enclosing : interface AT (declaring cls) / interface AT (cls) / null (constr) / null (meth) +- properties : false (local) / true (member) +AT$C / AT.C (canon) / C (simple) +- declared cls: List() +- enclosing : interface AT (declaring cls) / interface AT (cls) / null (constr) / null (meth) +- properties : false (local) / true (member) +AT$D$ / AT.D$ (canon) / D$ (simple) +- declared cls: List() +- enclosing : interface AT (declaring cls) / interface AT (cls) / null (constr) / null (meth) +- properties : false (local) / true (member) +T / T (canon) / T (simple) +- declared cls: List() +- enclosing : null (declaring cls) / null (cls) / null (constr) / null (meth) +- properties : false (local) / false (member) diff --git a/test/files/jvm/javaReflection/Test.scala b/test/files/jvm/javaReflection/Test.scala index 199399fec8e8..3e2965a5efad 100644 --- a/test/files/jvm/javaReflection/Test.scala +++ b/test/files/jvm/javaReflection/Test.scala @@ -50,23 +50,30 @@ getSimpleName / getCanonicalName / isAnonymousClass / isLocalClass / isSynthetic will change some day). */ +import scala.tools.nsc.settings.ScalaVersion +import scala.util.Properties.javaSpecVersion + object Test { + def assert8(b: => Boolean, msg: => Any) = { + if (ScalaVersion(javaSpecVersion) == ScalaVersion("1.8")) assert(b, msg) + else if (!b) println(s"assert not $msg") + } + def tr[T](m: => T): String = try { val r = m if (r == null) "null" else r.toString } catch { case e: InternalError => e.getMessage } - def assertNotAnonymous(c: Class[_]) = { - val an = try { + def assertNotAnonymous(c: Class[_]) = assert8(!isAnonymous(c), s"$c is anonymous") + def isAnonymous(c: Class[_]) = + try { c.isAnonymousClass } catch { // isAnonymousClass is implemented using getSimpleName, which may throw. case e: InternalError => false } - assert(!an, c) - } def ruleMemberOrLocal(c: Class[_]) = { // if it throws, then it's because of the call from isLocalClass to isAnonymousClass. 
@@ -85,7 +92,7 @@ object Test { def ruleScalaAnonClassIsLocal(c: Class[_]) = { if (c.getName contains "$anon$") - assert(c.isLocalClass, c) + assert8(c.isLocalClass, c) } def ruleScalaAnonFunInlineIsLocal(c: Class[_]) = { @@ -134,4 +141,4 @@ object Test { classfiles foreach showClass } -} \ No newline at end of file +} diff --git a/test/files/jvm/t3003.check b/test/files/jvm/t3003.check index c69e389d135e..1e03691f37fa 100644 --- a/test/files/jvm/t3003.check +++ b/test/files/jvm/t3003.check @@ -1 +1,4 @@ +#partest java8 List(List(@Annot(optionType=class java.lang.String))) +#partest !java8 +List(List(@Annot(optionType=java.lang.String.class))) diff --git a/test/files/jvm/t8786-sig.scala b/test/files/jvm/t8786-sig.scala index 13800f4569e1..63e76c4ead3a 100644 --- a/test/files/jvm/t8786-sig.scala +++ b/test/files/jvm/t8786-sig.scala @@ -19,13 +19,14 @@ class A[U] { } object Test extends App { + import scala.util.Properties.isJavaAtLeast val a = classOf[A[_]] def sig (method: String, tp: Class[_]) = a.getDeclaredMethod(method, tp).toString def genSig(method: String, tp: Class[_]) = a.getDeclaredMethod(method, tp).toGenericString def bound (method: String, tp: Class[_]) = { val m = a.getDeclaredMethod(method, tp) - m.getGenericParameterTypes.apply(0) match { + (m.getGenericParameterTypes.apply(0): @unchecked) match { case _: Class[_] => "" case gat: java.lang.reflect.GenericArrayType => val compTp = gat.getGenericComponentType.asInstanceOf[java.lang.reflect.TypeVariable[_]] @@ -58,7 +59,8 @@ object Test extends App { // TODO: the signature for is wrong for T <: Int, scala/bug#9846. The signature should be // `public int A.m4(scala.collection.Seq)`. This is testing the status quo. check(genSig("m4", sq), "public T A.m4(scala.collection.Seq)") - check(genSig("m5", sq), "public T A.m5(scala.collection.Seq)") + if (!isJavaAtLeast("15")) check(genSig("m5", sq), "public T A.m5(scala.collection.Seq)") + if ( isJavaAtLeast("15")) check(genSig("m5", sq), "public T A.m5(scala.collection.Seq)") check(genSig("m6", sq), "public java.lang.String A.m6(scala.collection.Seq)") check(genSig("m7", sq), "public int A.m7(scala.collection.Seq)") check(genSig("m8", sq), "public U A.m8(scala.collection.Seq)") @@ -80,7 +82,8 @@ object Test extends App { check(genSig("m3", ao), "public T A.m3(T...)") // testing status quo: signature is wrong for T <: Int, scala/bug#9846 check(genSig("m4", ao), "public T A.m4(T...)") - check(genSig("m5", as), "public T A.m5(T...)") + if (!isJavaAtLeast("15")) check(genSig("m5", as), "public T A.m5(T...)") + if ( isJavaAtLeast("15")) check(genSig("m5", as), "public T A.m5(T...)") check(genSig("m6", as), "public java.lang.String A.m6(java.lang.String...)") check(genSig("m7", ai), "public int A.m7(int...)") check(genSig("m8", ao), "public U A.m8(U...)") @@ -109,7 +112,8 @@ object Test extends App { check(genSig("n3", ob), "public T A.n3(java.lang.Object)") // testing status quo: signature is wrong for T <: Int, scala/bug#9846 check(genSig("n4", ob), "public T A.n4(java.lang.Object)") - check(genSig("n5", as), "public T A.n5(T[])") + if (!isJavaAtLeast("15")) check(genSig("n5", as), "public T A.n5(T[])") + if ( isJavaAtLeast("15")) check(genSig("n5", as), "public T A.n5(T[])") check(genSig("n6", as), "public java.lang.String A.n6(java.lang.String[])") check(genSig("n7", ai), "public int A.n7(int[])") check(genSig("n8", ob), "public U A.n8(java.lang.Object)") diff --git a/test/files/jvm/throws-annot.check b/test/files/jvm/throws-annot.check index a0ed82b1069a..6ad71fbf3b06 100644 --- 
a/test/files/jvm/throws-annot.check +++ b/test/files/jvm/throws-annot.check @@ -1,3 +1,4 @@ +#partest java8 read throws: class java.io.IOException read annotations: readWith2 throws: class java.lang.ClassCastException, class java.io.IOException @@ -19,3 +20,25 @@ readMixed2 throws: class java.io.IOException, class java.lang.NullPointerExcepti readMixed2 annotations: @java.lang.Deprecated() readNoEx throws: readNoEx annotations: @java.lang.Deprecated() +#partest !java8 +read throws: class java.io.IOException +read annotations: +readWith2 throws: class java.lang.ClassCastException, class java.io.IOException +readWith2 annotations: +readMixed throws: class java.io.IOException, class java.lang.NullPointerException +readMixed annotations: @java.lang.Deprecated(forRemoval=false, since="") +readMixed2 throws: class java.io.IOException, class java.lang.NullPointerException +readMixed2 annotations: @java.lang.Deprecated(forRemoval=false, since="") +readNoEx throws: +readNoEx annotations: @java.lang.Deprecated(forRemoval=false, since="") +Testing mirror class +read throws: class java.io.IOException +read annotations: +readWith2 throws: class java.lang.ClassCastException, class java.io.IOException +readWith2 annotations: +readMixed throws: class java.io.IOException, class java.lang.NullPointerException +readMixed annotations: @java.lang.Deprecated(forRemoval=false, since="") +readMixed2 throws: class java.io.IOException, class java.lang.NullPointerException +readMixed2 annotations: @java.lang.Deprecated(forRemoval=false, since="") +readNoEx throws: +readNoEx annotations: @java.lang.Deprecated(forRemoval=false, since="") diff --git a/test/files/jvm/value-class-in-jannotation.scala b/test/files/jvm/value-class-in-jannotation.scala deleted file mode 100644 index bc466ce510f7..000000000000 --- a/test/files/jvm/value-class-in-jannotation.scala +++ /dev/null @@ -1,10 +0,0 @@ -import javax.annotation.{Resource => R} - -final class Foo[T](val t: T) extends AnyVal - -@R(`type` = classOf[Foo[_]]) -class It - -object Test extends App { - println(classOf[It].getAnnotation(classOf[R]).`type`) -} \ No newline at end of file diff --git a/test/files/jvm/value-class-in-jannotation/Res.java b/test/files/jvm/value-class-in-jannotation/Res.java new file mode 100644 index 000000000000..a394994fd173 --- /dev/null +++ b/test/files/jvm/value-class-in-jannotation/Res.java @@ -0,0 +1,10 @@ + +package res; + +import java.lang.annotation.*; +import static java.lang.annotation.RetentionPolicy.RUNTIME; + +@Retention(RUNTIME) +public @interface Res { + Class type(); +} diff --git a/test/files/jvm/value-class-in-jannotation/Test.scala b/test/files/jvm/value-class-in-jannotation/Test.scala new file mode 100644 index 000000000000..2e5cf2553253 --- /dev/null +++ b/test/files/jvm/value-class-in-jannotation/Test.scala @@ -0,0 +1,9 @@ + +final class Foo[T](val t: T) extends AnyVal + +@res.Res(`type` = classOf[Foo[_]]) +class It + +object Test extends App { + println(classOf[It].getAnnotation(classOf[res.Res]).`type`) +} diff --git a/test/files/neg/checksensible.check b/test/files/neg/checksensible.check index 2e0a0a0eeda4..f4f36f5a1744 100644 --- a/test/files/neg/checksensible.check +++ b/test/files/neg/checksensible.check @@ -1,3 +1,4 @@ +#partest !java8 checksensible.scala:46: warning: Adaptation of argument list by inserting () is deprecated: this is unlikely to be what you want. 
signature: Any.==(x$1: Any): Boolean given arguments: @@ -49,12 +50,24 @@ checksensible.scala:30: warning: comparing values of types Int and String using checksensible.scala:34: warning: comparing values of types Some[Int] and Int using `==` will always yield false Some(1) == 1 // as above ^ +checksensible.scala:36: warning: constructor Boolean in class Boolean is deprecated + true == new java.lang.Boolean(true) // none of these should warn except for deprecated API + ^ +checksensible.scala:37: warning: constructor Boolean in class Boolean is deprecated + new java.lang.Boolean(true) == true + ^ checksensible.scala:39: warning: comparing a fresh object using `==` will always yield false new AnyRef == 1 ^ +checksensible.scala:41: warning: constructor Integer in class Integer is deprecated + 1 == (new java.lang.Integer(1)) // ...something like this + ^ checksensible.scala:42: warning: comparing values of types Int and Boolean using `==` will always yield false 1 == (new java.lang.Boolean(true)) ^ +checksensible.scala:42: warning: constructor Boolean in class Boolean is deprecated + 1 == (new java.lang.Boolean(true)) + ^ checksensible.scala:44: warning: comparing values of types Int and Boolean using `!=` will always yield true 1 != true ^ @@ -116,123 +129,126 @@ checksensible.scala:96: warning: comparing values of types Unit and Int using `! while ((c = in.read) != -1) ^ error: No warnings can be incurred under -Xfatal-warnings. -36 warnings found +40 warnings found one error found -#partest !java8 -checksensible.scala:54: warning: symbol literal is deprecated; use Symbol("sym") instead - (1 != 'sym) - ^ -checksensible.scala:15: warning: comparing a fresh object using `eq` will always yield false +#partest java8 +checksensible.scala:46: warning: Adaptation of argument list by inserting () is deprecated: this is unlikely to be what you want. + signature: Any.==(x$1: Any): Boolean + given arguments: + after adaptation: Any.==((): Unit) + () == () + ^ +checksensible.scala:49: warning: Adaptation of argument list by inserting () is deprecated: this is unlikely to be what you want. + signature: Object.!=(x$1: Any): Boolean + given arguments: + after adaptation: Object.!=((): Unit) + scala.runtime.BoxedUnit.UNIT != () + ^ +checksensible.scala:50: warning: Adaptation of argument list by inserting () is deprecated: this is unlikely to be what you want. 
+ signature: Any.!=(x$1: Any): Boolean + given arguments: + after adaptation: Any.!=((): Unit) + (scala.runtime.BoxedUnit.UNIT: java.io.Serializable) != () // shouldn't warn + ^ +checksensible.scala:14: warning: comparing a fresh object using `eq` will always yield false (new AnyRef) eq (new AnyRef) ^ -checksensible.scala:16: warning: comparing a fresh object using `ne` will always yield true +checksensible.scala:15: warning: comparing a fresh object using `ne` will always yield true (new AnyRef) ne (new AnyRef) ^ -checksensible.scala:17: warning: comparing a fresh object using `eq` will always yield false +checksensible.scala:16: warning: comparing a fresh object using `eq` will always yield false Shmoopie eq (new AnyRef) ^ -checksensible.scala:18: warning: comparing a fresh object using `eq` will always yield false +checksensible.scala:17: warning: comparing a fresh object using `eq` will always yield false (Shmoopie: AnyRef) eq (new AnyRef) ^ -checksensible.scala:19: warning: comparing a fresh object using `eq` will always yield false +checksensible.scala:18: warning: comparing a fresh object using `eq` will always yield false (new AnyRef) eq Shmoopie ^ -checksensible.scala:20: warning: comparing a fresh object using `eq` will always yield false +checksensible.scala:19: warning: comparing a fresh object using `eq` will always yield false (new AnyRef) eq null ^ -checksensible.scala:21: warning: comparing a fresh object using `eq` will always yield false +checksensible.scala:20: warning: comparing a fresh object using `eq` will always yield false null eq new AnyRef ^ -checksensible.scala:28: warning: comparing values of types Unit and Int using `==` will always yield false +checksensible.scala:27: warning: comparing values of types Unit and Int using `==` will always yield false (c = 1) == 0 ^ -checksensible.scala:29: warning: comparing values of types Integer and Unit using `==` will always yield false +checksensible.scala:28: warning: comparing values of types Int and Unit using `==` will always yield false 0 == (c = 1) ^ -checksensible.scala:31: warning: comparing values of types Int and String using `==` will always yield false +checksensible.scala:30: warning: comparing values of types Int and String using `==` will always yield false 1 == "abc" ^ -checksensible.scala:35: warning: comparing values of types Some[Int] and Int using `==` will always yield false +checksensible.scala:34: warning: comparing values of types Some[Int] and Int using `==` will always yield false Some(1) == 1 // as above ^ -checksensible.scala:37: warning: constructor Boolean in class Boolean is deprecated: see corresponding Javadoc for more information. - true == new java.lang.Boolean(true) // none of these should warn except for deprecated API - ^ -checksensible.scala:38: warning: constructor Boolean in class Boolean is deprecated: see corresponding Javadoc for more information. - new java.lang.Boolean(true) == true - ^ -checksensible.scala:40: warning: comparing a fresh object using `==` will always yield false +checksensible.scala:39: warning: comparing a fresh object using `==` will always yield false new AnyRef == 1 ^ -checksensible.scala:42: warning: constructor Integer in class Integer is deprecated: see corresponding Javadoc for more information. 
- 1 == (new java.lang.Integer(1)) // ...something like this - ^ -checksensible.scala:43: warning: comparing values of types Int and Boolean using `==` will always yield false +checksensible.scala:42: warning: comparing values of types Int and Boolean using `==` will always yield false 1 == (new java.lang.Boolean(true)) ^ -checksensible.scala:43: warning: constructor Boolean in class Boolean is deprecated: see corresponding Javadoc for more information. - 1 == (new java.lang.Boolean(true)) - ^ -checksensible.scala:45: warning: comparing values of types Int and Boolean using `!=` will always yield true +checksensible.scala:44: warning: comparing values of types Int and Boolean using `!=` will always yield true 1 != true ^ -checksensible.scala:46: warning: comparing values of types Unit and Boolean using `==` will always yield false +checksensible.scala:45: warning: comparing values of types Unit and Boolean using `==` will always yield false () == true ^ -checksensible.scala:47: warning: comparing values of types Unit and Unit using `==` will always yield true +checksensible.scala:46: warning: comparing values of types Unit and Unit using `==` will always yield true () == () ^ -checksensible.scala:48: warning: comparing values of types Unit and Unit using `==` will always yield true +checksensible.scala:47: warning: comparing values of types Unit and Unit using `==` will always yield true () == println ^ -checksensible.scala:49: warning: comparing values of types Unit and scala.runtime.BoxedUnit using `==` will always yield true +checksensible.scala:48: warning: comparing values of types Unit and scala.runtime.BoxedUnit using `==` will always yield true () == scala.runtime.BoxedUnit.UNIT // these should warn for always being true/false ^ -checksensible.scala:50: warning: comparing values of types scala.runtime.BoxedUnit and Unit using `!=` will always yield false +checksensible.scala:49: warning: comparing values of types scala.runtime.BoxedUnit and Unit using `!=` will always yield false scala.runtime.BoxedUnit.UNIT != () ^ -checksensible.scala:53: warning: comparing values of types Int and Unit using `!=` will always yield true +checksensible.scala:52: warning: comparing values of types Int and Unit using `!=` will always yield true (1 != println) ^ -checksensible.scala:54: warning: comparing values of types Int and Symbol using `!=` will always yield true +checksensible.scala:53: warning: comparing values of types Int and Symbol using `!=` will always yield true (1 != 'sym) ^ -checksensible.scala:60: warning: comparing a fresh object using `==` will always yield false +checksensible.scala:59: warning: comparing a fresh object using `==` will always yield false ((x: Int) => x + 1) == null ^ -checksensible.scala:61: warning: comparing a fresh object using `==` will always yield false +checksensible.scala:60: warning: comparing a fresh object using `==` will always yield false Bep == ((_: Int) + 1) ^ -checksensible.scala:63: warning: comparing a fresh object using `==` will always yield false +checksensible.scala:62: warning: comparing a fresh object using `==` will always yield false new Object == new Object ^ -checksensible.scala:64: warning: comparing a fresh object using `==` will always yield false +checksensible.scala:63: warning: comparing a fresh object using `==` will always yield false new Object == "abc" ^ -checksensible.scala:65: warning: comparing a fresh object using `!=` will always yield true +checksensible.scala:64: warning: comparing a fresh object using `!=` will always 
yield true new Exception() != new Exception() ^ -checksensible.scala:68: warning: comparing values of types Int and Null using `==` will always yield false +checksensible.scala:67: warning: comparing values of types Int and Null using `==` will always yield false if (foo.length == null) "plante" else "plante pas" ^ -checksensible.scala:73: warning: comparing values of types Bip and Bop using `==` will always yield false +checksensible.scala:72: warning: comparing values of types Bip and Bop using `==` will always yield false (x1 == x2) ^ -checksensible.scala:83: warning: comparing values of types EqEqRefTest.this.C3 and EqEqRefTest.this.Z1 using `==` will always yield false +checksensible.scala:82: warning: comparing values of types EqEqRefTest.this.C3 and EqEqRefTest.this.Z1 using `==` will always yield false c3 == z1 ^ -checksensible.scala:84: warning: comparing values of types EqEqRefTest.this.Z1 and EqEqRefTest.this.C3 using `==` will always yield false +checksensible.scala:83: warning: comparing values of types EqEqRefTest.this.Z1 and EqEqRefTest.this.C3 using `==` will always yield false z1 == c3 ^ -checksensible.scala:85: warning: comparing values of types EqEqRefTest.this.Z1 and EqEqRefTest.this.C3 using `!=` will always yield true +checksensible.scala:84: warning: comparing values of types EqEqRefTest.this.Z1 and EqEqRefTest.this.C3 using `!=` will always yield true z1 != c3 ^ -checksensible.scala:86: warning: comparing values of types EqEqRefTest.this.C3 and String using `!=` will always yield true +checksensible.scala:85: warning: comparing values of types EqEqRefTest.this.C3 and String using `!=` will always yield true c3 != "abc" ^ -checksensible.scala:97: warning: comparing values of types Unit and Int using `!=` will always yield true +checksensible.scala:96: warning: comparing values of types Unit and Int using `!=` will always yield true while ((c = in.read) != -1) ^ -error: No warnings can be incurred under -Werror. -38 warnings found +error: No warnings can be incurred under -Xfatal-warnings. 
+36 warnings found one error found diff --git a/test/files/neg/checksensible.scala b/test/files/neg/checksensible.scala index e4d467130c7f..419054b8dd66 100644 --- a/test/files/neg/checksensible.scala +++ b/test/files/neg/checksensible.scala @@ -33,7 +33,7 @@ class EqEqValTest { "abc" == 1 // warns because the lub of String and Int is Any Some(1) == 1 // as above - true == new java.lang.Boolean(true) // none of these should warn + true == new java.lang.Boolean(true) // none of these should warn except for deprecated API new java.lang.Boolean(true) == true new AnyRef == 1 diff --git a/test/files/neg/java-import-non-existing-selector.check b/test/files/neg/java-import-non-existing-selector.check index a85a08e79e3a..925ce3612f0e 100644 --- a/test/files/neg/java-import-non-existing-selector.check +++ b/test/files/neg/java-import-non-existing-selector.check @@ -2,5 +2,9 @@ java-import-non-existing-selector/BadClient.java:3: error: cannot find symbol import static p1.Test.DoesNotExist; ^ symbol: static DoesNotExist +#partest java8 location: class +#partest !java8 + location: class Test +#partest 1 error diff --git a/test/files/neg/macro-invalidret/Impls_1.scala b/test/files/neg/macro-invalidret/Impls_1.scala index 3845c4fa01f0..30bae076c5d4 100644 --- a/test/files/neg/macro-invalidret/Impls_1.scala +++ b/test/files/neg/macro-invalidret/Impls_1.scala @@ -5,7 +5,7 @@ import scala.reflect.runtime.{universe => ru} object Impls { def foo1(c: Context) = 2 def foo2(c: Context) = ru.Literal(ru.Constant(42)) - def foo3(c: Context) = throw null + def foo3(c: Context) = throw new NullPointerException def foo5(c: Context) = c.universe.Literal(c.universe.Constant(42)) def foo6(c: Context) = c.Expr[Int](c.universe.Literal(c.universe.Constant(42))) } diff --git a/test/files/neg/t9529.check b/test/files/neg/t9529.check index c1d30b7a1e70..1d4724a59831 100644 --- a/test/files/neg/t9529.check +++ b/test/files/neg/t9529.check @@ -1,4 +1,4 @@ -t9529.scala:7: error: Java annotation Resource may not appear multiple times on class TooMany +t9529.scala:7: error: Java annotation Deprecated may not appear multiple times on class TooMany class TooMany ^ one error found diff --git a/test/files/neg/t9529.scala b/test/files/neg/t9529.scala index 0be2254ae805..e8593a154f90 100644 --- a/test/files/neg/t9529.scala +++ b/test/files/neg/t9529.scala @@ -2,6 +2,6 @@ @deprecated("bar", "") class `scala ftw` -@javax.annotation.Resource(name = "baz") -@javax.annotation.Resource(name = "quux") -class TooMany \ No newline at end of file +@java.lang.Deprecated +@java.lang.Deprecated +class TooMany diff --git a/test/files/presentation/infix-completion.check b/test/files/presentation/infix-completion.check index b73019fc61df..eaa3a686bdda 100644 --- a/test/files/presentation/infix-completion.check +++ b/test/files/presentation/infix-completion.check @@ -1,184 +1,23 @@ reload: Snippet.scala -askTypeCompletion at Snippet.scala(1,34) +askTypeCompletion at Snippet.scala(1,38) ================================================================================ -[response] askTypeCompletion at (1,34) -retrieved 211 members -[inaccessible] protected def integralNum: math.Numeric.DoubleAsIfIntegral.type -[inaccessible] protected def num: math.Numeric.DoubleIsFractional.type -[inaccessible] protected def ord: math.Ordering.Double.type -[inaccessible] protected def unifiedPrimitiveEquals(x: Any): Boolean -[inaccessible] protected def unifiedPrimitiveHashcode(): Int +[response] askTypeCompletion at (1,38) +retrieved 30 members [inaccessible] 
protected[package lang] def clone(): Object [inaccessible] protected[package lang] def finalize(): Unit -def !=(x: Byte): Boolean -def !=(x: Char): Boolean -def !=(x: Double): Boolean -def !=(x: Float): Boolean -def !=(x: Int): Boolean -def !=(x: Long): Boolean -def !=(x: Short): Boolean -def %(x: Byte): Int -def %(x: Char): Int -def %(x: Double): Double -def %(x: Float): Float -def %(x: Int): Int -def %(x: Long): Long -def %(x: Short): Int -def &(x: Byte): Int -def &(x: Char): Int -def &(x: Int): Int -def &(x: Long): Long -def &(x: Short): Int -def *(x: Byte): Int -def *(x: Char): Int -def *(x: Double): Double -def *(x: Float): Float -def *(x: Int): Int -def *(x: Long): Long -def *(x: Short): Int -def +(x: Byte): Int -def +(x: Char): Int -def +(x: Double): Double -def +(x: Float): Float -def +(x: Int): Int -def +(x: Long): Long -def +(x: Short): Int -def +(x: String): String -def -(x: Byte): Int -def -(x: Char): Int -def -(x: Double): Double -def -(x: Float): Float -def -(x: Int): Int -def -(x: Long): Long -def -(x: Short): Int -def ->[B](y: B): (Int, B) -def /(x: Byte): Int -def /(x: Char): Int -def /(x: Double): Double -def /(x: Float): Float -def /(x: Int): Int -def /(x: Long): Long -def /(x: Short): Int -def <(x: Byte): Boolean -def <(x: Char): Boolean -def <(x: Double): Boolean -def <(x: Float): Boolean -def <(x: Int): Boolean -def <(x: Long): Boolean -def <(x: Short): Boolean -def <<(x: Int): Int -def <<(x: Long): Int -def <=(x: Byte): Boolean -def <=(x: Char): Boolean -def <=(x: Double): Boolean -def <=(x: Float): Boolean -def <=(x: Int): Boolean -def <=(x: Long): Boolean -def <=(x: Short): Boolean -def ==(x: Byte): Boolean -def ==(x: Char): Boolean -def ==(x: Double): Boolean -def ==(x: Float): Boolean -def ==(x: Int): Boolean -def ==(x: Long): Boolean -def ==(x: Short): Boolean -def >(x: Byte): Boolean -def >(x: Char): Boolean -def >(x: Double): Boolean -def >(x: Float): Boolean -def >(x: Int): Boolean -def >(x: Long): Boolean -def >(x: Short): Boolean -def >=(x: Byte): Boolean -def >=(x: Char): Boolean -def >=(x: Double): Boolean -def >=(x: Float): Boolean -def >=(x: Int): Boolean -def >=(x: Long): Boolean -def >=(x: Short): Boolean -def >>(x: Int): Int -def >>(x: Long): Int -def >>>(x: Int): Int -def >>>(x: Long): Int -def ^(x: Byte): Int -def ^(x: Char): Int -def ^(x: Int): Int -def ^(x: Long): Long -def ^(x: Short): Int -def byteValue(): Byte -def ceil: Double -def compare(y: Double): Int -def compare(y: Float): Int -def compare(y: Int): Int -def compare(y: Long): Int -def compareTo(that: Double): Int -def compareTo(that: Float): Int -def compareTo(that: Int): Int -def compareTo(that: Long): Int -def compareTo(x$1: Double): Int -def compareTo(x$1: Float): Int -def compareTo(x$1: Integer): Int -def compareTo(x$1: Long): Int -def doubleValue(): Double -def ensuring(cond: Boolean): Int -def ensuring(cond: Boolean,msg: => Any): Int -def ensuring(cond: Int => Boolean): Int -def ensuring(cond: Int => Boolean,msg: => Any): Int +def +(other: ): One.type +def ->[B](y: B): (One.type, B) +def ensuring(cond: Boolean): One.type +def ensuring(cond: Boolean,msg: => Any): One.type +def ensuring(cond: One.type => Boolean): One.type +def ensuring(cond: One.type => Boolean,msg: => Any): One.type def equals(x$1: Any): Boolean -def floatValue(): Float -def floor: Double def formatted(fmtstr: String): String def hashCode(): Int -def intValue(): Int -def isInfinite(): Boolean -def isInfinity: Boolean -def isNaN(): Boolean -def isNegInfinity: Boolean -def isPosInfinity: Boolean -def isValidLong: 
Boolean -def longValue(): Long -def round: Long -def shortValue(): Short -def to(end: Double): Range.Partial[Double,scala.collection.immutable.NumericRange[Double]] -def to(end: Double,step: Double): scala.collection.immutable.NumericRange.Inclusive[Double] -def to(end: Float): Range.Partial[Float,scala.collection.immutable.NumericRange[Float]] -def to(end: Float,step: Float): scala.collection.immutable.NumericRange.Inclusive[Float] -def to(end: Int): scala.collection.immutable.Range.Inclusive -def to(end: Int,step: Int): scala.collection.immutable.Range.Inclusive -def to(end: Long): scala.collection.immutable.NumericRange.Inclusive[Long] -def to(end: Long,step: Long): scala.collection.immutable.NumericRange.Inclusive[Long] -def toBinaryString: String -def toByte: Byte -def toChar: Char -def toDegrees: Double -def toDouble: Double -def toFloat: Float -def toHexString: String -def toInt: Int -def toLong: Long -def toOctalString: String -def toRadians: Double -def toShort: Short def toString(): String -def unary_+ : Int -def unary_- : Int -def unary_~ : Int -def underlying(): AnyRef -def until(end: Double): Range.Partial[Double,scala.collection.immutable.NumericRange[Double]] -def until(end: Double,step: Double): scala.collection.immutable.NumericRange.Exclusive[Double] -def until(end: Float): Range.Partial[Float,scala.collection.immutable.NumericRange[Float]] -def until(end: Float,step: Float): scala.collection.immutable.NumericRange.Exclusive[Float] -def until(end: Int): scala.collection.immutable.Range -def until(end: Int,step: Int): scala.collection.immutable.Range -def until(end: Long): scala.collection.immutable.NumericRange.Exclusive[Long] -def until(end: Long,step: Long): scala.collection.immutable.NumericRange.Exclusive[Long] -def |(x: Byte): Int -def |(x: Char): Int -def |(x: Int): Int -def |(x: Long): Long -def |(x: Short): Int -def →[B](y: B): (Int, B) +def youCompleteMe(other: One.type): Unit +def →[B](y: B): (One.type, B) final def !=(x$1: Any): Boolean final def ##(): Int final def ==(x$1: Any): Boolean @@ -192,20 +31,4 @@ final def synchronized[T0](x$1: T0): T0 final def wait(): Unit final def wait(x$1: Long): Unit final def wait(x$1: Long,x$2: Int): Unit -override def abs: Double -override def isValidByte: Boolean -override def isValidChar: Boolean -override def isValidInt: Boolean -override def isValidShort: Boolean -override def isWhole(): Boolean -override def max(that: Double): Double -override def max(that: Float): Float -override def max(that: Int): Int -override def max(that: Long): Long -override def min(that: Double): Double -override def min(that: Float): Float -override def min(that: Int): Int -override def min(that: Long): Long -override def signum: Int -private[this] val self: Double ================================================================================ diff --git a/test/files/presentation/infix-completion/src/Snippet.scala b/test/files/presentation/infix-completion/src/Snippet.scala index 7e03c486ba8e..75b07c11a324 100644 --- a/test/files/presentation/infix-completion/src/Snippet.scala +++ b/test/files/presentation/infix-completion/src/Snippet.scala @@ -1 +1,5 @@ -object Snippet{val x = 123; 1 + 1./*!*/} +object Snippet{val x = 123; One + One./*!*/} +object One { + def +(other: One) = this + def youCompleteMe(other: One.type) = () +} diff --git a/test/files/presentation/infix-completion2.check b/test/files/presentation/infix-completion2.check index b73019fc61df..b410fe39f1f9 100644 --- a/test/files/presentation/infix-completion2.check +++ 
b/test/files/presentation/infix-completion2.check @@ -1,184 +1,23 @@ reload: Snippet.scala -askTypeCompletion at Snippet.scala(1,34) +askTypeCompletion at Snippet.scala(1,46) ================================================================================ -[response] askTypeCompletion at (1,34) -retrieved 211 members -[inaccessible] protected def integralNum: math.Numeric.DoubleAsIfIntegral.type -[inaccessible] protected def num: math.Numeric.DoubleIsFractional.type -[inaccessible] protected def ord: math.Ordering.Double.type -[inaccessible] protected def unifiedPrimitiveEquals(x: Any): Boolean -[inaccessible] protected def unifiedPrimitiveHashcode(): Int +[response] askTypeCompletion at (1,46) +retrieved 30 members [inaccessible] protected[package lang] def clone(): Object [inaccessible] protected[package lang] def finalize(): Unit -def !=(x: Byte): Boolean -def !=(x: Char): Boolean -def !=(x: Double): Boolean -def !=(x: Float): Boolean -def !=(x: Int): Boolean -def !=(x: Long): Boolean -def !=(x: Short): Boolean -def %(x: Byte): Int -def %(x: Char): Int -def %(x: Double): Double -def %(x: Float): Float -def %(x: Int): Int -def %(x: Long): Long -def %(x: Short): Int -def &(x: Byte): Int -def &(x: Char): Int -def &(x: Int): Int -def &(x: Long): Long -def &(x: Short): Int -def *(x: Byte): Int -def *(x: Char): Int -def *(x: Double): Double -def *(x: Float): Float -def *(x: Int): Int -def *(x: Long): Long -def *(x: Short): Int -def +(x: Byte): Int -def +(x: Char): Int -def +(x: Double): Double -def +(x: Float): Float -def +(x: Int): Int -def +(x: Long): Long -def +(x: Short): Int -def +(x: String): String -def -(x: Byte): Int -def -(x: Char): Int -def -(x: Double): Double -def -(x: Float): Float -def -(x: Int): Int -def -(x: Long): Long -def -(x: Short): Int -def ->[B](y: B): (Int, B) -def /(x: Byte): Int -def /(x: Char): Int -def /(x: Double): Double -def /(x: Float): Float -def /(x: Int): Int -def /(x: Long): Long -def /(x: Short): Int -def <(x: Byte): Boolean -def <(x: Char): Boolean -def <(x: Double): Boolean -def <(x: Float): Boolean -def <(x: Int): Boolean -def <(x: Long): Boolean -def <(x: Short): Boolean -def <<(x: Int): Int -def <<(x: Long): Int -def <=(x: Byte): Boolean -def <=(x: Char): Boolean -def <=(x: Double): Boolean -def <=(x: Float): Boolean -def <=(x: Int): Boolean -def <=(x: Long): Boolean -def <=(x: Short): Boolean -def ==(x: Byte): Boolean -def ==(x: Char): Boolean -def ==(x: Double): Boolean -def ==(x: Float): Boolean -def ==(x: Int): Boolean -def ==(x: Long): Boolean -def ==(x: Short): Boolean -def >(x: Byte): Boolean -def >(x: Char): Boolean -def >(x: Double): Boolean -def >(x: Float): Boolean -def >(x: Int): Boolean -def >(x: Long): Boolean -def >(x: Short): Boolean -def >=(x: Byte): Boolean -def >=(x: Char): Boolean -def >=(x: Double): Boolean -def >=(x: Float): Boolean -def >=(x: Int): Boolean -def >=(x: Long): Boolean -def >=(x: Short): Boolean -def >>(x: Int): Int -def >>(x: Long): Int -def >>>(x: Int): Int -def >>>(x: Long): Int -def ^(x: Byte): Int -def ^(x: Char): Int -def ^(x: Int): Int -def ^(x: Long): Long -def ^(x: Short): Int -def byteValue(): Byte -def ceil: Double -def compare(y: Double): Int -def compare(y: Float): Int -def compare(y: Int): Int -def compare(y: Long): Int -def compareTo(that: Double): Int -def compareTo(that: Float): Int -def compareTo(that: Int): Int -def compareTo(that: Long): Int -def compareTo(x$1: Double): Int -def compareTo(x$1: Float): Int -def compareTo(x$1: Integer): Int -def compareTo(x$1: Long): Int -def doubleValue(): Double 
-def ensuring(cond: Boolean): Int -def ensuring(cond: Boolean,msg: => Any): Int -def ensuring(cond: Int => Boolean): Int -def ensuring(cond: Int => Boolean,msg: => Any): Int +def +(other: ): One.type +def ->[B](y: B): (Snippet.x.type, B) +def ensuring(cond: Boolean): Snippet.x.type +def ensuring(cond: Boolean,msg: => Any): Snippet.x.type +def ensuring(cond: Snippet.x.type => Boolean): Snippet.x.type +def ensuring(cond: Snippet.x.type => Boolean,msg: => Any): Snippet.x.type def equals(x$1: Any): Boolean -def floatValue(): Float -def floor: Double def formatted(fmtstr: String): String def hashCode(): Int -def intValue(): Int -def isInfinite(): Boolean -def isInfinity: Boolean -def isNaN(): Boolean -def isNegInfinity: Boolean -def isPosInfinity: Boolean -def isValidLong: Boolean -def longValue(): Long -def round: Long -def shortValue(): Short -def to(end: Double): Range.Partial[Double,scala.collection.immutable.NumericRange[Double]] -def to(end: Double,step: Double): scala.collection.immutable.NumericRange.Inclusive[Double] -def to(end: Float): Range.Partial[Float,scala.collection.immutable.NumericRange[Float]] -def to(end: Float,step: Float): scala.collection.immutable.NumericRange.Inclusive[Float] -def to(end: Int): scala.collection.immutable.Range.Inclusive -def to(end: Int,step: Int): scala.collection.immutable.Range.Inclusive -def to(end: Long): scala.collection.immutable.NumericRange.Inclusive[Long] -def to(end: Long,step: Long): scala.collection.immutable.NumericRange.Inclusive[Long] -def toBinaryString: String -def toByte: Byte -def toChar: Char -def toDegrees: Double -def toDouble: Double -def toFloat: Float -def toHexString: String -def toInt: Int -def toLong: Long -def toOctalString: String -def toRadians: Double -def toShort: Short def toString(): String -def unary_+ : Int -def unary_- : Int -def unary_~ : Int -def underlying(): AnyRef -def until(end: Double): Range.Partial[Double,scala.collection.immutable.NumericRange[Double]] -def until(end: Double,step: Double): scala.collection.immutable.NumericRange.Exclusive[Double] -def until(end: Float): Range.Partial[Float,scala.collection.immutable.NumericRange[Float]] -def until(end: Float,step: Float): scala.collection.immutable.NumericRange.Exclusive[Float] -def until(end: Int): scala.collection.immutable.Range -def until(end: Int,step: Int): scala.collection.immutable.Range -def until(end: Long): scala.collection.immutable.NumericRange.Exclusive[Long] -def until(end: Long,step: Long): scala.collection.immutable.NumericRange.Exclusive[Long] -def |(x: Byte): Int -def |(x: Char): Int -def |(x: Int): Int -def |(x: Long): Long -def |(x: Short): Int -def →[B](y: B): (Int, B) +def youCompleteMe(other: One.type): Unit +def →[B](y: B): (Snippet.x.type, B) final def !=(x$1: Any): Boolean final def ##(): Int final def ==(x$1: Any): Boolean @@ -192,20 +31,4 @@ final def synchronized[T0](x$1: T0): T0 final def wait(): Unit final def wait(x$1: Long): Unit final def wait(x$1: Long,x$2: Int): Unit -override def abs: Double -override def isValidByte: Boolean -override def isValidChar: Boolean -override def isValidInt: Boolean -override def isValidShort: Boolean -override def isWhole(): Boolean -override def max(that: Double): Double -override def max(that: Float): Float -override def max(that: Int): Int -override def max(that: Long): Long -override def min(that: Double): Double -override def min(that: Float): Float -override def min(that: Int): Int -override def min(that: Long): Long -override def signum: Int -private[this] val self: Double 
================================================================================ diff --git a/test/files/presentation/infix-completion2/src/Snippet.scala b/test/files/presentation/infix-completion2/src/Snippet.scala index 4eb8c24a2e55..9ffac983b3d6 100644 --- a/test/files/presentation/infix-completion2/src/Snippet.scala +++ b/test/files/presentation/infix-completion2/src/Snippet.scala @@ -1 +1,5 @@ -object Snippet{val x = 123; 1 + x./*!*/} +object Snippet{val x: One.type = 123; One + x./*!*/} +object One { + def +(other: One) = this + def youCompleteMe(other: One.type) = () +} diff --git a/test/files/run/classfile-format-52.scala b/test/files/run/classfile-format-52.scala index b832219bea29..b64837a360ec 100644 --- a/test/files/run/classfile-format-52.scala +++ b/test/files/run/classfile-format-52.scala @@ -63,7 +63,7 @@ class Driver extends HasDefaultMethod { testUnderJavaAtLeast("1.8") { generateInterface() compile() - Class.forName("Driver").newInstance() + Class.forName("Driver").getDeclaredConstructor().newInstance() () } otherwise { println("hello from publicMethod") diff --git a/test/files/run/getClassTest-new.scala b/test/files/run/getClassTest-new.scala index 7d8ec930f476..cea3adc45dce 100644 --- a/test/files/run/getClassTest-new.scala +++ b/test/files/run/getClassTest-new.scala @@ -28,12 +28,12 @@ class AnyRefs { class A class B extends A - def f1 = (new B: Any).getClass().newInstance() - def f2 = (new B: AnyRef).getClass().newInstance() - def f3 = (new B: A).getClass().newInstance() - def f4 = (new B: B).getClass().newInstance() + def f1 = (new B: Any).getClass().getDeclaredConstructor().newInstance() + def f2 = (new B: AnyRef).getClass().getDeclaredConstructor().newInstance() + def f3 = (new B: A).getClass().getDeclaredConstructor().newInstance() + def f4 = (new B: B).getClass().getDeclaredConstructor().newInstance() - def f0[T >: B] = (new B: T).getClass().newInstance() + def f0[T >: B] = (new B: T).getClass().getDeclaredConstructor().newInstance() def f5 = f0[Any] def f6 = f0[AnyRef] @@ -65,4 +65,4 @@ object Test { returnTypes[AnyRefs] foreach println returnTypes[MoreAnyRefs] foreach println } -} \ No newline at end of file +} diff --git a/test/files/run/getClassTest-old.scala b/test/files/run/getClassTest-old.scala index cd1b6b07f630..c916050ddfd2 100644 --- a/test/files/run/getClassTest-old.scala +++ b/test/files/run/getClassTest-old.scala @@ -26,12 +26,12 @@ class AnyRefs { class A class B extends A - def f1 = (new B: Any).getClass().newInstance() - def f2 = (new B: AnyRef).getClass().newInstance() - def f3 = (new B: A).getClass().newInstance() - def f4 = (new B: B).getClass().newInstance() + def f1 = (new B: Any).getClass().getDeclaredConstructor().newInstance() + def f2 = (new B: AnyRef).getClass().getDeclaredConstructor().newInstance() + def f3 = (new B: A).getClass().getDeclaredConstructor().newInstance() + def f4 = (new B: B).getClass().getDeclaredConstructor().newInstance() - def f0[T >: B] = (new B: T).getClass().newInstance() + def f0[T >: B] = (new B: T).getClass().getDeclaredConstructor().newInstance() def f5 = f0[Any] def f6 = f0[AnyRef] diff --git a/test/files/run/global-showdef.scala b/test/files/run/global-showdef.scala index 276fcc1e7c95..eb4b145116ad 100644 --- a/test/files/run/global-showdef.scala +++ b/test/files/run/global-showdef.scala @@ -39,7 +39,7 @@ object Bippy { def interesting(line: String) = (line contains "def showdefTestMember") || (line startsWith "<<-- ") - def run(args: String*) = slurp(args: _*).lines filter interesting foreach println + 
def run(args: String*) = slurp(args: _*).linesIterator filter interesting foreach println classes.zipAll(objects, "", "") foreach { case (c, "") => run("-Xshow-class", c) diff --git a/test/files/run/junitForwarders/C_1.scala b/test/files/run/junitForwarders/C_1.scala index 9fa7830a97bc..919f3118c157 100644 --- a/test/files/run/junitForwarders/C_1.scala +++ b/test/files/run/junitForwarders/C_1.scala @@ -6,10 +6,10 @@ class C extends T object Test extends App { def check(c: Class[_], e: String) = { - val s = c.getDeclaredMethods.sortBy(_.getName).map(m => s"${m.getName} - ${m.getDeclaredAnnotations.mkString(", ")}").mkString(";") + val s = c.getDeclaredMethods.sortBy(_.getName).map(m => s"${m.getName} - ${m.getDeclaredAnnotations.map(ann => "@" + ann.annotationType().getName) mkString(", ")}").mkString(";") assert(s == e, s"found: $s\nexpected: $e") } - check(classOf[C], "foo - @org.junit.Test(timeout=0, expected=class org.junit.Test$None)") + check(classOf[C], "foo - @org.junit.Test") // scala/scala-dev#213, scala/scala#5570: `foo$` should not have the @Test annotation - check(classOf[T], "$init$ - ;foo - @org.junit.Test(timeout=0, expected=class org.junit.Test$None);foo$ - ") + check(classOf[T], "$init$ - ;foo - @org.junit.Test;foo$ - ") } diff --git a/test/files/run/lambda-serialization-gc.scala b/test/files/run/lambda-serialization-gc.scala index 8fa0b4b40206..cc61436e32e3 100644 --- a/test/files/run/lambda-serialization-gc.scala +++ b/test/files/run/lambda-serialization-gc.scala @@ -15,9 +15,7 @@ class C { } object Test { - def main(args: Array[String]): Unit = { - test() - } + def main(args: Array[String]): Unit = test() def test(): Unit = { val loader = getClass.getClassLoader.asInstanceOf[URLClassLoader] @@ -29,7 +27,7 @@ object Test { } val clazz = throwawayLoader.loadClass("C") assert(clazz != loaderCClass) - clazz.newInstance() + clazz.getConstructor().newInstance() } (1 to 4) foreach { i => // This would OOM by the third iteration if we leaked `throwawayLoader` during diff --git a/test/files/run/numbereq.scala b/test/files/run/numbereq.scala index baaf3d8c2ebd..840b0a863902 100644 --- a/test/files/run/numbereq.scala +++ b/test/files/run/numbereq.scala @@ -4,15 +4,15 @@ object Test { val base = List[AnyRef]( BigDecimal(x), BigInt(x), - new java.lang.Double(x.toDouble), - new java.lang.Float(x.toFloat), - new java.lang.Long(x.toLong), - new java.lang.Integer(x) + java.lang.Double.valueOf(x.toDouble), + java.lang.Float.valueOf(x.toFloat), + java.lang.Long.valueOf(x.toLong), + java.lang.Integer.valueOf(x) ) val extras = List( - if (x >= Short.MinValue && x <= Short.MaxValue) List(new java.lang.Short(x.toShort)) else Nil, - if (x >= Byte.MinValue && x <= Byte.MaxValue) List(new java.lang.Byte(x.toByte)) else Nil, - if (x >= Char.MinValue && x <= Char.MaxValue) List(new java.lang.Character(x.toChar)) else Nil + if (x >= Short.MinValue && x <= Short.MaxValue) List(java.lang.Short.valueOf(x.toShort)) else Nil, + if (x >= Byte.MinValue && x <= Byte.MaxValue) List(java.lang.Byte.valueOf(x.toByte)) else Nil, + if (x >= Char.MinValue && x <= Char.MaxValue) List(java.lang.Character.valueOf(x.toChar)) else Nil ).flatten base ::: extras @@ -22,13 +22,13 @@ object Test { List( List(BigDecimal(x, java.math.MathContext.UNLIMITED)), List(x), - if (x.isValidDouble) List(new java.lang.Double(x.toDouble)) else Nil, - if (x.isValidFloat) List(new java.lang.Float(x.toFloat)) else Nil, - if (x.isValidLong) List(new java.lang.Long(x.toLong)) else Nil, - if (x.isValidInt) List(new 
java.lang.Integer(x.toInt)) else Nil, - if (x.isValidShort) List(new java.lang.Short(x.toShort)) else Nil, - if (x.isValidByte) List(new java.lang.Byte(x.toByte)) else Nil, - if (x.isValidChar) List(new java.lang.Character(x.toChar)) else Nil + if (x.isValidDouble) List(java.lang.Double.valueOf(x.toDouble)) else Nil, + if (x.isValidFloat) List(java.lang.Float.valueOf(x.toFloat)) else Nil, + if (x.isValidLong) List(java.lang.Long.valueOf(x.toLong)) else Nil, + if (x.isValidInt) List(java.lang.Integer.valueOf(x.toInt)) else Nil, + if (x.isValidShort) List(java.lang.Short.valueOf(x.toShort)) else Nil, + if (x.isValidByte) List(java.lang.Byte.valueOf(x.toByte)) else Nil, + if (x.isValidChar) List(java.lang.Character.valueOf(x.toChar)) else Nil ).flatten } diff --git a/test/files/run/reflection-magicsymbols-invoke.check b/test/files/run/reflection-magicsymbols-invoke.check index 9ae42cc182d3..cca4151e49fb 100644 --- a/test/files/run/reflection-magicsymbols-invoke.check +++ b/test/files/run/reflection-magicsymbols-invoke.check @@ -64,8 +64,13 @@ testing Object.finalize: () testing Object.getClass: class java.lang.String testing Object.hashCode: 50 testing Object.ne: false +#partest !java15+ testing Object.notify: class java.lang.IllegalMonitorStateException: null testing Object.notifyAll: class java.lang.IllegalMonitorStateException: null +#partest java15+ +testing Object.notify: class java.lang.IllegalMonitorStateException: current thread is not owner +testing Object.notifyAll: class java.lang.IllegalMonitorStateException: current thread is not owner +#partest testing Object.synchronized: 2 testing Object.toString: 2 TODO: also test AnyRef.wait overloads diff --git a/test/files/run/richs.scala b/test/files/run/richs.scala index 4b5345757cba..0cadff88f478 100644 --- a/test/files/run/richs.scala +++ b/test/files/run/richs.scala @@ -75,11 +75,11 @@ object RichStringTest1 extends RichTest { object RichStringTest2 extends RichTest { def run { println("\n" + getObjectName + ":") - Console.print("s1: "); s1.lines foreach println - Console.print("s2: "); s2.lines foreach println - Console.print("s3: "); s3.lines foreach println - Console.print("s4: "); s4.lines foreach println - Console.print("s5: "); s5.lines foreach println + Console.print("s1: "); s1.linesIterator foreach println + Console.print("s2: "); s2.linesIterator foreach println + Console.print("s3: "); s3.linesIterator foreach println + Console.print("s4: "); s4.linesIterator foreach println + Console.print("s5: "); s5.linesIterator foreach println } } object RichStringTest3 extends RichTest { diff --git a/test/files/run/t10471.scala b/test/files/run/t10471.scala index 26d9f1c38ea3..df98544f651c 100644 --- a/test/files/run/t10471.scala +++ b/test/files/run/t10471.scala @@ -21,7 +21,7 @@ object Test extends StoreReporterDirectTest { Console.withOut(baos)(Console.withErr(baos)(compile())) val out = baos.toString("UTF-8") - val fooDefs = out.lines.filter(_.contains("private[this] val foo")).map(_.trim).toList + val fooDefs = out.linesIterator.filter(_.contains("private[this] val foo")).map(_.trim).toList assert(fooDefs.length == 2) assert(fooDefs.forall(_.startsWith("@blort private[this] val foo: String =")), fooDefs) } diff --git a/test/files/run/t1167.check b/test/files/run/t1167.check index 06fedebe7110..7d3a7d740c14 100644 --- a/test/files/run/t1167.check +++ b/test/files/run/t1167.check @@ -1,3 +1,8 @@ +#partest java9+ + + + +#partest java8 anon$1 anon$2 $anonfun$testFunc$1 diff --git a/test/files/run/t1167.scala 
b/test/files/run/t1167.scala index daf8112a1d99..0374e20a7e02 100644 --- a/test/files/run/t1167.scala +++ b/test/files/run/t1167.scala @@ -4,9 +4,7 @@ */ trait Test1 { - def testFunc(i:Int): Unit = { - (i:Int) => i + 5 - } + def testFunc(i: Int): Unit = (i: Int) => i + 5 } /* getName diff --git a/test/files/run/t2318.scala b/test/files/run/t2318.scala index 4239c017b83b..f455fe250643 100644 --- a/test/files/run/t2318.scala +++ b/test/files/run/t2318.scala @@ -1,6 +1,8 @@ +// filter: WARNING.* +// for now, ignore warnings due to reflective invocation import java.security._ -import scala.language.{ reflectiveCalls } +import scala.language.reflectiveCalls object Test { trait Bar { def bar: Unit } diff --git a/test/files/run/t3425b/Base_1.scala b/test/files/run/t3425b/Base_1.scala index bdbc124d2913..e1b1eb17c1e1 100644 --- a/test/files/run/t3425b/Base_1.scala +++ b/test/files/run/t3425b/Base_1.scala @@ -9,7 +9,7 @@ class ABC extends A with B with C { private def reflected = ( Thread.currentThread.getStackTrace takeWhile (_.getMethodName != "main") - exists (_.toString contains "sun.reflect.") + exists (_.toString contains ".reflect.") ) lazy val y: PQ = new PQ(reflected) } diff --git a/test/files/run/t3613.scala b/test/files/run/t3613.scala index 1293f62c0fd4..d8a6a862c925 100644 --- a/test/files/run/t3613.scala +++ b/test/files/run/t3613.scala @@ -1,15 +1,14 @@ class Boopy { - private val s = new Schnuck - def observer : PartialFunction[ Any, Unit ] = s.observer + private val s = new Schnuck + def observer : PartialFunction[ Any, Unit ] = s.observer - private class Schnuck extends javax.swing.AbstractListModel { - model => - val observer : PartialFunction[ Any, Unit ] = { - case "Boopy" => fireIntervalAdded( model, 0, 1 ) - } - def getSize = 0 - def getElementAt( idx: Int ) = ??? 
+ private class Schnuck extends javax.swing.AbstractListModel[AnyRef] { model => + val observer : PartialFunction[ Any, Unit ] = { + case "Boopy" => fireIntervalAdded( model, 0, 1 ) } + def getSize = 0 + def getElementAt(idx: Int): AnyRef = null + } } diff --git a/test/files/run/t4148.scala b/test/files/run/t4148.scala index d543e093abde..44851f257d74 100644 --- a/test/files/run/t4148.scala +++ b/test/files/run/t4148.scala @@ -1,7 +1,7 @@ object Test { val x1 = try { "aaa".asInstanceOf[Int] } catch { case _: Throwable => "cce1" } val x2 = try { (5: Any).asInstanceOf[Int] } catch { case _: Throwable => "cce2" } - val x3 = try { (new java.lang.Short(100.toShort).asInstanceOf[Int]) } catch { case _: Throwable => "cce3" } + val x3 = try { (java.lang.Short.valueOf(100.toShort).asInstanceOf[Int]) } catch { case _: Throwable => "cce3" } def main(args: Array[String]): Unit = { List(x1, x2, x3) foreach println diff --git a/test/files/run/t5256h.scala b/test/files/run/t5256h.scala index 435124a469f9..497faf0de3f0 100644 --- a/test/files/run/t5256h.scala +++ b/test/files/run/t5256h.scala @@ -7,5 +7,5 @@ object Test extends App { println(c) println(c.fullName) // under -Xcheckinit there's an additional $init$ field - c.info.toString.lines.filter(_ != " private var bitmap$init$0: Boolean") foreach println + c.info.toString.linesIterator.filter(_ != " private var bitmap$init$0: Boolean") foreach println } diff --git a/test/files/run/t6130.scala b/test/files/run/t6130.scala index d20ff9208d91..d11cfbf23b29 100644 --- a/test/files/run/t6130.scala +++ b/test/files/run/t6130.scala @@ -52,7 +52,7 @@ object Test extends StoreReporterDirectTest { Console.withOut(baos)(Console.withErr(baos)(compile())) val out = baos.toString("UTF-8") - val unapplySelectorDummies = out.lines.filter(_.contains("")).map(_.trim).toList + val unapplySelectorDummies = out.linesIterator.filter(_.contains("")).map(_.trim).toList assert(unapplySelectorDummies.isEmpty, unapplySelectorDummies) } } diff --git a/test/files/run/t6240-universe-code-gen.scala b/test/files/run/t6240-universe-code-gen.scala index f628299a3e4b..9dce0bf2219b 100644 --- a/test/files/run/t6240-universe-code-gen.scala +++ b/test/files/run/t6240-universe-code-gen.scala @@ -77,7 +77,7 @@ object Test extends App { val actualFile = new java.io.File(testFile.getParent + "/../../../src/reflect/scala/reflect/runtime/JavaUniverseForce.scala").getCanonicalFile val actual = scala.io.Source.fromFile(actualFile) val actualLines = actual.getLines.toList - val generatedLines = code.lines.toList + val generatedLines = code.linesIterator.toList if (actualLines != generatedLines) { val msg = s"""|${actualFile} must be updated. |=========================================================== diff --git a/test/files/run/t6344.check b/test/files/run/t6344.check index b535f31cca05..03f2468145d1 100644 --- a/test/files/run/t6344.check +++ b/test/files/run/t6344.check @@ -4,7 +4,11 @@ public
int C0.v1(int) public int C0.v3() public int C0.v3() public int C0.v4(int,scala.collection.immutable.List) +#partest !java15+ public int C0.v4(int,scala.collection.immutable.List>) +#partest java15+ +public int C0.v4(int,scala.collection.immutable.List>) +#partest public scala.collection.immutable.List C0.v2() public scala.collection.immutable.List> C0.v2() @@ -14,7 +18,11 @@ public java.lang.Object C1.v1(java.lang.Object) public java.lang.Object C1.v3() public java.lang.Object C1.v3() public java.lang.Object C1.v4(java.lang.Object,scala.collection.immutable.List) +#partest !java15+ public java.lang.Object C1.v4(java.lang.Object,scala.collection.immutable.List>) +#partest java15+ +public java.lang.Object C1.v4(java.lang.Object,scala.collection.immutable.List>) +#partest public scala.collection.immutable.List C1.v2() public scala.collection.immutable.List> C1.v2() @@ -24,7 +32,11 @@ public java.lang.String C2.v1(java.lang.String) public java.lang.String C2.v3() public java.lang.String C2.v3() public java.lang.String C2.v4(java.lang.String,scala.collection.immutable.List) +#partest !java15+ public java.lang.String C2.v4(java.lang.String,scala.collection.immutable.List>) +#partest java15+ +public java.lang.String C2.v4(java.lang.String,scala.collection.immutable.List>) +#partest public scala.collection.immutable.List C2.v2() public scala.collection.immutable.List> C2.v2() diff --git a/test/files/run/t6411a.scala b/test/files/run/t6411a.scala index 71a848c00db2..90d50ed68ebd 100644 --- a/test/files/run/t6411a.scala +++ b/test/files/run/t6411a.scala @@ -30,6 +30,9 @@ object a { } object Test extends App { + // strip module name + def filtered(s: Any) = s.toString.replaceAllLiterally("java.base/", "") + def test(methName: String, arg: Any) = { val moduleA = cm.reflect(a) val msym = moduleA.symbol.info.decl(TermName(methName)).asMethod @@ -44,7 +47,7 @@ object Test extends App { } println(s"as seen by Scala reflection: ${msym.asInstanceOf[scala.reflect.internal.Symbols#Symbol].defString}") println(s"as seen by Java reflection: ${mmirror.asInstanceOf[{val jmeth: java.lang.reflect.Method}].jmeth}") - println(s"result = $mresult") + println(s"result = ${filtered(mresult)}") } test("yg_1", new Y(1)) @@ -78,4 +81,4 @@ object Test extends App { // test("zv_6", new Z("6")) test("zni_7", new Z(7)) test("zns_8", new Z("8")) -} \ No newline at end of file +} diff --git a/test/files/run/t6669.scala b/test/files/run/t6669.scala index 27c4970d60ec..fd1fa1124a2f 100644 --- a/test/files/run/t6669.scala +++ b/test/files/run/t6669.scala @@ -10,11 +10,15 @@ object Test extends App { scala.tools.scalap.Main.main(Array("-verbose", "java.lang.Object")) } - val currentLocationCpFragment = File.pathSeparator + "." + // on java 10, lone . instead of something/. + //val currentLocationCpFragment = File.pathSeparator + "." + + // let's assume dirs don't normally have dots + def hasCurrentDir(s: String): Boolean = s.linesIterator.next.split("[ ,:;]").exists(_.endsWith(".")) // now make sure we saw the '.' in the classpath val msg1 = baos.toString() - assert(msg1 contains currentLocationCpFragment, s"Did not see '.' in the default class path. Full results were:\n$msg1") + assert(hasCurrentDir(msg1), s"Did not see '.' in the default class path. Full results were:\n$msg1") // then test again with a user specified classpath baos.reset @@ -25,5 +29,5 @@ object Test extends App { // now make sure we did not see the '.' in the classpath val msg2 = baos.toString() - assert(!(msg2 contains currentLocationCpFragment), s"Did saw '.' 
in the user specified class path. Full results were:\n$msg2") + assert(!hasCurrentDir(msg2), s"Did see '.' in the user specified class path. Full results were:\n$msg2") } diff --git a/test/files/run/t7455.check b/test/files/run/t7455.check index 0eb9342888be..a8b6e7b2e9fc 100644 --- a/test/files/run/t7455.check +++ b/test/files/run/t7455.check @@ -1,4 +1,6 @@ +#partest java8 private[package ] def (x$1: String): Outer[E] private[package ] def (): Outer$PrivateInner private[package ] def (): Outer$PrivateStaticInner private[package ] def (x$2: String): Outer$PublicInner +#partest \ No newline at end of file diff --git a/test/files/run/t7741a/GroovyInterface$1Dump.java b/test/files/run/t7741a/GroovyInterface$1Dump.java index 0c0eab3f1b6d..cc187f353ed4 100644 --- a/test/files/run/t7741a/GroovyInterface$1Dump.java +++ b/test/files/run/t7741a/GroovyInterface$1Dump.java @@ -175,7 +175,7 @@ public static byte[] dump () throws Exception { { mv = cw.visitMethod(ACC_PRIVATE + ACC_STATIC + ACC_SYNTHETIC, "$createCallSiteArray", "()Lorg/codehaus/groovy/runtime/callsite/CallSiteArray;", null, null); mv.visitCode(); - mv.visitLdcInsn(new Integer(0)); + mv.visitLdcInsn(Integer.valueOf(0)); mv.visitTypeInsn(ANEWARRAY, "java/lang/String"); mv.visitVarInsn(ASTORE, 0); mv.visitTypeInsn(NEW, "org/codehaus/groovy/runtime/callsite/CallSiteArray"); diff --git a/test/files/run/t8015-ffc.scala b/test/files/run/t8015-ffc.scala index fe6781be42b1..449faa5bb0f0 100644 --- a/test/files/run/t8015-ffc.scala +++ b/test/files/run/t8015-ffc.scala @@ -2,6 +2,6 @@ object Test extends App { val ms = """This is a long multiline string with \u000d\u000a CRLF embedded.""" - assert(ms.lines.size == 3, s"lines.size ${ms.lines.size}") + assert(ms.linesIterator.size == 3, s"lines.size ${ms.linesIterator.size}") assert(ms contains "\r\n CRLF", "no CRLF") } diff --git a/test/files/run/t9030.scala b/test/files/run/t9030.scala index 48d24e5b547e..82b694e95813 100644 --- a/test/files/run/t9030.scala +++ b/test/files/run/t9030.scala @@ -10,10 +10,10 @@ object Test extends App { def charObject(a: java.lang.Character, b: java.lang.Object) = assert(a == b) - numNum(new Integer(1), new Integer(1)) - numChar(new Integer(97), new Character('a')) - numObject(new Integer(1), new Integer(1)) - numObject(new Integer(97), new Character('a')) + numNum(Integer.valueOf(1), Integer.valueOf(1)) + numChar(Integer.valueOf(97), Character.valueOf('a')) + numObject(Integer.valueOf(1), Integer.valueOf(1)) + numObject(Integer.valueOf(97), Character.valueOf('a')) - charObject(new Character('a'), new Integer(97)) + charObject(Character.valueOf('a'), Integer.valueOf(97)) } diff --git a/test/files/run/t9097.scala b/test/files/run/t9097.scala index 49c9e2f2e5d8..b41c9e410b89 100644 --- a/test/files/run/t9097.scala +++ b/test/files/run/t9097.scala @@ -28,6 +28,6 @@ object Test extends StoreReporterDirectTest { assert(!storeReporter.hasErrors, message = filteredInfos map (_.msg) mkString "; ") val out = baos.toString("UTF-8") // was 2 before the fix, the two PackageDefs for a would both contain the ClassDef for the closure - assert(out.lines.count(_ contains "def $anonfun$hihi$1(x$1: Int): String") == 1, out) + assert(out.linesIterator.count(_ contains "def $anonfun$hihi$1(x$1: Int): String") == 1, out) } } diff --git a/test/files/run/t9437b.scala b/test/files/run/t9437b.scala index 4be233a258c8..9278e02ec8d4 100644 --- a/test/files/run/t9437b.scala +++ b/test/files/run/t9437b.scala @@ -84,7 +84,7 @@ class Driver { try { generateCode() compile() - 
Class.forName("Driver").newInstance() + Class.forName("Driver").getDeclaredConstructor().newInstance() } finally System.setErr(prevErr) diff --git a/test/files/run/t9529.check b/test/files/run/t9529.check index 552d4d38ae8a..38ad198f56ba 100644 --- a/test/files/run/t9529.check +++ b/test/files/run/t9529.check @@ -1,6 +1,6 @@ #partest java8 A: List() -B: List(@javax.annotation.Resource(shareable=true, lookup=, name=B, description=, authenticationType=CONTAINER, type=class java.lang.Object, mappedName=)) +B: List(@java.lang.Deprecated()) C: List(@anns.Ann_0(name=C, value=see)) D: List(@anns.Ann_0$Container(value=[@anns.Ann_0(name=D, value=dee), @anns.Ann_0(name=D, value=dye)])) @@ -15,7 +15,7 @@ u: List(@anns.Ann_0$Container(value=[@anns.Ann_0(name=u, value=you), @anns.Ann_0 List(@anns.Ann_0$Container(value=[@anns.Ann_0(name=, value=constructor), @anns.Ann_0(name=, value=initializer)])) -#partest !java8 +#partest java11 A: List() B: List(@java.lang.Deprecated(forRemoval=false, since="")) C: List(@anns.Ann_0(name="C", value="see")) @@ -32,3 +32,20 @@ u: List(@anns.Ann_0$Container(value={@anns.Ann_0(name="u", value="you"), @anns.A List(@anns.Ann_0$Container(value={@anns.Ann_0(name="", value="constructor"), @anns.Ann_0(name="", value="initializer")})) +#partest java15+ +A: List() +B: List(@java.lang.Deprecated(forRemoval=false, since="")) +C: List(@anns.Ann_0(name="C", value="see")) +D: List(@anns.Ann_0$Container({@anns.Ann_0(name="D", value="dee"), @anns.Ann_0(name="D", value="dye")})) + +x: List(@anns.Ann_0(name="x", value="eks")) +y: List(@anns.Ann_0$Container({@anns.Ann_0(name="y", value="why"), @anns.Ann_0(name="y", value="wye")})) + +t: List(@anns.Ann_0(name="t", value="tee")) +u: List(@anns.Ann_0$Container({@anns.Ann_0(name="u", value="you"), @anns.Ann_0(name="u", value="yew")})) + +1: List(@anns.Ann_0(name="1", value="one")) +2: List(@anns.Ann_0$Container({@anns.Ann_0(name="2", value="two"), @anns.Ann_0(name="2", value="tew")})) + +List(@anns.Ann_0$Container({@anns.Ann_0(name="", value="constructor"), @anns.Ann_0(name="", value="initializer")})) + diff --git a/test/files/run/t9529/Test_1.scala b/test/files/run/t9529/Test_1.scala index d4efcddeb079..5df64f9c89a9 100644 --- a/test/files/run/t9529/Test_1.scala +++ b/test/files/run/t9529/Test_1.scala @@ -2,7 +2,7 @@ import java.lang.reflect._ import anns._ class A -@javax.annotation.Resource(name = "B") class B +@java.lang.Deprecated class B @Ann_0(name = "C", value = "see") class C @Ann_0(name = "D", value = "dee") @Ann_0(name = "D", value = "dye") class D diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/BoxUnboxTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/BoxUnboxTest.scala index 3ed9b019ae9c..89afa68f9ec9 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/BoxUnboxTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/BoxUnboxTest.scala @@ -71,8 +71,8 @@ class BoxUnboxTest extends BytecodeTesting { | } | | def t6: Long = { - | val y = new java.lang.Boolean(true) - | val i: Integer = if (y) new Integer(10) else 13 + | val y = java.lang.Boolean.valueOf(true) + | val i: Integer = if (y) Integer.valueOf(10) else 13 | val j: java.lang.Long = 3l | j + i | } @@ -289,7 +289,7 @@ class BoxUnboxTest extends BytecodeTesting { | | def t3 = { | // boxed before tuple creation, a non-specialized tuple is created - | val t = (new Integer(3), Integer.valueOf(4)) + | val t = (Integer.valueOf(3), Integer.valueOf(4)) | t._1 + t._2 // invokes the generic `_1` / `_2` getters, both values unboxed by Integer2int | } | diff --git 
a/test/junit/scala/tools/nsc/backend/jvm/opt/CallGraphTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/CallGraphTest.scala index 4cd530d1eed5..05df54bc08e6 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/CallGraphTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/CallGraphTest.scala @@ -13,6 +13,7 @@ import scala.reflect.internal.util.JavaClearable import scala.tools.asm.tree._ import scala.tools.nsc.backend.jvm.BackendReporting._ import scala.tools.nsc.reporters.StoreReporter +import scala.tools.testing.AssertUtil._ import scala.tools.testing.BytecodeTesting import scala.tools.testing.BytecodeTesting._ @@ -47,7 +48,7 @@ class CallGraphTest extends BytecodeTesting { val callee = callsite.callee.get assert(callee.callee == target) assert(callee.calleeDeclarationClass == calleeDeclClass) - assert(callee.safeToInline == safeToInline) + assertEquals("safeToInline", safeToInline, callee.safeToInline) assert(callee.annotatedInline == atInline) assert(callee.annotatedNoInline == atNoInline) assert(callsite.argInfos == argInfos) @@ -203,9 +204,9 @@ class CallGraphTest extends BytecodeTesting { compileClasses(code) def callIn(m: String) = callGraph.callsites.find(_._1.name == m).get._2.values.head - assertEquals(callIn("t1").argInfos.toList, List((1, FunctionLiteral))) - assertEquals(callIn("t2").argInfos.toList, List((1, ForwardedParam(2)))) - assertEquals(callIn("t3").argInfos.toList, List((1, FunctionLiteral))) - assertEquals(callIn("t4").argInfos.toList, Nil) + assertEquals(List((1, FunctionLiteral)), callIn("t1").argInfos.toList) + assertEquals(List((1, ForwardedParam(2))), callIn("t2").argInfos.toList) + assertEquals(List((1, FunctionLiteral)), callIn("t3").argInfos.toList) + assertEquals(Nil, callIn("t4").argInfos.toList) } } diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOptsTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOptsTest.scala index f0e52f03c604..a19e63047c57 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOptsTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOptsTest.scala @@ -159,7 +159,7 @@ class MethodLevelOptsTest extends BytecodeTesting { | } |} """.stripMargin - val c = compileClass(code, allowMessage = (info: StoreReporter.Info) => info.msg.contains("there was one deprecation warning")) + val c = compileClass(code, allowMessage = ignoreDeprecations) assertSameCode(getMethod(c, "t"), List( IntOp(BIPUSH, 23), IntOp(NEWARRAY, 5), Op(POP), VarOp(ILOAD, 1), VarOp(ILOAD, 2), Op(IADD), Op(IRETURN))) } @@ -174,7 +174,7 @@ class MethodLevelOptsTest extends BytecodeTesting { | } |} """.stripMargin - val c = compileClass(code, allowMessage = (info: StoreReporter.Info) => info.msg.contains("there was one deprecation warning")) + val c = compileClass(code, allowMessage = ignoreDeprecations) assertSameCode(getMethod(c, "t"), List( TypeOp(NEW, "java/lang/Integer"), Ldc(LDC, "nono"), Invoke(INVOKESPECIAL, "java/lang/Integer", "", "(Ljava/lang/String;)V", false), VarOp(ILOAD, 1), VarOp(ILOAD, 2), Op(IADD), Op(IRETURN))) diff --git a/test/junit/scala/tools/testing/AssertUtil.scala b/test/junit/scala/tools/testing/AssertUtil.scala index e59b70523d53..b18421d0daa6 100644 --- a/test/junit/scala/tools/testing/AssertUtil.scala +++ b/test/junit/scala/tools/testing/AssertUtil.scala @@ -1,6 +1,15 @@ package scala.tools package testing +import org.junit.Assert +import Assert._ +import scala.reflect.ClassTag +import scala.runtime.ScalaRunTime.stringOf +import scala.collection.GenIterable +import 
scala.collection.JavaConverters._ +import scala.collection.mutable +import scala.tools.nsc.settings.ScalaVersion +import scala.util.Properties.javaSpecVersion import java.lang.ref._ import java.lang.reflect.{Field, Modifier} import java.util.IdentityHashMap @@ -114,4 +123,11 @@ object AssertUtil { body roots.foreach(assertNoRef) } + + private[this] val version8 = ScalaVersion("8") + + /** Assert on Java 8, but on later versions, just print if assert would fail. */ + def assert8(b: => Boolean, msg: => Any) = + if (ScalaVersion(javaSpecVersion) == version8) assert(b, msg) + else if (!b) println(s"assert not $msg") } diff --git a/test/junit/scala/tools/testing/BytecodeTesting.scala b/test/junit/scala/tools/testing/BytecodeTesting.scala index 3f4f57781fb1..fd19b35fa978 100644 --- a/test/junit/scala/tools/testing/BytecodeTesting.scala +++ b/test/junit/scala/tools/testing/BytecodeTesting.scala @@ -338,5 +338,5 @@ object BytecodeTesting { def stringLines = l.mkString("\n") } - val ignoreDeprecations = (info: StoreReporter.Info) => info.msg.contains("deprecation") + val ignoreDeprecations = (info: StoreReporter#Info) => info.msg.contains("deprecation") } diff --git a/test/junit/scala/tools/testing/Resource.java b/test/junit/scala/tools/testing/Resource.java new file mode 100644 index 000000000000..36a11b99dd12 --- /dev/null +++ b/test/junit/scala/tools/testing/Resource.java @@ -0,0 +1,13 @@ + +package scala.tools.testing; + +import java.lang.annotation.*; +import static java.lang.annotation.RetentionPolicy.RUNTIME; + +/** + * An annotation for test scenarios, akin to common Resource. + */ +@Retention(RUNTIME) +public @interface Resource { + Class type(); +} diff --git a/test/osgi/src/ScalaOsgiHelper.scala b/test/osgi/src/ScalaOsgiHelper.scala index 7ba8883bb8cf..3864498d4a89 100644 --- a/test/osgi/src/ScalaOsgiHelper.scala +++ b/test/osgi/src/ScalaOsgiHelper.scala @@ -20,7 +20,11 @@ trait ScalaOsgiHelper { def standardOptions: Array[exam.Option] = { val bundles = (allBundleFiles map makeBundle) - bundles ++ Array[exam.Option](junitBundles()) + bundles ++ Array[exam.Option](junitBundles(), bootDelegationPackages( + "sun.*", + "com.sun.*", + "jdk.*" + )) // to change the local repo used (for some operations, but not all -- which is why I didn't bother): // systemProperty("org.ops4j.pax.url.mvn.localRepository").value(sys.props("maven.repo.local"))) } diff --git a/test/scalacheck/scala/tools/nsc/scaladoc/HtmlFactoryTest.scala b/test/scalacheck/scala/tools/nsc/scaladoc/HtmlFactoryTest.scala index 5833b6cf5fe4..00577bc729cc 100644 --- a/test/scalacheck/scala/tools/nsc/scaladoc/HtmlFactoryTest.scala +++ b/test/scalacheck/scala/tools/nsc/scaladoc/HtmlFactoryTest.scala @@ -31,8 +31,8 @@ object HtmlFactoryTest extends Properties("HtmlFactory") { import scala.tools.nsc.doc.{DocFactory, Settings} import scala.tools.nsc.doc.html.HtmlFactory - def createFactory = { - val settings = new Settings({Console.err.println(_)}) + def createFactory: DocFactory = { + val settings = new Settings(Console.err.println) settings.scaladocQuietRun = true settings.nowarn.value = true SettingsUtil.configureClassAndSourcePath(settings) diff --git a/test/scalacheck/scala/tools/nsc/scaladoc/SettingsUtil.scala b/test/scalacheck/scala/tools/nsc/scaladoc/SettingsUtil.scala index 2620bbe91233..6ab703a437a6 100644 --- a/test/scalacheck/scala/tools/nsc/scaladoc/SettingsUtil.scala +++ b/test/scalacheck/scala/tools/nsc/scaladoc/SettingsUtil.scala @@ -7,15 +7,14 @@ import scala.tools.nsc.Settings import 
scala.tools.nsc.scaladoc.HtmlFactoryTest.RESOURCES object SettingsUtil { + /* If the context CL is the application (system) CL, use "java.class.path"; + * otherwise call the hook to set the parent CL to use, assume we're running under SBT. + */ def configureClassAndSourcePath(settings: Settings): Settings = { - val ourClassLoader = HtmlFactoryTest.getClass.getClassLoader - Thread.currentThread.getContextClassLoader match { - case loader: URLClassLoader => - val paths = loader.getURLs.map(u => URLDecoder.decode(u.getPath)) - settings.classpath.value = paths mkString java.io.File.pathSeparator - case loader => - settings.embeddedDefaults(ourClassLoader) // Running in SBT without forking, we have to ask the SBT classloader for the classpath - } + if (Thread.currentThread.getContextClassLoader == ClassLoader.getSystemClassLoader) + settings.usejavacp.value = true + else + settings.embeddedDefaults[HtmlFactoryTest.type] settings } From d25165422100395133faa82396f4feb45c8ef77b Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 1 Jul 2021 16:50:50 +1000 Subject: [PATCH 269/769] [backport] avoid MiMa complaints on JDK 11/16 --- project/MimaFilters.scala | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/project/MimaFilters.scala b/project/MimaFilters.scala index 6df340f475e6..235be79b2afd 100644 --- a/project/MimaFilters.scala +++ b/project/MimaFilters.scala @@ -19,6 +19,19 @@ object MimaFilters extends AutoPlugin { val mimaFilters: Seq[ProblemFilter] = Seq[ProblemFilter]( // KEEP: scala.reflect.internal isn't public API ProblemFilters.exclude[Problem]("scala.reflect.internal.*"), + + // KEEP: java.util.Enumeration.asIterator only exists in later JDK versions (11 at least). If you build + // with JDK 11 and run MiMa it'll complain IteratorWrapper isn't forwards compatible with 2.13.0 - but we + // don't publish the artifact built with JDK 11 anyways + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.convert.Wrappers#IteratorWrapper.asIterator"), + + // KEEP: when building on a recent JDK, classes implementing `CharSequence` get a mixin forwarder for + // the `isEmpty` default method that was added in JDK 15 + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.Predef#SeqCharSequence.isEmpty"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.runtime.SeqCharSequence.isEmpty"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.Predef#ArrayCharSequence.isEmpty"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.runtime.ArrayCharSequence.isEmpty"), + ) override val buildSettings = Seq( From 1939633e973df192ebb116c4a9f7a184bd30f153 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 1 Jul 2021 14:46:26 +1000 Subject: [PATCH 270/769] Temporarily move to JDK 11 for Travis --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index b0b6e9083e2a..02b352c0a1d9 100644 --- a/.travis.yml +++ b/.travis.yml @@ -60,7 +60,7 @@ jobs: env: global: - - ADOPTOPENJDK=8 + - ADOPTOPENJDK=11 - secure: "TuJOUtALynPd+MV1AuMeIpVb8BUBHr7Ul7FS48XhS2PyuTRpEBkSWybYcNg3AXyzmWDAuOjUxbaNMQBvP8vvehTbIYls5H5wTGKvj0D0TNVaPIXjF8bA8KyNat9xGNzhnWm2/2BMaWpKBJWRF7Jb+zHhijMYCJEbkMtoiE5R/mY=" # PRIV_KEY_SECRET, for scripts/travis-publish-spec.sh - secure: "T1fxtvLTxioyXJYiC/zVYdNYsBOt+0Piw+xE04rB1pzeKahm9+G2mISdcAyqv6/vze9eIJt6jNHHpKX32/Z3Cs1/Ruha4m3k+jblj3S0SbxV6ht2ieJXLT5WoUPFRrU68KXI8wqUadXpjxeJJV53qF2FC4lhfMUsw1IwwMhdaE8=" # PRIVATE_REPO_PASS - secure: 
"feE5A8mYNpkNQKVwCj3aXrwjVrJWh/4ENpRfFlr2HOD9ORk1GORD5Yq907WZd+dTkYK54Lh1gA+qHOCIDgJHbi9ZLU+kjzEjtYKF6lQy6Wb0LI8smTOnAA6IWVVYifiXw8d66MI2MKZb2jjGeIzy8Q00SZjLhEGjLyTeCIB88Ws=" # SONA_USER From a73c33525e8e5bfa58772a4c2bbfabae3a9c0e77 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 1 Jul 2021 14:46:40 +1000 Subject: [PATCH 271/769] Temporarily move to JDK 16 for Travis --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 02b352c0a1d9..8ab8f61342b4 100644 --- a/.travis.yml +++ b/.travis.yml @@ -60,7 +60,7 @@ jobs: env: global: - - ADOPTOPENJDK=11 + - ADOPTOPENJDK=16 - secure: "TuJOUtALynPd+MV1AuMeIpVb8BUBHr7Ul7FS48XhS2PyuTRpEBkSWybYcNg3AXyzmWDAuOjUxbaNMQBvP8vvehTbIYls5H5wTGKvj0D0TNVaPIXjF8bA8KyNat9xGNzhnWm2/2BMaWpKBJWRF7Jb+zHhijMYCJEbkMtoiE5R/mY=" # PRIV_KEY_SECRET, for scripts/travis-publish-spec.sh - secure: "T1fxtvLTxioyXJYiC/zVYdNYsBOt+0Piw+xE04rB1pzeKahm9+G2mISdcAyqv6/vze9eIJt6jNHHpKX32/Z3Cs1/Ruha4m3k+jblj3S0SbxV6ht2ieJXLT5WoUPFRrU68KXI8wqUadXpjxeJJV53qF2FC4lhfMUsw1IwwMhdaE8=" # PRIVATE_REPO_PASS - secure: "feE5A8mYNpkNQKVwCj3aXrwjVrJWh/4ENpRfFlr2HOD9ORk1GORD5Yq907WZd+dTkYK54Lh1gA+qHOCIDgJHbi9ZLU+kjzEjtYKF6lQy6Wb0LI8smTOnAA6IWVVYifiXw8d66MI2MKZb2jjGeIzy8Q00SZjLhEGjLyTeCIB88Ws=" # SONA_USER From 9ced62e5e790dd21155fa9fe839a123009c79516 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 1 Jul 2021 14:47:15 +1000 Subject: [PATCH 272/769] Revert to JDK 8 --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 8ab8f61342b4..b0b6e9083e2a 100644 --- a/.travis.yml +++ b/.travis.yml @@ -60,7 +60,7 @@ jobs: env: global: - - ADOPTOPENJDK=16 + - ADOPTOPENJDK=8 - secure: "TuJOUtALynPd+MV1AuMeIpVb8BUBHr7Ul7FS48XhS2PyuTRpEBkSWybYcNg3AXyzmWDAuOjUxbaNMQBvP8vvehTbIYls5H5wTGKvj0D0TNVaPIXjF8bA8KyNat9xGNzhnWm2/2BMaWpKBJWRF7Jb+zHhijMYCJEbkMtoiE5R/mY=" # PRIV_KEY_SECRET, for scripts/travis-publish-spec.sh - secure: "T1fxtvLTxioyXJYiC/zVYdNYsBOt+0Piw+xE04rB1pzeKahm9+G2mISdcAyqv6/vze9eIJt6jNHHpKX32/Z3Cs1/Ruha4m3k+jblj3S0SbxV6ht2ieJXLT5WoUPFRrU68KXI8wqUadXpjxeJJV53qF2FC4lhfMUsw1IwwMhdaE8=" # PRIVATE_REPO_PASS - secure: "feE5A8mYNpkNQKVwCj3aXrwjVrJWh/4ENpRfFlr2HOD9ORk1GORD5Yq907WZd+dTkYK54Lh1gA+qHOCIDgJHbi9ZLU+kjzEjtYKF6lQy6Wb0LI8smTOnAA6IWVVYifiXw8d66MI2MKZb2jjGeIzy8Q00SZjLhEGjLyTeCIB88Ws=" # SONA_USER From b938a7997062f23084ca8eb6236e29f589f745d8 Mon Sep 17 00:00:00 2001 From: Martijn Hoekstra Date: Thu, 1 Jul 2021 18:26:13 +0200 Subject: [PATCH 273/769] redo documentation of isInstanceOf with more details --- src/library-aux/scala/Any.scala | 29 ++++++++++++++++++++++------- 1 file changed, 22 insertions(+), 7 deletions(-) diff --git a/src/library-aux/scala/Any.scala b/src/library-aux/scala/Any.scala index a38b563d9bf0..39f3f635572a 100644 --- a/src/library-aux/scala/Any.scala +++ b/src/library-aux/scala/Any.scala @@ -110,13 +110,28 @@ abstract class Any { */ final def ## : Int = sys.error("##") - /** Test whether the dynamic type of the receiver object is `T0`. - * - * Note that the result of the test is modulo Scala's erasure semantics. - * Therefore the expression `1.isInstanceOf[String]` will return `false`, while the - * expression `List(1).isInstanceOf[List[String]]` will return `true`. - * In the latter example, because the type argument is erased as part of compilation it is - * not possible to check whether the contents of the list are of the specified type. + /** Test whether the dynamic type of the receiver object has the same erasure to `T0`. 
+ * + * Depending on what `T0` is, the test is done in one of the below ways: + * + * - `T0` is a non-parameterized class type, e.g. `BigDecimal`: this method returns `true` if + * the value of the receiver object is a `BigDecimal` or a subtype of `BigDecimal`. + * - `T0` is a parameterized class type, e.g. `List[Int]`: this method returns `true` if + * the value of the receiver object is some `List[X]` for any `X`. + * For example, `List(1, 2, 3).isInstanceOf[List[String]]` will return true. + * - `T0` is some singleton type `x.type` or literal `x`: this method returns `this.eq(x)`. + * For example, `x.isInstanceOf[1]` is equivalent to `x.eq(1)` + * - `T0` is a union `X with Y`: this method is equivalent to `x.isInstanceOf[X] && x.isInstanceOf[Y]` + * - `T0` is a type parameter or an abstract type member: this method is equivalent + * to `isInstanceOf[U]` where `U` is `A`'s upper bound, `Any` if `A` is unbounded. + * For example, `x.isInstanceOf[A]` where `A` is an unbounded type parameter + * will return true for any value of `x`. + * + * This is exactly equivalent to the type pattern `_: T0` + * + * @note due to the unexpectedness of `List(1, 2, 3).isInstanceOf[List[String]]` returning true and + * `x.isInstanceOf[A]` where `A` is a type parameter or abstract member returning true, + * these forms issue a warning. * * @return `true` if the receiver object is an instance of erasure of type `T0`; `false` otherwise. */ From 35e576f8ae4cf586645b94b9f81adbfd467b10aa Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Sat, 19 Jun 2021 10:58:37 -0700 Subject: [PATCH 274/769] Refactor checkable and use set Textual refactor for readability. Prefer local defs. Previous optimization used ListBuffer to collect type args, but since a Set is required, use one directly. --- .../tools/nsc/typechecker/Checkable.scala | 180 +++++++++--------- .../scala/tools/nsc/typechecker/Infer.scala | 2 +- .../tools/nsc/typechecker/PatternTypers.scala | 7 +- .../scala/reflect/internal/Types.scala | 30 +-- 4 files changed, 110 insertions(+), 109 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala index ed210ff3b83b..ed146327730c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala @@ -13,8 +13,6 @@ package scala.tools.nsc package typechecker -import Checkability._ -import scala.collection.mutable.ListBuffer import scala.tools.nsc.Reporting.WarningCategory /** On pattern matcher checkability: @@ -39,7 +37,7 @@ import scala.tools.nsc.Reporting.WarningCategory * * There are four possibilities to consider: * [P1] X will always conform to P - * [P2] x will never conform to P + * [P2] X will never conform to P * [P3] X will conform to P if some runtime test is true * [P4] X cannot be checked against P * @@ -52,7 +50,7 @@ import scala.tools.nsc.Reporting.WarningCategory * which is essentially the intersection of X and |P|, where |P| is * the erasure of P. If XR <: P, then no warning is emitted. * - * We evaluate "X with conform to P" by checking `X <: P_wild`, where + * We evaluate "X will conform to P" by checking `X <: P_wild`, where * P_wild is the result of substituting wildcard types in place of * pattern type variables. This is intentionally stricter than * (X matchesPattern P), see scala/bug#8597 for motivating test cases. 
@@ -77,6 +75,22 @@ trait Checkable { import global._ import definitions._ + type Checkability = Int + object Checkability { + final val StaticallyTrue = 0 + final val StaticallyFalse = 1 + final val RuntimeCheckable = 2 + final val Uncheckable = 3 + final val CheckabilityError = 4 + lazy val describe: (Int => String) = List( + "statically true", + "statically false", + "runtime checkable", + "uncheckable", + "error", + ) + } + /** The applied type of class 'to' after inferring anything * possible from the knowledge that 'to' must also be of the * type given in 'from'. @@ -114,62 +128,30 @@ trait Checkable { appliedType(to, resArgs) } - private def isUnwarnableTypeArgSymbol(sym: Symbol) = ( - sym.isTypeParameter // dummy - || (sym.name.toTermName == nme.WILDCARD) // _ - || nme.isVariableName(sym.name) // type variable - ) - private def isUnwarnableTypeArg(arg: Type) = ( - uncheckedOk(arg) // @unchecked T - || isUnwarnableTypeArgSymbol(arg.typeSymbolDirect) // has to be direct: see pos/t1439 - ) - private def uncheckedOk(tp: Type) = tp hasAnnotation UncheckedClass - - private def typeArgsInTopLevelType(tp: Type): List[Type] = { - val res: ListBuffer[Type] = ListBuffer.empty[Type] - def add(t: Type) = if (!isUnwarnableTypeArg(t)) res += t - def loop(tp: Type): Unit = tp match { - case RefinedType(parents, _) => - parents foreach loop - case TypeRef(_, ArrayClass, arg :: Nil) => - if (arg.typeSymbol.isAbstractType) add(arg) else loop(arg) - case TypeRef(pre, sym, args) => - loop(pre) - args.foreach(add) - case ExistentialType(tparams, underlying) => - tparams.foreach(tp => add(tp.tpe)) - loop(underlying) - case _ => () - } - loop(tp) - res.toList - } + private def uncheckedOk(tp: Type) = tp.hasAnnotation(UncheckedClass) private def scrutConformsToPatternType(scrut: Type, pattTp: Type): Boolean = { - def typeVarToWildcard(tp: Type) = { - // The need for typeSymbolDirect is demonstrated in neg/t8597b.scala - if (tp.typeSymbolDirect.isPatternTypeVariable) WildcardType else tp - } + // The need for typeSymbolDirect is demonstrated in neg/t8597b.scala + def typeVarToWildcard(tp: Type) = if (tp.typeSymbolDirect.isPatternTypeVariable) WildcardType else tp val pattTpWild = pattTp.map(typeVarToWildcard) scrut <:< pattTpWild } private class CheckabilityChecker(val X: Type, val P: Type, isRecheck: Boolean = false) { + import Checkability._ + import erasure.GenericArray def Xsym = X.typeSymbol def Psym = P.typeSymbol - def PErased = { + def PErased = P match { - case erasure.GenericArray(n, core) => existentialAbstraction(core.typeSymbol :: Nil, P) - case _ => existentialAbstraction(Psym.typeParams, Psym.tpe_*) + case GenericArray(n, core) => existentialAbstraction(core.typeSymbol :: Nil, P) + case _ => existentialAbstraction(Psym.typeParams, Psym.tpe_*) } - } - def XR = if (Xsym == AnyClass) PErased else propagateKnownTypes(X, Psym) - - // sadly the spec says (new java.lang.Boolean(true)).isInstanceOf[scala.Boolean] - def P1 = scrutConformsToPatternType(X, P) - def P2 = !Psym.isPrimitiveValueClass && isNeverSubType(X, P) - def P3 = isNonRefinementClassType(P) && scrutConformsToPatternType(XR, P) - def P4 = !(P1 || P2 || P3) + def XR = if (Xsym == AnyClass) PErased else propagateKnownTypes(X, Psym) + def P1 = scrutConformsToPatternType(X, P) + def P2 = !Psym.isPrimitiveValueClass && isNeverSubType(X, P) + def P3 = isNonRefinementClassType(P) && scrutConformsToPatternType(XR, P) + def P4 = !(P1 || P2 || P3) def summaryString = f""" |Checking checkability of (x: $X) against pattern $P @@ -179,20 +161,47 @@ 
trait Checkable { |[P4] $P4%-6s None of the above // !(P1 || P2 || P3) """.stripMargin.trim - val result = ( + val result: Checkability = if (X.isErroneous || P.isErroneous) CheckabilityError else if (P1) StaticallyTrue else if (P2) StaticallyFalse else if (P3) RuntimeCheckable - else if (uncheckableType == NoType) { - // Avoid warning (except ourselves) if we can't pinpoint the uncheckable type - debuglog("Checkability checker says 'Uncheckable', but uncheckable type cannot be found:\n" + summaryString) + else if (uncheckableType != NoType) Uncheckable + else { // Avoid warning (except ourselves) if we can't pinpoint the uncheckable type + debuglog(s"Checkability checker says 'Uncheckable', but uncheckable type cannot be found:\n$summaryString") CheckabilityError } - else Uncheckable - ) + // collect type args which are candidates for warning because uncheckable + private def typeArgsInTopLevelType(tp: Type): Set[Type] = { + def isUnwarnableTypeArg(arg: Type) = { + def isUnwarnableTypeArgSymbol(sym: Symbol) = { + sym.isTypeParameter || // dummy + sym.name.toTermName == nme.WILDCARD || // _ + nme.isVariableName(sym.name) // type variable + } + uncheckedOk(arg) || // @unchecked T + isUnwarnableTypeArgSymbol(arg.typeSymbolDirect) // has to be direct: see pos/t1439 + } + var res: Set[Type] = Set.empty[Type] + def add(t: Type): Unit = if (!isUnwarnableTypeArg(t)) res += t + def loop(tp: Type): Unit = tp match { + case RefinedType(parents, _) => + parents.foreach(loop) + case TypeRef(_, ArrayClass, arg :: Nil) => + if (arg.typeSymbol.isAbstractType) add(arg) else loop(arg) + case TypeRef(pre, sym, args) => + loop(pre) + args.foreach(add) + case ExistentialType(tparams, underlying) => + tparams.foreach(tp => add(tp.tpe)) + loop(underlying) + case _ => () + } + loop(tp) + res + } lazy val uncheckableType = if (Psym.isAbstractType) P else { - val possibles = typeArgsInTopLevelType(P).toSet + val possibles = typeArgsInTopLevelType(P) val opt = possibles find { targ => // Create a derived type with every possibly uncheckable type replaced // with a WildcardType, except for 'targ'. If !(XR <: derived) then @@ -214,20 +223,6 @@ trait Checkable { case tp => "non-variable type argument " + tp } - /** Are these symbols classes with no subclass relationship? */ - def areUnrelatedClasses(sym1: Symbol, sym2: Symbol) = ( - sym1.isClass - && sym2.isClass - && !(sym1 isSubClass sym2) - && !(sym2 isSubClass sym1) - ) - /** Are all children of these symbols pairwise irreconcilable? */ - def allChildrenAreIrreconcilable(sym1: Symbol, sym2: Symbol) = { - val sc1 = sym1.sealedChildren - val sc2 = sym2.sealedChildren - sc1.forall(c1 => sc2.forall(c2 => areIrreconcilableAsParents(c1, c2))) - } - /** Is it impossible for the given symbols to be parents in the same class? * This means given A and B, can there be an instance of A with B? This is the * case if neither A nor B is a subclass of the other, and one of the following @@ -242,12 +237,27 @@ trait Checkable { * populated until typer. As a workaround, in this case, this check is performed a second * time at the end of typer. #6537, #12414 */ - def areIrreconcilableAsParents(sym1: Symbol, sym2: Symbol): Boolean = areUnrelatedClasses(sym1, sym2) && ( + def areIrreconcilableAsParents(sym1: Symbol, sym2: Symbol): Boolean = { + // Are these symbols classes with no subclass relationship? 
+ def areUnrelatedClasses(sym1: Symbol, sym2: Symbol) = ( + sym1.isClass + && sym2.isClass + && !sym1.isSubClass(sym2) + && !sym2.isSubClass(sym1) + ) + // Are all children of these symbols pairwise irreconcilable? + def allChildrenAreIrreconcilable(sym1: Symbol, sym2: Symbol) = { + val sc1 = sym1.sealedChildren + val sc2 = sym2.sealedChildren + sc1.forall(c1 => sc2.forall(c2 => areIrreconcilableAsParents(c1, c2))) + } + areUnrelatedClasses(sym1, sym2) && ( isEffectivelyFinal(sym1) // initialization important || isEffectivelyFinal(sym2) || !sym1.isTrait && !sym2.isTrait || isSealedOrFinal(sym1) && isSealedOrFinal(sym2) && allChildrenAreIrreconcilable(sym1, sym2) && (isRecheck || !currentRun.compiles(sym1) && !currentRun.compiles(sym2)) - ) + ) + } private def isSealedOrFinal(sym: Symbol) = sym.isSealed || sym.isFinal // initialization important private def isEffectivelyFinal(sym: Symbol): Boolean = sym.initialize.isEffectivelyFinalOrNotOverridden @@ -288,14 +298,13 @@ trait Checkable { def isUncheckable(P0: Type) = !isCheckable(P0) - def isCheckable(P0: Type): Boolean = ( + def isCheckable(P0: Type): Boolean = uncheckedOk(P0) || (P0.widen match { case TypeRef(_, NothingClass | NullClass | AnyValClass, _) => false case RefinedType(_, decls) if !decls.isEmpty => false - case RefinedType(parents, _) => parents forall isCheckable + case RefinedType(parents, _) => parents.forall(isCheckable) case p => new CheckabilityChecker(AnyTpe, p).isCheckable }) - ) /** TODO: much better error positions. * Kind of stuck right now because they just pass us the one tree. @@ -304,11 +313,12 @@ trait Checkable { * Instead of the canRemedy flag, annotate uncheckable types that have become checkable because of the availability of a class tag? */ def checkCheckable(tree: Tree, P0: Type, X0: Type, inPattern: Boolean, canRemedy: Boolean = false): Unit = if (!uncheckedOk(P0)) { + import Checkability._ + if (P0.typeSymbol == SingletonClass) context.warning(tree.pos, s"fruitless type test: every non-null value will be a Singleton dynamically", WarningCategory.Other) else { - def where = if (inPattern) "pattern " else "" - // singleton types not considered here, dealias the pattern for SI-XXXX + // singleton types not considered here, dealias the pattern val P = P0.dealiasWiden val X = X0.widen @@ -325,7 +335,7 @@ trait Checkable { case RefinedType(_, decls) if !decls.isEmpty => context.warning(tree.pos, s"a pattern match on a refinement type is unchecked", WarningCategory.Unchecked) case RefinedType(parents, _) => - parents foreach (p => checkCheckable(tree, p, X, inPattern, canRemedy)) + parents.foreach(checkCheckable(tree, _, X, inPattern, canRemedy)) case _ => val checker = new CheckabilityChecker(X, P) if (checker.result == RuntimeCheckable) @@ -338,10 +348,11 @@ trait Checkable { if (checker.neverMatches) neverMatchesWarning(checker) else if (checker.isUncheckable) { - val msg = ( + val msg = { + val where = if (inPattern) "pattern " else "" if (checker.uncheckableType =:= P) s"abstract type $where$PString" else s"${checker.uncheckableMessage} in type $where$PString" - ) + } context.warning(tree.pos, s"$msg is unchecked since it is eliminated by erasure", WarningCategory.Unchecked) } else if (checker.result == RuntimeCheckable) { @@ -361,12 +372,3 @@ trait Checkable { } } } - -private[typechecker] final class Checkability(val value: Int) extends AnyVal -private[typechecker] object Checkability { - val StaticallyTrue = new Checkability(0) - val StaticallyFalse = new Checkability(1) - val RuntimeCheckable = new 
Checkability(2) - val Uncheckable = new Checkability(3) - val CheckabilityError = new Checkability(4) -} diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index 3457e2326bc5..4fce2215fe15 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -1273,7 +1273,7 @@ trait Infer extends Checkable { return ErrorType } - checkCheckable(tree0, pattp, pt, inPattern = true, canRemedy) + checkCheckable(tree0, pattp, pt, inPattern = true, canRemedy = canRemedy) if (pattp <:< pt) () else { debuglog("free type params (1) = " + tpparams) diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala index 43e4560772a1..be8279b9bc9c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala @@ -316,13 +316,12 @@ trait PatternTypers { case OverloadedType(_, _) => OverloadedUnapplyError(funOverloadResolved); ErrorType case _ => UnapplyWithSingleArgError(funOverloadResolved); ErrorType } - val GenPolyType(freeVars, unappFormal) = freshArgType(unapplyType.skolemizeExistential(context.owner, tree)) val unapplyContext = context.makeNewScope(tree, context.owner) - freeVars foreach unapplyContext.scope.enter - val pattp = newTyper(unapplyContext).infer.inferTypedPattern(tree, unappFormal, pt, canRemedy) + freeVars.foreach(unapplyContext.scope.enter) + val pattp = newTyper(unapplyContext).infer.inferTypedPattern(tree, unappFormal, pt, canRemedy = canRemedy) // turn any unresolved type variables in freevars into existential skolems - val skolems = freeVars map (fv => unapplyContext.owner.newExistentialSkolem(fv, fv)) + val skolems = freeVars.map(fv => unapplyContext.owner.newExistentialSkolem(fv, fv)) pattp.substSym(freeVars, skolems) } } diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 1cefcf355dfd..597689d1b079 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -16,7 +16,7 @@ package internal import java.util.Objects -import scala.collection.{immutable, mutable} +import scala.collection.mutable import scala.ref.WeakReference import mutable.{ListBuffer, LinkedHashSet} import Flags._ @@ -98,7 +98,7 @@ trait Types import statistics._ private[this] var explainSwitch = false - @unused private final val emptySymbolSet = immutable.Set.empty[Symbol] + @unused private final val emptySymbolSet = Set.empty[Symbol] @unused private final val breakCycles = settings.breakCycles.value /** In case anyone wants to turn on type parameter bounds being used @@ -836,7 +836,7 @@ trait Types case _ => false } case TypeRef(_, sym, args) => - val that1 = existentialAbstraction(args map (_.typeSymbol), that) + val that1 = existentialAbstraction(args.map(_.typeSymbol), that) (that ne that1) && (this <:< that1) && { debuglog(s"$this.matchesPattern($that) depended on discarding args and testing <:< $that1") true @@ -1913,7 +1913,7 @@ trait Types private final val Initializing = 1 private final val Initialized = 2 - private type RefMap = Map[Symbol, immutable.Set[Symbol]] + private type RefMap = Map[Symbol, Set[Symbol]] /** All type parameters reachable from given type parameter * by a path which contains at least one expansive reference. 
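// A stand-alone sketch, not the compiler's code, of the pending/border/closed worklist that the
// hunk below documents for existentialAbstraction: every element moves pending -> border -> closed,
// and the reachability test is only re-run for elements still pending. All names are illustrative.
def closure[A](seeds: Set[A], others: Set[A], reaches: (A, A) => Boolean): Set[A] = {
  var border  = seeds            // found, but successors not explored yet
  var pending = others -- seeds  // not found yet
  var closed  = Set.empty[A]     // found, successors already explored
  while (border.nonEmpty) {
    val next = pending.filter(p => border.exists(b => reaches(b, p)))
    closed ++= border
    pending --= next
    border = next
  }
  closed
}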
@@ -4298,14 +4298,14 @@ trait Types * - closed: already in closure, and we already searched for new elements. * * Invariant: pending, closed, and border form a partition of `tparams`. - * Each element in tparams goes from pending to border, and from border to closed + * Each element in tparams goes from pending to border, and from border to closed. * We separate border from closed to avoid recomputing `Type.contains` for same elements. */ - val pending = mutable.ListBuffer.empty[Symbol] - var border = mutable.ListBuffer.empty[Symbol] + val pending = ListBuffer.empty[Symbol] + var border = ListBuffer.empty[Symbol] partitionInto(tparams, tpe.contains, border, pending) - val closed = mutable.ListBuffer.empty[Symbol] - var nextBorder = mutable.ListBuffer.empty[Symbol] + val closed = ListBuffer.empty[Symbol] + var nextBorder = ListBuffer.empty[Symbol] while (!border.isEmpty) { nextBorder.clear() pending.filterInPlace { paramTodo => @@ -4322,15 +4322,15 @@ trait Types if (closed.length == tparams.length) tparams else closed.toList } - if (tparams.isEmpty || (tpe0 eq NoType) ) tpe0 + if (tparams.isEmpty || (tpe0 eq NoType)) tpe0 else { - val tpe = normalizeAliases(tpe0) + val tpe = normalizeAliases(tpe0) val extrapolation = new ExistentialExtrapolation(tparams) if (flipVariance) extrapolation.variance = Contravariant - val tpe1 = extrapolation extrapolate tpe + val tpe1 = extrapolation.extrapolate(tpe) newExistentialType(transitiveReferredFrom(tpe1), tpe1) } - } + } // end existentialAbstraction // Hash consing -------------------------------------------------------------- @@ -5043,8 +5043,8 @@ trait Types } if (!needsStripping) (ts, Nil) // fast path for common case else { - val tparams = mutable.ListBuffer[Symbol]() - val stripped = mutable.ListBuffer[Type]() + val tparams = ListBuffer[Symbol]() + val stripped = ListBuffer[Type]() def stripType(tp: Type): Unit = tp match { case rt: RefinedType if isIntersectionTypeForLazyBaseType(rt) => if (expandLazyBaseType) From 323c9c9dc889d42db4f530ba2cb0bf1974bce7c0 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Sat, 19 Jun 2021 11:20:32 -0700 Subject: [PATCH 275/769] Update lib for uncheckable type args --- src/library/scala/collection/Iterable.scala | 2 +- .../scala/collection/concurrent/TrieMap.scala | 72 +++++++++---------- .../scala/collection/immutable/HashMap.scala | 6 +- .../scala/collection/immutable/Queue.scala | 2 +- .../scala/collection/immutable/TreeSet.scala | 2 +- .../mutable/CollisionProofHashMap.scala | 51 ++++++------- .../scala/concurrent/impl/Promise.scala | 64 ++++++++--------- 7 files changed, 98 insertions(+), 101 deletions(-) diff --git a/src/library/scala/collection/Iterable.scala b/src/library/scala/collection/Iterable.scala index c76b1c9f6f2d..6721ea5920dc 100644 --- a/src/library/scala/collection/Iterable.scala +++ b/src/library/scala/collection/Iterable.scala @@ -994,7 +994,7 @@ trait MapFactoryDefaults[K, +V, override protected def newSpecificBuilder: mutable.Builder[(K, V @uncheckedVariance), CC[K, V @uncheckedVariance]] = mapFactory.newBuilder[K, V] override def empty: CC[K, V @uncheckedVariance] = (this: AnyRef) match { // Implemented here instead of in TreeSeqMap since overriding empty in TreeSeqMap is not forwards compatible (should be moved) - case self: immutable.TreeSeqMap[K, V] => immutable.TreeSeqMap.empty(self.orderedBy).asInstanceOf[CC[K, V]] + case self: immutable.TreeSeqMap[_, _] => immutable.TreeSeqMap.empty(self.orderedBy).asInstanceOf[CC[K, V]] case _ => mapFactory.empty } diff --git 
a/src/library/scala/collection/concurrent/TrieMap.scala b/src/library/scala/collection/concurrent/TrieMap.scala index 83e83f5e31be..474cbc1317a7 100644 --- a/src/library/scala/collection/concurrent/TrieMap.scala +++ b/src/library/scala/collection/concurrent/TrieMap.scala @@ -16,6 +16,7 @@ package concurrent import java.util.concurrent.atomic._ +import scala.{unchecked => uc} import scala.annotation.tailrec import scala.collection.generic.DefaultSerializable import scala.collection.immutable.{List, Nil} @@ -114,13 +115,13 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen, equiv: E if ((bmp & flag) != 0) { // 1a) insert below cn.array(pos) match { - case in: INode[K, V] => + case in: INode[K, V] @uc => if (startgen eq in.gen) in.rec_insert(k, v, hc, lev + 5, this, startgen, ct) else { if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_insert(k, v, hc, lev, parent, startgen, ct) else false } - case sn: SNode[K, V] => + case sn: SNode[K, V] @uc => if (sn.hc == hc && equal(sn.k, k, ct)) GCAS(cn, cn.updatedAt(pos, new SNode(sn.k, v, hc), gen), ct) else { val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct) @@ -169,13 +170,13 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen, equiv: E if ((bmp & flag) != 0) { // 1a) insert below cn.array(pos) match { - case in: INode[K, V] => + case in: INode[K, V] @uc => if (startgen eq in.gen) in.rec_insertif(k, v, hc, cond, lev + 5, this, startgen, ct) else { if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_insertif(k, v, hc, cond, lev, parent, startgen, ct) else null } - case sn: SNode[K, V] => cond match { + case sn: SNode[K, V] @uc => cond match { case INode.KEY_PRESENT_OR_ABSENT => if (sn.hc == hc && equal(sn.k, k, ct)) { if (GCAS(cn, cn.updatedAt(pos, new SNode(sn.k, v, hc), gen), ct)) Some(sn.v) else null @@ -264,19 +265,19 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen, equiv: E val pos = if (bmp == 0xffffffff) idx else Integer.bitCount(bmp & (flag - 1)) val sub = cn.array(pos) sub match { - case in: INode[K, V] => + case in: INode[K, V] @uc => if (ct.isReadOnly || (startgen eq in.gen)) in.rec_lookup(k, hc, lev + 5, this, startgen, ct) else { if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_lookup(k, hc, lev, parent, startgen, ct) else RESTART } - case sn: SNode[K, V] => // 2) singleton node + case sn: SNode[K, V] @uc => // 2) singleton node if (sn.hc == hc && equal(sn.k, k, ct)) sn.v.asInstanceOf[AnyRef] else NO_SUCH_ELEMENT_SENTINEL case basicNode => throw new MatchError(basicNode) } } - case tn: TNode[K, V] => // 3) non-live node + case tn: TNode[_, _] => // 3) non-live node def cleanReadOnly(tn: TNode[K, V]) = if (ct.nonReadOnly) { clean(parent, ct, lev - 5) RESTART @@ -322,13 +323,13 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen, equiv: E val pos = Integer.bitCount(bmp & (flag - 1)) val sub = cn.array(pos) val res = sub match { - case in: INode[K, V] => + case in: INode[K, V] @uc => if (startgen eq in.gen) in.rec_remove(k, v, removeAlways, hc, lev + 5, this, startgen, ct) else { if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_remove(k, v, removeAlways, hc, lev, parent, startgen, ct) else null } - case sn: SNode[K, V] => + case sn: SNode[K, V] @uc => if (sn.hc == hc && equal(sn.k, k, ct) && (removeAlways || sn.v == v)) { val ncn = cn.removedAt(pos, flag, gen).toContracted(lev) if (GCAS(cn, ncn, ct)) Some(sn.v) else null @@ -349,8 +350,8 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen, equiv: E else { val pos = 
Integer.bitCount(bmp & (flag - 1)) val sub = cn.array(pos) - if (sub eq this) (nonlive: @unchecked) match { - case tn: TNode[K, V] => + if (sub eq this) (nonlive: @uc) match { + case tn: TNode[K, V] @uc => val ncn = cn.updatedAt(pos, tn.copyUntombed, gen).toContracted(lev - 5) if (!parent.GCAS(cn, ncn, ct)) if (ct.readRoot().gen == startgen) cleanParent(nonlive) @@ -535,9 +536,9 @@ private[collection] final class CNode[K, V](val bitmap: Int, val array: Array[Ba while (i < array.length) { val pos = (i + offset) % array.length array(pos) match { - case sn: SNode[_, _] => sz += 1 - case in: INode[K, V] => sz += in.cachedSize(ct) - case basicNode => throw new MatchError(basicNode) + case sn: SNode[_, _] => sz += 1 + case in: INode[K, V] @uc => sz += in.cachedSize(ct) + case basicNode => throw new MatchError(basicNode) } i += 1 } @@ -581,8 +582,8 @@ private[collection] final class CNode[K, V](val bitmap: Int, val array: Array[Ba val narr = new Array[BasicNode](len) while (i < len) { arr(i) match { - case in: INode[K, V] => narr(i) = in.copyToGen(ngen, ct) - case bn: BasicNode => narr(i) = bn + case in: INode[K, V] @uc => narr(i) = in.copyToGen(ngen, ct) + case bn: BasicNode => narr(i) = bn } i += 1 } @@ -595,7 +596,7 @@ private[collection] final class CNode[K, V](val bitmap: Int, val array: Array[Ba } def toContracted(lev: Int): MainNode[K, V] = if (array.length == 1 && lev > 0) array(0) match { - case sn: SNode[K, V] => sn.copyTombed + case sn: SNode[K, V] @uc => sn.copyTombed case _ => this } else this @@ -613,11 +614,11 @@ private[collection] final class CNode[K, V](val bitmap: Int, val array: Array[Ba while (i < arr.length) { // construct new bitmap val sub = arr(i) sub match { - case in: INode[K, V] => + case in: INode[K, V] @uc => val inodemain = in.gcasRead(ct) assert(inodemain ne null) tmparray(i) = resurrect(in, inodemain) - case sn: SNode[K, V] => + case sn: SNode[K, V] @uc => tmparray(i) = sn case basicNode => throw new MatchError(basicNode) } @@ -629,19 +630,16 @@ private[collection] final class CNode[K, V](val bitmap: Int, val array: Array[Ba def string(lev: Int): String = "CNode %x\n%s".format(bitmap, array.map(_.string(lev + 1)).mkString("\n")) - private def collectLocalElems: Seq[String] = array.flatMap({ - case sn: SNode[K, V] => Iterable.single(sn.kvPair._2.toString) - case in: INode[K, V] => Iterable.single(scala.Predef.augmentString(in.toString).drop(14) + "(" + in.gen + ")") - case basicNode => throw new MatchError(basicNode) - }) - override def toString = { - val elems = collectLocalElems - "CNode(sz: %d; %s)".format(elems.size, elems.sorted.mkString(", ")) + def elems: Seq[String] = array.flatMap { + case sn: SNode[K, V] @uc => Iterable.single(sn.kvPair._2.toString) + case in: INode[K, V] @uc => Iterable.single(augmentString(in.toString).drop(14) + "(" + in.gen + ")") + case basicNode => throw new MatchError(basicNode) + } + f"CNode(sz: ${elems.size}%d; ${elems.sorted.mkString(", ")})" } } - private[concurrent] object CNode { def dual[K, V](x: SNode[K, V], xhc: Int, y: SNode[K, V], yhc: Int, lev: Int, gen: Gen, equiv: Equiv[K]): MainNode[K, V] = if (lev < 35) { @@ -745,17 +743,17 @@ final class TrieMap[K, V] private (r: AnyRef, rtupd: AtomicReferenceFieldUpdater private[concurrent] def RDCSS_READ_ROOT(abort: Boolean = false): INode[K, V] = { val r = /*READ*/root r match { - case in: INode[K, V] => in - case desc: RDCSS_Descriptor[K, V] => RDCSS_Complete(abort) - case x => throw new MatchError(x) + case in: INode[K, V] @uc => in + case desc: RDCSS_Descriptor[K, V] @uc => 
RDCSS_Complete(abort) + case x => throw new MatchError(x) } } @tailrec private def RDCSS_Complete(abort: Boolean): INode[K, V] = { val v = /*READ*/root v match { - case in: INode[K, V] => in - case desc: RDCSS_Descriptor[K, V] => + case in: INode[K, V] @uc => in + case desc: RDCSS_Descriptor[K, V] @uc => val RDCSS_Descriptor(ov, exp, nv) = desc if (abort) { if (CAS_ROOT(desc, ov)) ov @@ -1094,11 +1092,9 @@ private[collection] class TrieMapIterator[K, V](var level: Int, private var ct: if (npos < stack(depth).length) { stackpos(depth) = npos stack(depth)(npos) match { - case sn: SNode[K, V] => - current = sn - case in: INode[K, V] => - readin(in) - case basicNode => throw new MatchError(basicNode) + case sn: SNode[K, V] @uc => current = sn + case in: INode[K, V] @uc => readin(in) + case basicNode => throw new MatchError(basicNode) } } else { depth -= 1 diff --git a/src/library/scala/collection/immutable/HashMap.scala b/src/library/scala/collection/immutable/HashMap.scala index d59841853476..0b2cfa4246f0 100644 --- a/src/library/scala/collection/immutable/HashMap.scala +++ b/src/library/scala/collection/immutable/HashMap.scala @@ -20,7 +20,7 @@ import scala.annotation.unchecked.{uncheckedVariance => uV} import scala.collection.Hashing.improve import scala.collection.Stepper.EfficientSplit import scala.collection.generic.DefaultSerializable -import scala.collection.mutable.ReusableBuilder +import scala.collection.mutable, mutable.ReusableBuilder import scala.collection.{Iterator, MapFactory, MapFactoryDefaults, Stepper, StepperShape, mutable} import scala.runtime.AbstractFunction2 import scala.runtime.Statics.releaseFence @@ -169,7 +169,7 @@ final class HashMap[K, +V] private[immutable] (private[immutable] val rootNode: if (newNode eq hm.rootNode) hm else newHashMapOrThis(rootNode.concat(hm.rootNode, 0)) } - case hm: collection.mutable.HashMap[K, V] => + case hm: mutable.HashMap[K @unchecked, V @unchecked] => val iter = hm.nodeIterator var current = rootNode while (iter.hasNext) { @@ -1270,7 +1270,7 @@ private final class BitmapIndexedMapNode[K, +V]( index += 1 } } - case _: HashCollisionMapNode[K, V] => + case _: HashCollisionMapNode[_, _] => throw new Exception("Cannot merge BitmapIndexedMapNode with HashCollisionMapNode") } diff --git a/src/library/scala/collection/immutable/Queue.scala b/src/library/scala/collection/immutable/Queue.scala index 9c8a32d95a3e..eb12f6fd8b14 100644 --- a/src/library/scala/collection/immutable/Queue.scala +++ b/src/library/scala/collection/immutable/Queue.scala @@ -122,7 +122,7 @@ sealed class Queue[+A] protected(protected val in: List[A], protected val out: L override def appendedAll[B >: A](that: scala.collection.IterableOnce[B]): Queue[B] = { val newIn = that match { case that: Queue[B] => that.in ++ (that.out reverse_::: this.in) - case that: List[A] => that reverse_::: this.in + case that: List[B] => that reverse_::: this.in case _ => var result: List[B] = this.in val iter = that.iterator diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala index 51e55782b19f..9b40536d5ff9 100644 --- a/src/library/scala/collection/immutable/TreeSet.scala +++ b/src/library/scala/collection/immutable/TreeSet.scala @@ -278,7 +278,7 @@ object TreeSet extends SortedIterableFactory[TreeSet] { case ts: TreeSet[A] if ts.ordering == ordering => if (tree eq null) tree = ts.tree else tree = RB.union(beforePublish(tree), ts.tree)(ordering) - case ts: TreeMap[A, _] if ts.ordering == ordering => + case ts: TreeMap[A 
@unchecked, _] if ts.ordering == ordering => if (tree eq null) tree = ts.tree0 else tree = RB.union(beforePublish(tree), ts.tree0)(ordering) case _ => diff --git a/src/library/scala/collection/mutable/CollisionProofHashMap.scala b/src/library/scala/collection/mutable/CollisionProofHashMap.scala index 639f59c3b190..4382a31a0f5e 100644 --- a/src/library/scala/collection/mutable/CollisionProofHashMap.scala +++ b/src/library/scala/collection/mutable/CollisionProofHashMap.scala @@ -13,6 +13,7 @@ package scala.collection package mutable +import scala.{unchecked => uc} import scala.annotation.{implicitNotFound, tailrec, unused} import scala.annotation.unchecked.uncheckedVariance import scala.collection.generic.DefaultSerializationProxy @@ -72,8 +73,8 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double def get(key: K): Option[V] = findNode(key) match { case null => None case nd => Some(nd match { - case nd: LLNode => nd.value - case nd: RBNode => nd.value + case nd: LLNode @uc => nd.value + case nd: RBNode @uc => nd.value }) } @@ -81,15 +82,15 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double override def apply(key: K): V = findNode(key) match { case null => default(key) case nd => nd match { - case nd: LLNode => nd.value - case nd: RBNode => nd.value + case nd: LLNode @uc => nd.value + case nd: RBNode @uc => nd.value } } override def getOrElse[V1 >: V](key: K, default: => V1): V1 = { val nd = findNode(key) if (nd eq null) default else nd match { - case nd: LLNode => nd.value + case nd: LLNode @uc => nd.value case n => n.asInstanceOf[RBNode].value } } @@ -98,7 +99,7 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double val hash = computeHash(elem) table(index(hash)) match { case null => null - case n: LLNode => n.getNode(elem, hash) + case n: LLNode @uc => n.getNode(elem, hash) case n => n.asInstanceOf[RBNode].getNode(elem, hash) } } @@ -129,7 +130,7 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double private[this] def put0(key: K, value: V, getOld: Boolean, hash: Int, idx: Int): Some[V] = { val res = table(idx) match { - case n: RBNode => + case n: RBNode @uc => insert(n, idx, key, hash, value) case _old => val old: LLNode = _old.asInstanceOf[LLNode] @@ -184,16 +185,16 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double val idx = index(hash) table(idx) match { case null => Statics.pfMarker - case t: RBNode => + case t: RBNode @uc => val v = delete(t, idx, elem, hash) if(v.asInstanceOf[AnyRef] ne Statics.pfMarker) contentSize -= 1 v - case nd: LLNode if nd.hash == hash && nd.key == elem => + case nd: LLNode @uc if nd.hash == hash && nd.key == elem => // first element matches table(idx) = nd.next contentSize -= 1 nd.value - case nd: LLNode => + case nd: LLNode @uc => // find an element that matches var prev = nd var next = nd.next @@ -226,10 +227,10 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double i += 1 n match { case null => - case n: RBNode => + case n: RBNode @uc => node = CollisionProofHashMap.minNodeNonNull(n) return true - case n: LLNode => + case n: LLNode @uc => node = n return true } @@ -241,11 +242,11 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double def next(): R = if(!hasNext) Iterator.empty.next() else node match { - case n: RBNode => + case n: RBNode @uc => val r = extract(n) node = CollisionProofHashMap.successor(n ) r - case n: LLNode => + case n: LLNode 
@uc => val r = extract(n) node = n.next r @@ -289,8 +290,8 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double } @`inline` private[this] def splitBucket(tree: Node, lowBucket: Int, highBucket: Int, mask: Int): Unit = tree match { - case t: LLNode => splitBucket(t, lowBucket, highBucket, mask) - case t: RBNode => splitBucket(t, lowBucket, highBucket, mask) + case t: LLNode @uc => splitBucket(t, lowBucket, highBucket, mask) + case t: RBNode @uc => splitBucket(t, lowBucket, highBucket, mask) } private[this] def splitBucket(list: LLNode, lowBucket: Int, highBucket: Int, mask: Int): Unit = { @@ -361,8 +362,8 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double while(i < len) { val n = table(i) if(n ne null) n match { - case n: LLNode => n.foreach(f) - case n: RBNode => n.foreach(f) + case n: LLNode @uc => n.foreach(f) + case n: RBNode @uc => n.foreach(f) } i += 1 } @@ -374,8 +375,8 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double while(i < len) { val n = table(i) if(n ne null) n match { - case n: LLNode => n.foreachEntry(f) - case n: RBNode => n.foreachEntry(f) + case n: LLNode @uc => n.foreachEntry(f) + case n: RBNode @uc => n.foreachEntry(f) } i += 1 } @@ -390,7 +391,7 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double val idx = index(hash) table(idx) match { case null => () - case n: LLNode => + case n: LLNode @uc => val nd = n.getNode(key, hash) if(nd != null) return nd.value case n => @@ -711,8 +712,8 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double case 1 => val nn = xs.next() val (key, hash, value) = nn match { - case nn: LLNode => (nn.key, nn.hash, nn.value) - case nn: RBNode => (nn.key, nn.hash, nn.value) + case nn: LLNode @uc => (nn.key, nn.hash, nn.value) + case nn: RBNode @uc => (nn.key, nn.hash, nn.value) } new RBNode(key, hash, value, level == maxUsedDepth && level != 1, null, null, null) case n => @@ -721,8 +722,8 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double val nn = xs.next() val right = f(level+1, size-1-leftSize) val (key, hash, value) = nn match { - case nn: LLNode => (nn.key, nn.hash, nn.value) - case nn: RBNode => (nn.key, nn.hash, nn.value) + case nn: LLNode @uc => (nn.key, nn.hash, nn.value) + case nn: RBNode @uc => (nn.key, nn.hash, nn.value) } val n = new RBNode(key, hash, value, false, left, right, null) if(left ne null) left.parent = n diff --git a/src/library/scala/concurrent/impl/Promise.scala b/src/library/scala/concurrent/impl/Promise.scala index e031e51bd011..7024344c1184 100644 --- a/src/library/scala/concurrent/impl/Promise.scala +++ b/src/library/scala/concurrent/impl/Promise.scala @@ -72,10 +72,10 @@ private[concurrent] object Promise { **/ @inline @tailrec private[this] final def compressed(current: DefaultPromise[T], target: DefaultPromise[T], owner: DefaultPromise[T]): DefaultPromise[T] = { val value = target.get() - if (value.isInstanceOf[Callbacks[T]]) { + if (value.isInstanceOf[Callbacks[_]]) { if (compareAndSet(current, target)) target // Link else compressed(current = get(), target = target, owner = owner) // Retry - } else if (value.isInstanceOf[Link[T]]) compressed(current = current, target = value.asInstanceOf[Link[T]].get(), owner = owner) // Compress + } else if (value.isInstanceOf[Link[_]]) compressed(current = current, target = value.asInstanceOf[Link[T]].get(), owner = owner) // Compress else /*if (value.isInstanceOf[Try[T]])*/ { 
owner.unlink(value.asInstanceOf[Try[T]]) // Discard links owner @@ -132,7 +132,7 @@ private[concurrent] object Promise { override final def zipWith[U, R](that: Future[U])(f: (T, U) => R)(implicit executor: ExecutionContext): Future[R] = { val state = get() - if (state.isInstanceOf[Try[T]]) { + if (state.isInstanceOf[Try[_]]) { if (state.asInstanceOf[Try[T]].isFailure) this.asInstanceOf[Future[R]] else { val l = state.asInstanceOf[Success[T]].get @@ -143,7 +143,7 @@ private[concurrent] object Promise { val zipped = new DefaultPromise[R]() val thisF: Try[T] => Unit = { - case left: Success[T] => + case left: Success[_] => val right = buffer.getAndSet(left).asInstanceOf[Success[U]] if (right ne null) zipped.tryComplete(try Success(f(left.get, right.get)) catch { case e if NonFatal(e) => Failure(e) }) @@ -152,7 +152,7 @@ private[concurrent] object Promise { } val thatF: Try[U] => Unit = { - case right: Success[U] => + case right: Success[_] => val left = buffer.getAndSet(right).asInstanceOf[Success[T]] if (left ne null) zipped.tryComplete(try Success(f(left.get, right.get)) catch { case e if NonFatal(e) => Failure(e) }) @@ -168,47 +168,47 @@ private[concurrent] object Promise { override final def foreach[U](f: T => U)(implicit executor: ExecutionContext): Unit = { val state = get() - if (!state.isInstanceOf[Failure[T]]) dispatchOrAddCallbacks(state, new Transformation[T, Unit](Xform_foreach, f, executor)) + if (!state.isInstanceOf[Failure[_]]) dispatchOrAddCallbacks(state, new Transformation[T, Unit](Xform_foreach, f, executor)) } override final def flatMap[S](f: T => Future[S])(implicit executor: ExecutionContext): Future[S] = { val state = get() - if (!state.isInstanceOf[Failure[T]]) dispatchOrAddCallbacks(state, new Transformation[T, S](Xform_flatMap, f, executor)) + if (!state.isInstanceOf[Failure[_]]) dispatchOrAddCallbacks(state, new Transformation[T, S](Xform_flatMap, f, executor)) else this.asInstanceOf[Future[S]] } override final def map[S](f: T => S)(implicit executor: ExecutionContext): Future[S] = { val state = get() - if (!state.isInstanceOf[Failure[T]]) dispatchOrAddCallbacks(state, new Transformation[T, S](Xform_map, f, executor)) + if (!state.isInstanceOf[Failure[_]]) dispatchOrAddCallbacks(state, new Transformation[T, S](Xform_map, f, executor)) else this.asInstanceOf[Future[S]] } override final def filter(p: T => Boolean)(implicit executor: ExecutionContext): Future[T] = { val state = get() - if (!state.isInstanceOf[Failure[T]]) dispatchOrAddCallbacks(state, new Transformation[T, T](Xform_filter, p, executor)) // Short-circuit if we get a Success + if (!state.isInstanceOf[Failure[_]]) dispatchOrAddCallbacks(state, new Transformation[T, T](Xform_filter, p, executor)) // Short-circuit if we get a Success else this } override final def collect[S](pf: PartialFunction[T, S])(implicit executor: ExecutionContext): Future[S] = { val state = get() - if (!state.isInstanceOf[Failure[T]]) dispatchOrAddCallbacks(state, new Transformation[T, S](Xform_collect, pf, executor)) // Short-circuit if we get a Success + if (!state.isInstanceOf[Failure[_]]) dispatchOrAddCallbacks(state, new Transformation[T, S](Xform_collect, pf, executor)) // Short-circuit if we get a Success else this.asInstanceOf[Future[S]] } override final def recoverWith[U >: T](pf: PartialFunction[Throwable, Future[U]])(implicit executor: ExecutionContext): Future[U] = { val state = get() - if (!state.isInstanceOf[Success[T]]) dispatchOrAddCallbacks(state, new Transformation[T, U](Xform_recoverWith, pf, executor)) // 
Short-circuit if we get a Failure + if (!state.isInstanceOf[Success[_]]) dispatchOrAddCallbacks(state, new Transformation[T, U](Xform_recoverWith, pf, executor)) // Short-circuit if we get a Failure else this.asInstanceOf[Future[U]] } override final def recover[U >: T](pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Future[U] = { val state = get() - if (!state.isInstanceOf[Success[T]]) dispatchOrAddCallbacks(state, new Transformation[T, U](Xform_recover, pf, executor)) // Short-circuit if we get a Failure + if (!state.isInstanceOf[Success[_]]) dispatchOrAddCallbacks(state, new Transformation[T, U](Xform_recover, pf, executor)) // Short-circuit if we get a Failure else this.asInstanceOf[Future[U]] } override final def mapTo[S](implicit tag: scala.reflect.ClassTag[S]): Future[S] = - if (!get().isInstanceOf[Failure[T]]) super[Future].mapTo[S](tag) // Short-circuit if we get a Success + if (!get().isInstanceOf[Failure[_]]) super[Future].mapTo[S](tag) // Short-circuit if we get a Success else this.asInstanceOf[Future[S]] @@ -216,13 +216,13 @@ private[concurrent] object Promise { dispatchOrAddCallbacks(get(), new Transformation[T, Unit](Xform_onComplete, func, executor)) override final def failed: Future[Throwable] = - if (!get().isInstanceOf[Success[T]]) super.failed + if (!get().isInstanceOf[Success[_]]) super.failed else Future.failedFailureFuture // Cached instance in case of already known success @tailrec override final def toString: String = { val state = get() - if (state.isInstanceOf[Try[T]]) "Future("+state+")" - else if (state.isInstanceOf[Link[T]]) state.asInstanceOf[Link[T]].promise(this).toString + if (state.isInstanceOf[Try[_]]) "Future("+state+")" + else if (state.isInstanceOf[Link[_]]) state.asInstanceOf[Link[T]].promise(this).toString else /*if (state.isInstanceOf[Callbacks[T]]) */ "Future()" } @@ -267,25 +267,25 @@ private[concurrent] object Promise { @tailrec // returns null if not completed private final def value0: Try[T] = { val state = get() - if (state.isInstanceOf[Try[T]]) state.asInstanceOf[Try[T]] - else if (state.isInstanceOf[Link[T]]) state.asInstanceOf[Link[T]].promise(this).value0 + if (state.isInstanceOf[Try[_]]) state.asInstanceOf[Try[T]] + else if (state.isInstanceOf[Link[_]]) state.asInstanceOf[Link[T]].promise(this).value0 else /*if (state.isInstanceOf[Callbacks[T]])*/ null } override final def tryComplete(value: Try[T]): Boolean = { val state = get() - if (state.isInstanceOf[Try[T]]) false + if (state.isInstanceOf[Try[_]]) false else tryComplete0(state, resolve(value)) } @tailrec // WARNING: important that the supplied Try really is resolve():d private[Promise] final def tryComplete0(state: AnyRef, resolved: Try[T]): Boolean = - if (state.isInstanceOf[Callbacks[T]]) { + if (state.isInstanceOf[Callbacks[_]]) { if (compareAndSet(state, resolved)) { if (state ne Noop) submitWithValue(state.asInstanceOf[Callbacks[T]], resolved) true } else tryComplete0(get(), resolved) - } else if (state.isInstanceOf[Link[T]]) { + } else if (state.isInstanceOf[Link[_]]) { val p = state.asInstanceOf[Link[T]].promise(this) // If this returns owner/this, we are in a completed link (p ne this) && p.tryComplete0(p.get(), resolved) // Use this to get tailcall optimization and avoid re-resolution } else /* if(state.isInstanceOf[Try[T]]) */ false @@ -293,8 +293,8 @@ private[concurrent] object Promise { override final def completeWith(other: Future[T]): this.type = { if (other ne this) { val state = get() - if (!state.isInstanceOf[Try[T]]) { - val resolved = if 
(other.isInstanceOf[DefaultPromise[T]]) other.asInstanceOf[DefaultPromise[T]].value0 else other.value.orNull + if (!state.isInstanceOf[Try[_]]) { + val resolved = if (other.isInstanceOf[DefaultPromise[_]]) other.asInstanceOf[DefaultPromise[T]].value0 else other.value.orNull if (resolved ne null) tryComplete0(state, resolved) else other.onComplete(this)(ExecutionContext.parasitic) } @@ -308,10 +308,10 @@ private[concurrent] object Promise { * to the root promise when linking two promises together. */ @tailrec private final def dispatchOrAddCallbacks[C <: Callbacks[T]](state: AnyRef, callbacks: C): C = - if (state.isInstanceOf[Try[T]]) { + if (state.isInstanceOf[Try[_]]) { submitWithValue(callbacks, state.asInstanceOf[Try[T]]) // invariant: callbacks should never be Noop here callbacks - } else if (state.isInstanceOf[Callbacks[T]]) { + } else if (state.isInstanceOf[Callbacks[_]]) { if(compareAndSet(state, if (state ne Noop) concatCallbacks(callbacks, state.asInstanceOf[Callbacks[T]]) else callbacks)) callbacks else dispatchOrAddCallbacks(get(), callbacks) } else /*if (state.isInstanceOf[Link[T]])*/ { @@ -343,10 +343,10 @@ private[concurrent] object Promise { @tailrec private[concurrent] final def linkRootOf(target: DefaultPromise[T], link: Link[T]): Unit = if (this ne target) { val state = get() - if (state.isInstanceOf[Try[T]]) { + if (state.isInstanceOf[Try[_]]) { if(!target.tryComplete0(target.get(), state.asInstanceOf[Try[T]])) throw new IllegalStateException("Cannot link completed promises together") - } else if (state.isInstanceOf[Callbacks[T]]) { + } else if (state.isInstanceOf[Callbacks[_]]) { val l = if (link ne null) link else new Link(target) val p = l.promise(this) if ((this ne p) && compareAndSet(state, l)) { @@ -362,7 +362,7 @@ private[concurrent] object Promise { **/ @tailrec private[concurrent] final def unlink(resolved: Try[T]): Unit = { val state = get() - if (state.isInstanceOf[Link[T]]) { + if (state.isInstanceOf[Link[_]]) { val next = if (compareAndSet(state, resolved)) state.asInstanceOf[Link[T]].get() else this next.unlink(resolved) } else tryComplete0(state, resolved) @@ -468,23 +468,23 @@ private[concurrent] object Promise { case Xform_flatMap => if (v.isInstanceOf[Success[F]]) { val f = fun(v.get) - if (f.isInstanceOf[DefaultPromise[T]]) f.asInstanceOf[DefaultPromise[T]].linkRootOf(this, null) else completeWith(f.asInstanceOf[Future[T]]) + if (f.isInstanceOf[DefaultPromise[_]]) f.asInstanceOf[DefaultPromise[T]].linkRootOf(this, null) else completeWith(f.asInstanceOf[Future[T]]) null } else v case Xform_transform => resolve(fun(v).asInstanceOf[Try[T]]) case Xform_transformWith => val f = fun(v) - if (f.isInstanceOf[DefaultPromise[T]]) f.asInstanceOf[DefaultPromise[T]].linkRootOf(this, null) else completeWith(f.asInstanceOf[Future[T]]) + if (f.isInstanceOf[DefaultPromise[_]]) f.asInstanceOf[DefaultPromise[T]].linkRootOf(this, null) else completeWith(f.asInstanceOf[Future[T]]) null case Xform_foreach => - v foreach fun + v.foreach(fun) null case Xform_onComplete => fun(v) null case Xform_recover => - if (v.isInstanceOf[Failure[F]]) resolve(v.recover(fun.asInstanceOf[PartialFunction[Throwable, F]])) else v //recover F=:=T + if (v.isInstanceOf[Failure[_]]) resolve(v.recover(fun.asInstanceOf[PartialFunction[Throwable, F]])) else v //recover F=:=T case Xform_recoverWith => if (v.isInstanceOf[Failure[F]]) { val f = fun.asInstanceOf[PartialFunction[Throwable, Future[T]]].applyOrElse(v.asInstanceOf[Failure[F]].exception, Future.recoverWithFailed) From 
fce63f592addd0d5eab7a5b3adf25b71f72c685c Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Thu, 1 Jul 2021 11:16:04 -0700 Subject: [PATCH 276/769] Tweak message if multiple unchecked args. --- .../tools/nsc/typechecker/Checkable.scala | 47 ++++++----- test/files/neg/t12408.check | 15 ++++ test/files/neg/t12408.scala | 82 +++++++++++++++++++ test/files/neg/t3692-new.check | 4 +- test/files/neg/t3692-new.scala | 2 +- test/files/neg/unchecked-refinement.check | 4 +- test/files/run/patmat-exprs.check | 1 - test/files/run/patmat-exprs.scala | 10 +-- 8 files changed, 134 insertions(+), 31 deletions(-) create mode 100644 test/files/neg/t12408.check create mode 100644 test/files/neg/t12408.scala delete mode 100644 test/files/run/patmat-exprs.check diff --git a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala index ed146327730c..fb7e53f06ed1 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala @@ -37,7 +37,7 @@ import scala.tools.nsc.Reporting.WarningCategory * * There are four possibilities to consider: * [P1] X will always conform to P - * [P2] X will never conform to P + * [P2] x will never be a P, because it is an X * [P3] X will conform to P if some runtime test is true * [P4] X cannot be checked against P * @@ -82,7 +82,7 @@ trait Checkable { final val RuntimeCheckable = 2 final val Uncheckable = 3 final val CheckabilityError = 4 - lazy val describe: (Int => String) = List( + lazy val describe: (Checkability => String) = List( "statically true", "statically false", "runtime checkable", @@ -175,7 +175,7 @@ trait Checkable { private def typeArgsInTopLevelType(tp: Type): Set[Type] = { def isUnwarnableTypeArg(arg: Type) = { def isUnwarnableTypeArgSymbol(sym: Symbol) = { - sym.isTypeParameter || // dummy + sym.isTypeParameter || // dummy sym.name.toTermName == nme.WILDCARD || // _ nme.isVariableName(sym.name) // type variable } @@ -200,28 +200,25 @@ trait Checkable { loop(tp) res } - lazy val uncheckableType = if (Psym.isAbstractType) P else { - val possibles = typeArgsInTopLevelType(P) - val opt = possibles find { targ => + lazy val (uncheckableType, uncheckableCard) = + if (Psym.isAbstractType) (P, 1) + else { + val possibles = typeArgsInTopLevelType(P) // Create a derived type with every possibly uncheckable type replaced // with a WildcardType, except for 'targ'. If !(XR <: derived) then // 'targ' is uncheckable. - val derived = P map (tp => if (possibles(tp) && !(tp =:= targ)) WildcardType else tp) - !(XR <:< derived) + def candidate(targ: Type) = { + val derived = P.map(tp => if (possibles(tp) && !(tp =:= targ)) WildcardType else tp) + !(XR <:< derived) + } + val opt = possibles.find(candidate) + opt.map(res => (res, possibles.iterator.map(candidate).take(2).size)).getOrElse((NoType, 0)) } - opt getOrElse NoType - } def neverSubClass = isNeverSubClass(Xsym, Psym) def neverMatches = result == StaticallyFalse def isUncheckable = result == Uncheckable def isCheckable = !isUncheckable - def uncheckableMessage = uncheckableType match { - case NoType => "something" - case tp @ RefinedType(_, _) => "refinement " + tp - case TypeRef(_, sym, _) if sym.isAbstractType => "abstract type " + sym.name - case tp => "non-variable type argument " + tp - } /** Is it impossible for the given symbols to be parents in the same class? * This means given A and B, can there be an instance of A with B? 
This is the @@ -348,12 +345,24 @@ trait Checkable { if (checker.neverMatches) neverMatchesWarning(checker) else if (checker.isUncheckable) { + def uncheckableMessage = checker.uncheckableType match { + case NoType => "something" + case tp @ RefinedType(_, _) => "refinement " + tp + case TypeRef(_, sym, _) if sym.isAbstractType => "abstract type " + sym.name + case tp => "non-variable type argument " + tp + } val msg = { val where = if (inPattern) "pattern " else "" - if (checker.uncheckableType =:= P) s"abstract type $where$PString" - else s"${checker.uncheckableMessage} in type $where$PString" + if (checker.uncheckableCard == 2) + s"the type test for $where$PString cannot be checked at runtime because it has type parameters eliminated by erasure" + else { + val thing = + if (checker.uncheckableType =:= P) s"abstract type $where$PString" + else s"$uncheckableMessage in type $where$PString" + s"$thing is unchecked since it is eliminated by erasure" + } } - context.warning(tree.pos, s"$msg is unchecked since it is eliminated by erasure", WarningCategory.Unchecked) + context.warning(tree.pos, msg, WarningCategory.Unchecked) } else if (checker.result == RuntimeCheckable) { // register deferred checking for sealed types in current run diff --git a/test/files/neg/t12408.check b/test/files/neg/t12408.check new file mode 100644 index 000000000000..32641513cca1 --- /dev/null +++ b/test/files/neg/t12408.check @@ -0,0 +1,15 @@ +t12408.scala:6: warning: abstract type pattern B is unchecked since it is eliminated by erasure + def f1[B] = a match { case _: B => } // warn + ^ +t12408.scala:7: warning: abstract type B in type pattern t12408.Renderer[B] is unchecked since it is eliminated by erasure + def f2[B] = a match { case _: Renderer[B] => } // warn + ^ +t12408.scala:8: warning: non-variable type argument Int in type pattern List[Int] (the underlying of List[Int]) is unchecked since it is eliminated by erasure + def f3[B](xs: List[A]) = xs match { case _: List[Int] => } // warn + ^ +t12408.scala:17: warning: the type test for pattern (A, B, C, D, E, F, G, H, I, J, K, L, M) cannot be checked at runtime because it has type parameters eliminated by erasure + case _: (A,B,C,D,E,F,G,H,I,J,K,L,M) => + ^ +error: No warnings can be incurred under -Werror. 
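// For reference, a hedged sketch rather than part of this test fixture: the two idioms the rest
// of this series applies to the library so that matches like the ones expected above stay
// warning-free (the method name `sizeOf` is illustrative).
def sizeOf[A](x: Any): Int = x match {
  case xs: List[_]             => xs.size   // wildcard argument: nothing erased is claimed
  case m: Map[A @unchecked, _] => m.size    // @unchecked: the erased key type is deliberately trusted
  case _                       => 0
}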
+4 warnings +1 error diff --git a/test/files/neg/t12408.scala b/test/files/neg/t12408.scala new file mode 100644 index 000000000000..ab5879ae5c6d --- /dev/null +++ b/test/files/neg/t12408.scala @@ -0,0 +1,82 @@ +// scalac: -Werror + +package t12408 { + class Renderer[A] + class Test[A](a: Any) { + def f1[B] = a match { case _: B => } // warn + def f2[B] = a match { case _: Renderer[B] => } // warn + def f3[B](xs: List[A]) = xs match { case _: List[Int] => } // warn + def g = a match { case _: Renderer[A] => } // now also warn + } + + trait T[A,B,C,D,E,F,G,H,I,J,K,L,M] { + def f(a: Any) = a match { + case _: (A,B,C,D,E,F,G,H,I,J,K,L,M) => + } + def g[A,B,C,D,E,F,G,H,I,J,K,L,M] = (null: Any) match { + case _: (A,B,C,D,E,F,G,H,I,J,K,L,M) => + } + } + class C[A,B,C,D,E,F,G,H,I,J,K,L,M] { + def f(a: Any) = a match { + case _: (A,B,C,D,E,F,G,H,I,J,K,L,M) => + } + } +} + +package t12408b { + // trait's type params align with class C + sealed trait T[A, B] + final case class C[A, B](a: A, b: B) extends T[A, B] + + class Test[A, B] { + def test(t: T[A, B]) = t match { case _: C[A, B] => } // nowarn + } + object Test extends App { + println { + new Test[String, Int]().test(C("hi", 42)) + } + } +} + +package t12408c { + sealed trait T[A] + final case class C[A, B](a: A, b: B) extends T[A] + + class Test[A, B] { + def test(t: T[A]) = t match { case _: C[A, B] => } // warn on B + } + object Test extends App { + println { + new Test[String, Int]().test(C("hi", 42)) + } + } +} + +package reported { + sealed trait Action[Page] + final case class Renderer[Page, Props]() extends Action[Page] + sealed trait Redirect[Page] extends Action[Page] + + final class RouterLogic[Page, Props] { + + def hmm1(a: Action[Page]): Int = + a match { + case r: Renderer[Page, Props] => 1 // warn as above + case _ => 2 + } + + def hmm2(a: Action[Page]): Int = + a match { + case r: Redirect[Page] => 2 // nowarn + case _ => 1 + } + } +} + +package regression { + object unchecked3 { + /* nowarn */ def tparamLeakage1(x: Any) = x match { case Array() => 1 } + /* nowarn */ def tparamLeakage2(x: Any) = x match { case List() => 1 } + } +} diff --git a/test/files/neg/t3692-new.check b/test/files/neg/t3692-new.check index b1d23eb8d24b..93104d8a1e69 100644 --- a/test/files/neg/t3692-new.check +++ b/test/files/neg/t3692-new.check @@ -1,10 +1,10 @@ t3692-new.scala:17: warning: non-variable type argument Int in type pattern scala.collection.immutable.Map[Int,Int] (the underlying of Map[Int,Int]) is unchecked since it is eliminated by erasure case m0: Map[Int, Int] => new java.util.HashMap[Integer, Integer] ^ -t3692-new.scala:18: warning: non-variable type argument Int in type pattern scala.collection.immutable.Map[Int,V] (the underlying of Map[Int,V]) is unchecked since it is eliminated by erasure +t3692-new.scala:18: warning: the type test for pattern scala.collection.immutable.Map[Int,V] (the underlying of Map[Int,V]) cannot be checked at runtime because it has type parameters eliminated by erasure case m1: Map[Int, V] => new java.util.HashMap[Integer, V] ^ -t3692-new.scala:19: warning: non-variable type argument Int in type pattern scala.collection.immutable.Map[T,Int] (the underlying of Map[T,Int]) is unchecked since it is eliminated by erasure +t3692-new.scala:19: warning: the type test for pattern scala.collection.immutable.Map[T,Int] (the underlying of Map[T,Int]) cannot be checked at runtime because it has type parameters eliminated by erasure case m2: Map[T, Int] => new java.util.HashMap[T, Integer] ^ t3692-new.scala:18: warning: 
unreachable code diff --git a/test/files/neg/t3692-new.scala b/test/files/neg/t3692-new.scala index 1fe209fe9629..063e141cb4a3 100644 --- a/test/files/neg/t3692-new.scala +++ b/test/files/neg/t3692-new.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings +// scalac: -Werror // import scala.reflect.{ClassTag, classTag} import java.lang.Integer diff --git a/test/files/neg/unchecked-refinement.check b/test/files/neg/unchecked-refinement.check index 95dcec0c89ea..8a65d1a3b08a 100644 --- a/test/files/neg/unchecked-refinement.check +++ b/test/files/neg/unchecked-refinement.check @@ -1,7 +1,7 @@ -unchecked-refinement.scala:19: warning: abstract type U in type pattern Foo[U,U,V] is unchecked since it is eliminated by erasure +unchecked-refinement.scala:19: warning: the type test for pattern Foo[U,U,V] cannot be checked at runtime because it has type parameters eliminated by erasure /* warn */ case _: Foo[U, U, V] if b => () ^ -unchecked-refinement.scala:21: warning: non-variable type argument Any in type pattern Foo[Any,U,V] is unchecked since it is eliminated by erasure +unchecked-refinement.scala:21: warning: the type test for pattern Foo[Any,U,V] cannot be checked at runtime because it has type parameters eliminated by erasure /* warn */ case _: Foo[Any, U, V] if b => () ^ unchecked-refinement.scala:25: warning: a pattern match on a refinement type is unchecked diff --git a/test/files/run/patmat-exprs.check b/test/files/run/patmat-exprs.check deleted file mode 100644 index b6df9385faa0..000000000000 --- a/test/files/run/patmat-exprs.check +++ /dev/null @@ -1 +0,0 @@ -((5 + 10) + 300) diff --git a/test/files/run/patmat-exprs.scala b/test/files/run/patmat-exprs.scala index bece2d04a1ac..464a6920ada4 100644 --- a/test/files/run/patmat-exprs.scala +++ b/test/files/run/patmat-exprs.scala @@ -1,4 +1,4 @@ -// scalac: -deprecation +// scalac: -Werror -Xlint // import scala.language.{ implicitConversions } @@ -31,7 +31,7 @@ object Test { } def main(args: Array[String]): Unit = { - println((5: Expr[Int]) + 10 + 15 * 20) + assert("((5 + 10) + 300)" == ((5: Expr[Int]) + 10 + 15 * 20).toString) } } @@ -156,7 +156,7 @@ trait Pattern { if (f.isDefinedAt(this)) (f(this) :: a) else a } - def leaves: List[Leaf[T]] = collect { case l: Leaf[T] => l } + def leaves: List[Leaf[T]] = collect { case l: Leaf[T @unchecked] => l } def + (other: Expr[T])(implicit n: NumericOps[T]) = Add(List(this, other)) def - (other: Expr[T])(implicit n: NumericOps[T]) = Sub(this, other) @@ -512,9 +512,7 @@ trait Pattern { override lazy val hashCode = ScalaRunTime._hashCode(this); } - - abstract class Compare[T](left: Expr[T], right: Expr[T], cmp: (T, T) => Boolean)(implicit num: NumericOps[T]) - extends Expr[Boolean] { + abstract class Compare[T: NumericOps](left: Expr[T], right: Expr[T], cmp: (T, T) => Boolean) extends Expr[Boolean] { def derivative(v: Var[Boolean]) = throw new IllegalStateException("Derivative of Boolean not allowed") def eval(f: Any => Any) = cmp(left.eval(f), right.eval(f)) val args = List(left, right) From 942b1f027e27fae0d2957937d839b61a2f392499 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Tue, 29 Jun 2021 15:09:24 +0200 Subject: [PATCH 277/769] fix more uncheckable type args in the library --- src/library/scala/collection/Map.scala | 2 +- src/library/scala/collection/Seq.scala | 2 +- src/library/scala/collection/Set.scala | 2 +- src/library/scala/collection/SortedMap.scala | 2 +- src/library/scala/collection/SortedSet.scala | 2 +- .../scala/collection/immutable/HashMap.scala | 10 ++++----- 
.../scala/collection/immutable/HashSet.scala | 4 ++-- .../scala/collection/immutable/TreeMap.scala | 4 ++-- .../scala/collection/immutable/TreeSet.scala | 2 +- src/library/scala/math/Equiv.scala | 20 ++++++++--------- src/library/scala/math/Ordering.scala | 22 +++++++++---------- 11 files changed, 36 insertions(+), 36 deletions(-) diff --git a/src/library/scala/collection/Map.scala b/src/library/scala/collection/Map.scala index 59e1b5db0651..44ebf10025d0 100644 --- a/src/library/scala/collection/Map.scala +++ b/src/library/scala/collection/Map.scala @@ -62,7 +62,7 @@ trait Map[K, +V] */ override def equals(o: Any): Boolean = (this eq o.asInstanceOf[AnyRef]) || (o match { - case map: Map[K, _] if map.canEqual(this) => + case map: Map[K @unchecked, _] if map.canEqual(this) => (this.size == map.size) && { try this.forall(kv => map.getOrElse(kv._1, Map.DefaultSentinelFn()) == kv._2) catch { case _: ClassCastException => false } // PR #9565 / scala/bug#12228 diff --git a/src/library/scala/collection/Seq.scala b/src/library/scala/collection/Seq.scala index 19dc0b3377b9..c0a0da8577cb 100644 --- a/src/library/scala/collection/Seq.scala +++ b/src/library/scala/collection/Seq.scala @@ -34,7 +34,7 @@ trait Seq[+A] override def equals(o: Any): Boolean = (this eq o.asInstanceOf[AnyRef]) || (o match { - case seq: Seq[A] if seq.canEqual(this) => sameElements(seq) + case seq: Seq[A @unchecked] if seq.canEqual(this) => sameElements(seq) case _ => false }) diff --git a/src/library/scala/collection/Set.scala b/src/library/scala/collection/Set.scala index d35494cd1eb5..784e7e8a4fc5 100644 --- a/src/library/scala/collection/Set.scala +++ b/src/library/scala/collection/Set.scala @@ -61,7 +61,7 @@ trait Set[A] */ override def equals(that: Any): Boolean = (this eq that.asInstanceOf[AnyRef]) || (that match { - case set: Set[A] if set.canEqual(this) => + case set: Set[A @unchecked] if set.canEqual(this) => (this.size == set.size) && { try this.subsetOf(set) catch { case _: ClassCastException => false } // PR #9565 / scala/bug#12228 diff --git a/src/library/scala/collection/SortedMap.scala b/src/library/scala/collection/SortedMap.scala index 29ebc304678c..86cad03869ec 100644 --- a/src/library/scala/collection/SortedMap.scala +++ b/src/library/scala/collection/SortedMap.scala @@ -30,7 +30,7 @@ trait SortedMap[K, +V] override def equals(that: Any): Boolean = that match { case _ if this eq that.asInstanceOf[AnyRef] => true - case sm: SortedMap[K, _] if sm.ordering == this.ordering => + case sm: SortedMap[K @unchecked, _] if sm.ordering == this.ordering => (sm canEqual this) && (this.size == sm.size) && { val i1 = this.iterator diff --git a/src/library/scala/collection/SortedSet.scala b/src/library/scala/collection/SortedSet.scala index 6dc3ed6242e6..4bbe8576802a 100644 --- a/src/library/scala/collection/SortedSet.scala +++ b/src/library/scala/collection/SortedSet.scala @@ -29,7 +29,7 @@ trait SortedSet[A] extends Set[A] override def equals(that: Any): Boolean = that match { case _ if this eq that.asInstanceOf[AnyRef] => true - case ss: SortedSet[A] if ss.ordering == this.ordering => + case ss: SortedSet[A @unchecked] if ss.ordering == this.ordering => (ss canEqual this) && (this.size == ss.size) && { val i1 = this.iterator diff --git a/src/library/scala/collection/immutable/HashMap.scala b/src/library/scala/collection/immutable/HashMap.scala index 0b2cfa4246f0..7a9231231d32 100644 --- a/src/library/scala/collection/immutable/HashMap.scala +++ b/src/library/scala/collection/immutable/HashMap.scala @@ -1171,7 +1171,7 @@ 
private final class BitmapIndexedMapNode[K, +V]( } override def mergeInto[V1 >: V](that: MapNode[K, V1], builder: HashMapBuilder[K, V1], shift: Int)(mergef: ((K, V), (K, V1)) => (K, V1)): Unit = that match { - case bm: BitmapIndexedMapNode[K, V] => + case bm: BitmapIndexedMapNode[K, V] @unchecked => if (size == 0) { that.buildTo(builder) return @@ -1276,7 +1276,7 @@ private final class BitmapIndexedMapNode[K, +V]( override def equals(that: Any): Boolean = that match { - case node: BitmapIndexedMapNode[K, V] => + case node: BitmapIndexedMapNode[_, _] => (this eq node) || (this.cachedJavaKeySetHashCode == node.cachedJavaKeySetHashCode) && (this.nodeMap == node.nodeMap) && @@ -1307,7 +1307,7 @@ private final class BitmapIndexedMapNode[K, +V]( throw new UnsupportedOperationException("Trie nodes do not support hashing.") override def concat[V1 >: V](that: MapNode[K, V1], shift: Int): BitmapIndexedMapNode[K, V1] = that match { - case bm: BitmapIndexedMapNode[K, V] => + case bm: BitmapIndexedMapNode[K, V] @unchecked => if (size == 0) return bm else if (bm.size == 0 || (bm eq this)) return this else if (bm.size == 1) { @@ -1821,7 +1821,7 @@ private final class HashCollisionMapNode[K, +V ]( releaseFence() - private[immutable] def indexOf(key: K): Int = { + private[immutable] def indexOf(key: Any): Int = { val iter = content.iterator var i = 0 while (iter.hasNext) { @@ -1944,7 +1944,7 @@ private final class HashCollisionMapNode[K, +V ]( override def equals(that: Any): Boolean = that match { - case node: HashCollisionMapNode[K, V] => + case node: HashCollisionMapNode[_, _] => (this eq node) || (this.hash == node.hash) && (this.content.length == node.content.length) && { diff --git a/src/library/scala/collection/immutable/HashSet.scala b/src/library/scala/collection/immutable/HashSet.scala index 1c08da18023b..1785ceb2c0ea 100644 --- a/src/library/scala/collection/immutable/HashSet.scala +++ b/src/library/scala/collection/immutable/HashSet.scala @@ -1377,7 +1377,7 @@ private final class BitmapIndexedSetNode[A]( override def equals(that: Any): Boolean = that match { - case node: BitmapIndexedSetNode[A] => + case node: BitmapIndexedSetNode[_] => (this eq node) || (this.cachedJavaKeySetHashCode == node.cachedJavaKeySetHashCode) && (this.nodeMap == node.nodeMap) && @@ -1805,7 +1805,7 @@ private final class HashCollisionSetNode[A](val originalHash: Int, val hash: Int override def equals(that: Any): Boolean = that match { - case node: HashCollisionSetNode[A] => + case node: HashCollisionSetNode[_] => (this eq node) || (this.hash == node.hash) && (this.content.size == node.content.size) && diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala index 90441e867052..a0f0e8692f97 100644 --- a/src/library/scala/collection/immutable/TreeMap.scala +++ b/src/library/scala/collection/immutable/TreeMap.scala @@ -140,7 +140,7 @@ final class TreeMap[K, +V] private (private val tree: RB.Tree[K, V])(implicit va override def concat[V1 >: V](that: collection.IterableOnce[(K, V1)]): TreeMap[K, V1] = newMapOrSelf(that match { - case tm: TreeMap[K, V] if ordering == tm.ordering => + case tm: TreeMap[K, V] @unchecked if ordering == tm.ordering => RB.union(tree, tm.tree) case ls: LinearSeq[(K,V1)] => if (ls.isEmpty) tree //to avoid the creation of the adder @@ -283,7 +283,7 @@ final class TreeMap[K, +V] private (private val tree: RB.Tree[K, V])(implicit va } } override def equals(obj: Any): Boolean = obj match { - case that: TreeMap[K, _] if ordering == 
that.ordering => RB.entriesEqual(tree, that.tree) + case that: TreeMap[K @unchecked, _] if ordering == that.ordering => RB.entriesEqual(tree, that.tree) case _ => super.equals(obj) } diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala index 9b40536d5ff9..e51479ae657b 100644 --- a/src/library/scala/collection/immutable/TreeSet.scala +++ b/src/library/scala/collection/immutable/TreeSet.scala @@ -221,7 +221,7 @@ final class TreeSet[A] private[immutable] (private[immutable] val tree: RB.Tree[ } override def equals(obj: Any): Boolean = obj match { - case that: TreeSet[A] if ordering == that.ordering => RB.keysEqual(tree, that.tree) + case that: TreeSet[A @unchecked] if ordering == that.ordering => RB.keysEqual(tree, that.tree) case _ => super.equals(obj) } diff --git a/src/library/scala/math/Equiv.scala b/src/library/scala/math/Equiv.scala index efb281ceed3d..f615963f1f04 100644 --- a/src/library/scala/math/Equiv.scala +++ b/src/library/scala/math/Equiv.scala @@ -87,7 +87,7 @@ object Equiv extends LowPriorityEquiv { override def equals(obj: scala.Any): Boolean = obj match { case that: AnyRef if this eq that => true - case that: IterableEquiv[CC, T] => this.eqv == that.eqv + case that: IterableEquiv[_, _] => this.eqv == that.eqv case _ => false } override def hashCode(): Int = eqv.hashCode() * iterableSeed @@ -256,7 +256,7 @@ object Equiv extends LowPriorityEquiv { override def equals(obj: scala.Any): Boolean = obj match { case that: AnyRef if this eq that => true - case that: OptionEquiv[T] => this.eqv == that.eqv + case that: OptionEquiv[_] => this.eqv == that.eqv case _ => false } override def hashCode(): Int = eqv.hashCode() * optionSeed @@ -273,7 +273,7 @@ object Equiv extends LowPriorityEquiv { override def equals(obj: scala.Any): Boolean = obj match { case that: AnyRef if this eq that => true - case that: Tuple2Equiv[T1, T2] => + case that: Tuple2Equiv[_, _] => this.eqv1 == that.eqv1 && this.eqv2 == that.eqv2 case _ => false @@ -294,7 +294,7 @@ object Equiv extends LowPriorityEquiv { override def equals(obj: scala.Any): Boolean = obj match { case that: AnyRef if this eq that => true - case that: Tuple3Equiv[T1, T2, T3] => + case that: Tuple3Equiv[_, _, _] => this.eqv1 == that.eqv1 && this.eqv2 == that.eqv2 && this.eqv3 == that.eqv3 @@ -319,7 +319,7 @@ object Equiv extends LowPriorityEquiv { override def equals(obj: scala.Any): Boolean = obj match { case that: AnyRef if this eq that => true - case that: Tuple4Equiv[T1, T2, T3, T4] => + case that: Tuple4Equiv[_, _, _, _] => this.eqv1 == that.eqv1 && this.eqv2 == that.eqv2 && this.eqv3 == that.eqv3 && @@ -347,7 +347,7 @@ object Equiv extends LowPriorityEquiv { override def equals(obj: scala.Any): Boolean = obj match { case that: AnyRef if this eq that => true - case that: Tuple5Equiv[T1, T2, T3, T4, T5] => + case that: Tuple5Equiv[_, _, _, _, _] => this.eqv1 == that.eqv1 && this.eqv2 == that.eqv2 && this.eqv3 == that.eqv3 && @@ -378,7 +378,7 @@ object Equiv extends LowPriorityEquiv { override def equals(obj: scala.Any): Boolean = obj match { case that: AnyRef if this eq that => true - case that: Tuple6Equiv[T1, T2, T3, T4, T5, T6] => + case that: Tuple6Equiv[_, _, _, _, _, _] => this.eqv1 == that.eqv1 && this.eqv2 == that.eqv2 && this.eqv3 == that.eqv3 && @@ -412,7 +412,7 @@ object Equiv extends LowPriorityEquiv { override def equals(obj: scala.Any): Boolean = obj match { case that: AnyRef if this eq that => true - case that: Tuple7Equiv[T1, T2, T3, T4, T5, T6, T7] => + 
case that: Tuple7Equiv[_, _, _, _, _, _, _] => this.eqv1 == that.eqv1 && this.eqv2 == that.eqv2 && this.eqv3 == that.eqv3 && @@ -449,7 +449,7 @@ object Equiv extends LowPriorityEquiv { override def equals(obj: scala.Any): Boolean = obj match { case that: AnyRef if this eq that => true - case that: Tuple8Equiv[T1, T2, T3, T4, T5, T6, T7, T8] => + case that: Tuple8Equiv[_, _, _, _, _, _, _, _] => this.eqv1 == that.eqv1 && this.eqv2 == that.eqv2 && this.eqv3 == that.eqv3 && @@ -489,7 +489,7 @@ object Equiv extends LowPriorityEquiv { override def equals(obj: scala.Any): Boolean = obj match { case that: AnyRef if this eq that => true - case that: Tuple9Equiv[T1, T2, T3, T4, T5, T6, T7, T8, T9] => + case that: Tuple9Equiv[_, _, _, _, _, _, _, _, _] => this.eqv1 == that.eqv1 && this.eqv2 == that.eqv2 && this.eqv3 == that.eqv3 && diff --git a/src/library/scala/math/Ordering.scala b/src/library/scala/math/Ordering.scala index c1adece01993..a7756b9f8638 100644 --- a/src/library/scala/math/Ordering.scala +++ b/src/library/scala/math/Ordering.scala @@ -258,7 +258,7 @@ object Ordering extends LowPriorityOrderingImplicits { override def equals(obj: scala.Any): Boolean = obj match { case that: AnyRef if this eq that => true - case that: Reverse[T] => this.outer == that.outer + case that: Reverse[_] => this.outer == that.outer case _ => false } override def hashCode(): Int = outer.hashCode() * reverseSeed @@ -279,7 +279,7 @@ object Ordering extends LowPriorityOrderingImplicits { override def equals(obj: scala.Any): Boolean = obj match { case that: AnyRef if this eq that => true - case that: IterableOrdering[CC, T] => this.ord == that.ord + case that: IterableOrdering[_, _] => this.ord == that.ord case _ => false } override def hashCode(): Int = ord.hashCode() * iterableSeed @@ -591,7 +591,7 @@ object Ordering extends LowPriorityOrderingImplicits { override def equals(obj: scala.Any): Boolean = obj match { case that: AnyRef if this eq that => true - case that: OptionOrdering[T] => this.optionOrdering == that.optionOrdering + case that: OptionOrdering[_] => this.optionOrdering == that.optionOrdering case _ => false } override def hashCode(): Int = optionOrdering.hashCode() * optionSeed @@ -622,7 +622,7 @@ object Ordering extends LowPriorityOrderingImplicits { override def equals(obj: scala.Any): Boolean = obj match { case that: AnyRef if this eq that => true - case that: Tuple2Ordering[T1, T2] => + case that: Tuple2Ordering[_, _] => this.ord1 == that.ord1 && this.ord2 == that.ord2 case _ => false @@ -646,7 +646,7 @@ object Ordering extends LowPriorityOrderingImplicits { override def equals(obj: scala.Any): Boolean = obj match { case that: AnyRef if this eq that => true - case that: Tuple3Ordering[T1, T2, T3] => + case that: Tuple3Ordering[_, _, _] => this.ord1 == that.ord1 && this.ord2 == that.ord2 && this.ord3 == that.ord3 @@ -675,7 +675,7 @@ object Ordering extends LowPriorityOrderingImplicits { override def equals(obj: scala.Any): Boolean = obj match { case that: AnyRef if this eq that => true - case that: Tuple4Ordering[T1, T2, T3, T4] => + case that: Tuple4Ordering[_, _, _, _] => this.ord1 == that.ord1 && this.ord2 == that.ord2 && this.ord3 == that.ord3 && @@ -708,7 +708,7 @@ object Ordering extends LowPriorityOrderingImplicits { override def equals(obj: scala.Any): Boolean = obj match { case that: AnyRef if this eq that => true - case that: Tuple5Ordering[T1, T2, T3, T4, T5] => + case that: Tuple5Ordering[_, _, _, _, _] => this.ord1 == that.ord1 && this.ord2 == that.ord2 && this.ord3 == that.ord3 && 
@@ -745,7 +745,7 @@ object Ordering extends LowPriorityOrderingImplicits { override def equals(obj: scala.Any): Boolean = obj match { case that: AnyRef if this eq that => true - case that: Tuple6Ordering[T1, T2, T3, T4, T5, T6] => + case that: Tuple6Ordering[_, _, _, _, _, _] => this.ord1 == that.ord1 && this.ord2 == that.ord2 && this.ord3 == that.ord3 && @@ -786,7 +786,7 @@ object Ordering extends LowPriorityOrderingImplicits { override def equals(obj: scala.Any): Boolean = obj match { case that: AnyRef if this eq that => true - case that: Tuple7Ordering[T1, T2, T3, T4, T5, T6, T7] => + case that: Tuple7Ordering[_, _, _, _, _, _, _] => this.ord1 == that.ord1 && this.ord2 == that.ord2 && this.ord3 == that.ord3 && @@ -831,7 +831,7 @@ object Ordering extends LowPriorityOrderingImplicits { override def equals(obj: scala.Any): Boolean = obj match { case that: AnyRef if this eq that => true - case that: Tuple8Ordering[T1, T2, T3, T4, T5, T6, T7, T8] => + case that: Tuple8Ordering[_, _, _, _, _, _, _, _] => this.ord1 == that.ord1 && this.ord2 == that.ord2 && this.ord3 == that.ord3 && @@ -880,7 +880,7 @@ object Ordering extends LowPriorityOrderingImplicits { override def equals(obj: scala.Any): Boolean = obj match { case that: AnyRef if this eq that => true - case that: Tuple9Ordering[T1, T2, T3, T4, T5, T6, T7, T8, T9] => + case that: Tuple9Ordering[_, _, _, _, _, _, _, _, _] => this.ord1 == that.ord1 && this.ord2 == that.ord2 && this.ord3 == that.ord3 && From 7726038fce151b849943d6e5a3e31926389632d4 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Thu, 24 Jun 2021 19:39:32 +0200 Subject: [PATCH 278/769] More correcter type patterns in synthetic code Synthetic `case _: Foo[_]` patterns were generated with a weird `TypeRef(pre, fooSymbol, args)` where the `args` are Foo's type parameters. Now we generate something closer to what the type checker would do. --- .../tools/nsc/typechecker/Checkable.scala | 8 +-- .../scala/tools/nsc/typechecker/Infer.scala | 4 +- .../tools/nsc/typechecker/PatternTypers.scala | 17 ++++-- .../nsc/typechecker/SyntheticMethods.scala | 8 +-- test/files/neg/t12408.check | 17 +++++- test/files/run/patmat-behavior.check | 60 +++++++++---------- test/files/run/patmat-exprs.scala | 2 +- test/files/run/t12405.check | 2 +- test/macro-annot/run/kase/macro_kase_1.scala | 3 +- 9 files changed, 69 insertions(+), 52 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala index fb7e53f06ed1..559a8bd481f2 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala @@ -174,11 +174,9 @@ trait Checkable { // collect type args which are candidates for warning because uncheckable private def typeArgsInTopLevelType(tp: Type): Set[Type] = { def isUnwarnableTypeArg(arg: Type) = { - def isUnwarnableTypeArgSymbol(sym: Symbol) = { - sym.isTypeParameter || // dummy - sym.name.toTermName == nme.WILDCARD || // _ - nme.isVariableName(sym.name) // type variable - } + def isUnwarnableTypeArgSymbol(sym: Symbol) = + sym.name.toTermName == nme.WILDCARD || // don't warn for `case l: List[_]`. Here, `List[_]` is a TypeRef, the arg refers an abstract type symbol `_` + nme.isVariableName(sym.name) // don't warn for `x.isInstanceOf[List[_]]`. 
Here, `List[_]` is an existential, quantified sym has `isVariableName` uncheckedOk(arg) || // @unchecked T isUnwarnableTypeArgSymbol(arg.typeSymbolDirect) // has to be direct: see pos/t1439 } diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index 4fce2215fe15..7ece43627871 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -1256,7 +1256,7 @@ trait Infer extends Checkable { } } - def inferTypedPattern(tree0: Tree, pattp: Type, pt0: Type, canRemedy: Boolean): Type = { + def inferTypedPattern(tree0: Tree, pattp: Type, pt0: Type, canRemedy: Boolean, isUnapply: Boolean): Type = { val pt = abstractTypesToBounds(pt0) val ptparams = freeTypeParamsOfTerms(pt) val tpparams = freeTypeParamsOfTerms(pattp) @@ -1273,7 +1273,7 @@ trait Infer extends Checkable { return ErrorType } - checkCheckable(tree0, pattp, pt, inPattern = true, canRemedy = canRemedy) + checkCheckable(tree0, if (isUnapply) typer.applyTypeToWildcards(pattp) else pattp, pt, inPattern = true, canRemedy = canRemedy) if (pattp <:< pt) () else { debuglog("free type params (1) = " + tpparams) diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala index be8279b9bc9c..1c42cab12493 100644 --- a/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala @@ -72,6 +72,14 @@ trait PatternTypers { case tp => tp } + def applyTypeToWildcards(tp: Type) = tp match { + case tr @ TypeRef(pre, sym, args) if args.nonEmpty => + // similar to `typedBind` + def wld = context.owner.newAbstractType(tpnme.WILDCARD, sym.pos) setInfo TypeBounds.empty + copyTypeRef(tr, pre, sym, args.map(_ => wld.tpe)) + case t => t + } + def typedConstructorPattern(fun0: Tree, pt: Type): Tree = { // Do some ad-hoc overloading resolution and update the tree's symbol and type // do not update the symbol if the tree's symbol's type does not define an unapply member @@ -183,7 +191,7 @@ trait PatternTypers { case _ => extractor.nonEmpty } - val ownType = inferTypedPattern(tptTyped, tpe, pt, canRemedy) + val ownType = inferTypedPattern(tptTyped, tpe, pt, canRemedy, isUnapply = false) val treeTyped = treeCopy.Typed(tree, exprTyped, tptTyped) setType ownType extractor match { @@ -319,7 +327,7 @@ trait PatternTypers { val GenPolyType(freeVars, unappFormal) = freshArgType(unapplyType.skolemizeExistential(context.owner, tree)) val unapplyContext = context.makeNewScope(tree, context.owner) freeVars.foreach(unapplyContext.scope.enter) - val pattp = newTyper(unapplyContext).infer.inferTypedPattern(tree, unappFormal, pt, canRemedy = canRemedy) + val pattp = newTyper(unapplyContext).infer.inferTypedPattern(tree, unappFormal, pt, canRemedy = canRemedy, isUnapply = true) // turn any unresolved type variables in freevars into existential skolems val skolems = freeVars.map(fv => unapplyContext.owner.newExistentialSkolem(fv, fv)) pattp.substSym(freeVars, skolems) @@ -389,10 +397,7 @@ trait PatternTypers { } // only look at top-level type, can't (reliably) do anything about unchecked type args (in general) // but at least make a proper type before passing it elsewhere - val pt1 = pt.dealiasWiden match { - case tr @ TypeRef(pre, sym, args) if args.nonEmpty => copyTypeRef(tr, pre, sym, sym.typeParams map (_.tpeHK)) // replace actual type args with dummies - case pt1 => pt1 - } + val pt1 = 
applyTypeToWildcards(pt.dealiasWiden) if (isCheckable(pt1)) EmptyTree else resolveClassTag(pos, pt1) match { case tree if unapplyMember(tree.tpe).exists => tree diff --git a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala index 4097d6c3510b..f0e89af2ff4e 100644 --- a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala +++ b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala @@ -77,10 +77,8 @@ trait SyntheticMethods extends ast.TreeDSL { if (!syntheticsOk) return templ - val synthesizer = new ClassMethodSynthesis( - clazz0, - newTyper( if (reporter.hasErrors) context makeSilent false else context ) - ) + val typer = newTyper(if (reporter.hasErrors) context.makeSilent(false) else context) + val synthesizer = new ClassMethodSynthesis(clazz0, typer) import synthesizer._ if (clazz0 == AnyValClass || isPrimitiveValueClass(clazz0)) return { @@ -154,7 +152,7 @@ trait SyntheticMethods extends ast.TreeDSL { Match( Ident(eqmeth.firstParam), List( - CaseDef(Typed(Ident(nme.WILDCARD), TypeTree(clazz.tpe)), EmptyTree, TRUE), + CaseDef(Typed(Ident(nme.WILDCARD), TypeTree(typer.applyTypeToWildcards(clazz.tpe))), EmptyTree, TRUE), CaseDef(Ident(nme.WILDCARD), EmptyTree, FALSE) ) ) diff --git a/test/files/neg/t12408.check b/test/files/neg/t12408.check index 32641513cca1..33be21bb4ecd 100644 --- a/test/files/neg/t12408.check +++ b/test/files/neg/t12408.check @@ -7,9 +7,24 @@ t12408.scala:7: warning: abstract type B in type pattern t12408.Renderer[B] is u t12408.scala:8: warning: non-variable type argument Int in type pattern List[Int] (the underlying of List[Int]) is unchecked since it is eliminated by erasure def f3[B](xs: List[A]) = xs match { case _: List[Int] => } // warn ^ +t12408.scala:9: warning: abstract type A in type pattern t12408.Renderer[A] is unchecked since it is eliminated by erasure + def g = a match { case _: Renderer[A] => } // now also warn + ^ +t12408.scala:14: warning: the type test for pattern (A, B, C, D, E, F, G, H, I, J, K, L, M) cannot be checked at runtime because it has type parameters eliminated by erasure + case _: (A,B,C,D,E,F,G,H,I,J,K,L,M) => + ^ t12408.scala:17: warning: the type test for pattern (A, B, C, D, E, F, G, H, I, J, K, L, M) cannot be checked at runtime because it has type parameters eliminated by erasure case _: (A,B,C,D,E,F,G,H,I,J,K,L,M) => ^ +t12408.scala:22: warning: the type test for pattern (A, B, C, D, E, F, G, H, I, J, K, L, M) cannot be checked at runtime because it has type parameters eliminated by erasure + case _: (A,B,C,D,E,F,G,H,I,J,K,L,M) => + ^ +t12408.scala:47: warning: the type test for pattern t12408c.C[A,B] cannot be checked at runtime because it has type parameters eliminated by erasure + def test(t: T[A]) = t match { case _: C[A, B] => } // warn on B + ^ +t12408.scala:65: warning: the type test for pattern reported.Renderer[Page,Props] cannot be checked at runtime because it has type parameters eliminated by erasure + case r: Renderer[Page, Props] => 1 // warn as above + ^ error: No warnings can be incurred under -Werror. 
-4 warnings +9 warnings 1 error diff --git a/test/files/run/patmat-behavior.check b/test/files/run/patmat-behavior.check index e36e3add5503..ec81fbb143e2 100644 --- a/test/files/run/patmat-behavior.check +++ b/test/files/run/patmat-behavior.check @@ -1,91 +1,91 @@ -patmat-behavior.scala:82: warning: fruitless type test: a value of type s.C00[A] cannot also be a s.C10[A] +patmat-behavior.scala:82: warning: fruitless type test: a value of type s.C00[A] cannot also be a s.C10[_] def gd1[A](x: C00[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:82: warning: fruitless type test: a value of type s.C00[A] cannot also be a s.C20[A] +patmat-behavior.scala:82: warning: fruitless type test: a value of type s.C00[A] cannot also be a s.C20[_] def gd1[A](x: C00[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:82: warning: fruitless type test: a value of type s.C00[A] cannot also be a s.C01[A] +patmat-behavior.scala:82: warning: fruitless type test: a value of type s.C00[A] cannot also be a s.C01[_] def gd1[A](x: C00[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:82: warning: fruitless type test: a value of type s.C00[A] cannot also be a s.C11[A] +patmat-behavior.scala:82: warning: fruitless type test: a value of type s.C00[A] cannot also be a s.C11[_] def gd1[A](x: C00[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:82: warning: fruitless type test: a value of type s.C00[A] cannot also be a s.C21[A] +patmat-behavior.scala:82: warning: fruitless type test: a value of type s.C00[A] cannot also be a s.C21[_] def gd1[A](x: C00[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:83: warning: fruitless type test: a value of type s.C10[A] cannot also be a s.C00[A] +patmat-behavior.scala:83: warning: fruitless type test: a value of type s.C10[A] cannot also be a s.C00[_] def gd2[A](x: C10[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:83: warning: fruitless type test: a value of type s.C10[A] cannot also be a s.C20[A] +patmat-behavior.scala:83: warning: fruitless type test: a value of type s.C10[A] cannot also be a s.C20[_] def gd2[A](x: C10[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:83: warning: fruitless type test: a value of type s.C10[A] cannot also be a s.C01[A] +patmat-behavior.scala:83: warning: fruitless type test: a value of type s.C10[A] cannot also be a s.C01[_] def gd2[A](x: C10[A]) = x match { case G00() => ??? 
; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:83: warning: fruitless type test: a value of type s.C10[A] cannot also be a s.C11[A] +patmat-behavior.scala:83: warning: fruitless type test: a value of type s.C10[A] cannot also be a s.C11[_] def gd2[A](x: C10[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:83: warning: fruitless type test: a value of type s.C10[A] cannot also be a s.C21[A] +patmat-behavior.scala:83: warning: fruitless type test: a value of type s.C10[A] cannot also be a s.C21[_] def gd2[A](x: C10[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:84: warning: fruitless type test: a value of type s.C20[A] cannot also be a s.C00[A] +patmat-behavior.scala:84: warning: fruitless type test: a value of type s.C20[A] cannot also be a s.C00[_] def gd3[A](x: C20[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:84: warning: fruitless type test: a value of type s.C20[A] cannot also be a s.C10[A] +patmat-behavior.scala:84: warning: fruitless type test: a value of type s.C20[A] cannot also be a s.C10[_] def gd3[A](x: C20[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:84: warning: fruitless type test: a value of type s.C20[A] cannot also be a s.C01[A] +patmat-behavior.scala:84: warning: fruitless type test: a value of type s.C20[A] cannot also be a s.C01[_] def gd3[A](x: C20[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:84: warning: fruitless type test: a value of type s.C20[A] cannot also be a s.C11[A] +patmat-behavior.scala:84: warning: fruitless type test: a value of type s.C20[A] cannot also be a s.C11[_] def gd3[A](x: C20[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:84: warning: fruitless type test: a value of type s.C20[A] cannot also be a s.C21[A] +patmat-behavior.scala:84: warning: fruitless type test: a value of type s.C20[A] cannot also be a s.C21[_] def gd3[A](x: C20[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:85: warning: fruitless type test: a value of type s.C01[A] cannot also be a s.C00[A] +patmat-behavior.scala:85: warning: fruitless type test: a value of type s.C01[A] cannot also be a s.C00[_] def gd4[A](x: C01[A]) = x match { case G00() => ??? 
; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:85: warning: fruitless type test: a value of type s.C01[A] cannot also be a s.C10[A] +patmat-behavior.scala:85: warning: fruitless type test: a value of type s.C01[A] cannot also be a s.C10[_] def gd4[A](x: C01[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:85: warning: fruitless type test: a value of type s.C01[A] cannot also be a s.C20[A] +patmat-behavior.scala:85: warning: fruitless type test: a value of type s.C01[A] cannot also be a s.C20[_] def gd4[A](x: C01[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:85: warning: fruitless type test: a value of type s.C01[A] cannot also be a s.C11[A] +patmat-behavior.scala:85: warning: fruitless type test: a value of type s.C01[A] cannot also be a s.C11[_] def gd4[A](x: C01[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:85: warning: fruitless type test: a value of type s.C01[A] cannot also be a s.C21[A] +patmat-behavior.scala:85: warning: fruitless type test: a value of type s.C01[A] cannot also be a s.C21[_] def gd4[A](x: C01[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:86: warning: fruitless type test: a value of type s.C11[A] cannot also be a s.C00[A] +patmat-behavior.scala:86: warning: fruitless type test: a value of type s.C11[A] cannot also be a s.C00[_] def gd5[A](x: C11[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:86: warning: fruitless type test: a value of type s.C11[A] cannot also be a s.C10[A] +patmat-behavior.scala:86: warning: fruitless type test: a value of type s.C11[A] cannot also be a s.C10[_] def gd5[A](x: C11[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:86: warning: fruitless type test: a value of type s.C11[A] cannot also be a s.C20[A] +patmat-behavior.scala:86: warning: fruitless type test: a value of type s.C11[A] cannot also be a s.C20[_] def gd5[A](x: C11[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:86: warning: fruitless type test: a value of type s.C11[A] cannot also be a s.C01[A] +patmat-behavior.scala:86: warning: fruitless type test: a value of type s.C11[A] cannot also be a s.C01[_] def gd5[A](x: C11[A]) = x match { case G00() => ??? 
; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:86: warning: fruitless type test: a value of type s.C11[A] cannot also be a s.C21[A] +patmat-behavior.scala:86: warning: fruitless type test: a value of type s.C11[A] cannot also be a s.C21[_] def gd5[A](x: C11[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:87: warning: fruitless type test: a value of type s.C21[A] cannot also be a s.C00[A] +patmat-behavior.scala:87: warning: fruitless type test: a value of type s.C21[A] cannot also be a s.C00[_] def gd6[A](x: C21[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:87: warning: fruitless type test: a value of type s.C21[A] cannot also be a s.C10[A] +patmat-behavior.scala:87: warning: fruitless type test: a value of type s.C21[A] cannot also be a s.C10[_] def gd6[A](x: C21[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:87: warning: fruitless type test: a value of type s.C21[A] cannot also be a s.C20[A] +patmat-behavior.scala:87: warning: fruitless type test: a value of type s.C21[A] cannot also be a s.C20[_] def gd6[A](x: C21[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:87: warning: fruitless type test: a value of type s.C21[A] cannot also be a s.C01[A] +patmat-behavior.scala:87: warning: fruitless type test: a value of type s.C21[A] cannot also be a s.C01[_] def gd6[A](x: C21[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ -patmat-behavior.scala:87: warning: fruitless type test: a value of type s.C21[A] cannot also be a s.C11[A] +patmat-behavior.scala:87: warning: fruitless type test: a value of type s.C21[A] cannot also be a s.C11[_] def gd6[A](x: C21[A]) = x match { case G00() => ??? ; case G10(x) => x ; case G20(x, y) => x ; case G01(xs @ _*) => xs.head ; case G11(x, ys @ _*) => x ; case G21(x, y, zs @ _*) => x } ^ patmat-behavior.scala:43: warning: match may not be exhaustive. 
diff --git a/test/files/run/patmat-exprs.scala b/test/files/run/patmat-exprs.scala index 464a6920ada4..0f7b71803a3f 100644 --- a/test/files/run/patmat-exprs.scala +++ b/test/files/run/patmat-exprs.scala @@ -301,7 +301,7 @@ trait Pattern { private def optimizeWith(f: Expr[T] => Expr[T]): Expr[T] = { f(mapArgs(EndoFunction[Expr[_]]( - a => a match { case x: Expr[T] => x.optimizeWith(f) } + a => a match { case x: Expr[T @unchecked] => x.optimizeWith(f) } ))) } diff --git a/test/files/run/t12405.check b/test/files/run/t12405.check index a7a8f9bd39f6..439f2ccf16b9 100644 --- a/test/files/run/t12405.check +++ b/test/files/run/t12405.check @@ -24,7 +24,7 @@ package { final def equals$extension[A]($this: C[A])(x$1: Any): Boolean = { case val x1: Any = x$1; case5(){ - if (x1.isInstanceOf[C[A]]) + if (x1.isInstanceOf[C[$this._]]) matchEnd4(true) else case6() diff --git a/test/macro-annot/run/kase/macro_kase_1.scala b/test/macro-annot/run/kase/macro_kase_1.scala index abd75e2304c6..a1a9b420443d 100644 --- a/test/macro-annot/run/kase/macro_kase_1.scala +++ b/test/macro-annot/run/kase/macro_kase_1.scala @@ -74,6 +74,7 @@ object kaseMacro { val primaryParams = primaryParamss.head val secondaryParamss = primaryParamss.tail val ourPolyType = if (tparams.nonEmpty) AppliedTypeTree(Ident(name), tparams.map(tparam => Ident(tparam.name))) else Ident(name) + val ourWildType = if (tparams.nonEmpty) AppliedTypeTree(Ident(name), tparams.map(_ => Bind(typeNames.WILDCARD, EmptyTree))) else Ident(name) val tparamUnderscores = tparams.zipWithIndex.map{ case (tdef, i) => TypeDef(makeDeferredSynthetic(unmakeParam(tdef.mods)), TypeName("x$" + (i+1)), tdef.tparams, tdef.rhs) } val ourExistentialType = ExistentialTypeTree(AppliedTypeTree(Ident(name), tparamUnderscores.map(tdef => Ident(tdef.name))), tparamUnderscores) @@ -154,7 +155,7 @@ object kaseMacro { Apply(Select(thatC, TermName("canEqual")), List(This(name))) } def sameTypeCheck = { - val ifSameType = CaseDef(Typed(Ident(termNames.WILDCARD), ourPolyType), EmptyTree, Literal(Constant(true))) + val ifSameType = CaseDef(Typed(Ident(termNames.WILDCARD), ourWildType), EmptyTree, Literal(Constant(true))) val otherwise = CaseDef(Ident(termNames.WILDCARD), EmptyTree, Literal(Constant(false))) Match(Ident(equalsParam.name), List(ifSameType, otherwise)) } From 35defb9989f266d3afd3418af32896ff9bbfc0ba Mon Sep 17 00:00:00 2001 From: Martijn Hoekstra Date: Fri, 2 Jul 2021 13:37:42 +0200 Subject: [PATCH 279/769] union/intersection --- src/library-aux/scala/Any.scala | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/library-aux/scala/Any.scala b/src/library-aux/scala/Any.scala index 39f3f635572a..0f769be8e99a 100644 --- a/src/library-aux/scala/Any.scala +++ b/src/library-aux/scala/Any.scala @@ -121,7 +121,8 @@ abstract class Any { * For example, `List(1, 2, 3).isInstanceOf[List[String]]` will return true. * - `T0` is some singleton type `x.type` or literal `x`: this method returns `this.eq(x)`. * For example, `x.isInstanceOf[1]` is equivalent to `x.eq(1)` - * - `T0` is a union `X with Y`: this method is equivalent to `x.isInstanceOf[X] && x.isInstanceOf[Y]` + * - `T0` is an intersection `X with Y` or `X & Y: this method is equivalent to `x.isInstanceOf[X] && x.isInstanceOf[Y]` + * - `T0` is a union `X | Y`: this method is equivalent to `x.isInstanceOf[X] || x.isInstanceOf[Y]` * - `T0` is a type parameter or an abstract type member: this method is equivalent * to `isInstanceOf[U]` where `U` is `A`'s upper bound, `Any` if `A` is unbounded. 
* For example, `x.isInstanceOf[A]` where `A` is an unbounded type parameter From e9446cc1938d905cc9607ba30164f5b811198ccd Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Fri, 2 Jul 2021 12:09:10 -0700 Subject: [PATCH 280/769] Tweaks for readability, provisional test --- .../tools/nsc/typechecker/Checkable.scala | 15 +++--- .../tools/nsc/typechecker/PatternTypers.scala | 2 +- test/files/neg/patmat-exprs-b.check | 21 ++++++++ test/files/neg/patmat-exprs-b.scala | 53 +++++++++++++++++++ test/files/neg/unchecked-refinement.check | 2 +- test/files/neg/unchecked-refinement.scala | 2 +- test/files/pos/patmat-exprs-b.scala | 51 ++++++++++++++++++ 7 files changed, 135 insertions(+), 11 deletions(-) create mode 100644 test/files/neg/patmat-exprs-b.check create mode 100644 test/files/neg/patmat-exprs-b.scala create mode 100644 test/files/pos/patmat-exprs-b.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala index 559a8bd481f2..481531a5951d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala @@ -173,13 +173,12 @@ trait Checkable { } // collect type args which are candidates for warning because uncheckable private def typeArgsInTopLevelType(tp: Type): Set[Type] = { - def isUnwarnableTypeArg(arg: Type) = { - def isUnwarnableTypeArgSymbol(sym: Symbol) = + def isUnwarnableTypeArg(arg: Type) = + uncheckedOk(arg) || { // @unchecked T + val sym = arg.typeSymbolDirect // has to be direct: see pos/t1439 sym.name.toTermName == nme.WILDCARD || // don't warn for `case l: List[_]`. Here, `List[_]` is a TypeRef, the arg refers an abstract type symbol `_` nme.isVariableName(sym.name) // don't warn for `x.isInstanceOf[List[_]]`. 
Here, `List[_]` is an existential, quantified sym has `isVariableName` - uncheckedOk(arg) || // @unchecked T - isUnwarnableTypeArgSymbol(arg.typeSymbolDirect) // has to be direct: see pos/t1439 - } + } var res: Set[Type] = Set.empty[Type] def add(t: Type): Unit = if (!isUnwarnableTypeArg(t)) res += t def loop(tp: Type): Unit = tp match { @@ -364,13 +363,13 @@ trait Checkable { } else if (checker.result == RuntimeCheckable) { // register deferred checking for sealed types in current run - @`inline` def Xsym = X.typeSymbol - @`inline` def Psym = P.typeSymbol - @`inline` def isSealedOrFinal(sym: Symbol) = sym.isSealed || sym.isFinal def recheckFruitless(): Unit = { val rechecker = new CheckabilityChecker(X, P, isRecheck = true) if (rechecker.neverMatches) neverMatchesWarning(rechecker) } + def isSealedOrFinal(sym: Symbol) = sym.isSealed || sym.isFinal + val Xsym = X.typeSymbol + val Psym = P.typeSymbol if (isSealedOrFinal(Xsym) && isSealedOrFinal(Psym) && (currentRun.compiles(Xsym) || currentRun.compiles(Psym))) context.unit.toCheck += (() => recheckFruitless()) } diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala index 1c42cab12493..176867663f40 100644 --- a/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala @@ -191,7 +191,7 @@ trait PatternTypers { case _ => extractor.nonEmpty } - val ownType = inferTypedPattern(tptTyped, tpe, pt, canRemedy, isUnapply = false) + val ownType = inferTypedPattern(tptTyped, tpe, pt, canRemedy = canRemedy, isUnapply = false) val treeTyped = treeCopy.Typed(tree, exprTyped, tptTyped) setType ownType extractor match { diff --git a/test/files/neg/patmat-exprs-b.check b/test/files/neg/patmat-exprs-b.check new file mode 100644 index 000000000000..c1a39e7f5565 --- /dev/null +++ b/test/files/neg/patmat-exprs-b.check @@ -0,0 +1,21 @@ +patmat-exprs-b.scala:42: warning: parameter value num in class Add is never used + case class Add[T](args: Iterable[Expr[T]])(implicit @nowarn num: NumericOps[T]) extends ManyArg[T] { + ^ +patmat-exprs-b.scala:46: warning: parameter value num in class Add2 is never used + case class Add2[T](left: Expr[T], right: Expr[T])(implicit @nowarn num: NumericOps[T]) extends TwoArg[T] { + ^ +patmat-exprs-b.scala:49: warning: parameter value num in class Add3 is never used + case class Add3[T](a1: Expr[T], a2: Expr[T], a3: Expr[T])(implicit @nowarn num: NumericOps[T]) extends ManyArg[T] { + ^ +patmat-exprs-b.scala:42: warning: @nowarn annotation does not suppress any warnings + case class Add[T](args: Iterable[Expr[T]])(implicit @nowarn num: NumericOps[T]) extends ManyArg[T] { + ^ +patmat-exprs-b.scala:46: warning: @nowarn annotation does not suppress any warnings + case class Add2[T](left: Expr[T], right: Expr[T])(implicit @nowarn num: NumericOps[T]) extends TwoArg[T] { + ^ +patmat-exprs-b.scala:49: warning: @nowarn annotation does not suppress any warnings + case class Add3[T](a1: Expr[T], a2: Expr[T], a3: Expr[T])(implicit @nowarn num: NumericOps[T]) extends ManyArg[T] { + ^ +error: No warnings can be incurred under -Werror. 
+6 warnings +1 error diff --git a/test/files/neg/patmat-exprs-b.scala b/test/files/neg/patmat-exprs-b.scala new file mode 100644 index 000000000000..17b2ed63473b --- /dev/null +++ b/test/files/neg/patmat-exprs-b.scala @@ -0,0 +1,53 @@ +// scalac: -Werror -Xlint +// + +import annotation.nowarn + +trait Pattern { + + trait NumericOps[T] extends Serializable { + + def zero: T + + def add(a: T, b: T): T + def add(a: T, b: T, c: T): T = add(a, add(b, c)) + + def sum(terms: Iterable[T]) = terms.foldLeft(zero)(add) + def sum(terms: Iterator[T]) = terms.foldLeft(zero)(add) + } + + trait Expr[T] { + + /** Returns arguments of this operator */ + def args: Iterable[Expr[_]] + + def + (other: Expr[T])(implicit n: NumericOps[T]) = Add(List(this, other)) + + def specialize(implicit num: NumericOps[T]): Expr[T] = + this match { + case Add(Seq(a, b)) => Add2(a, b) + case Add(Seq(a, b, c)) => Add3(a, b, c) + case x => x + } + } + + trait TwoArg[T] extends Expr[T] { + val left: Expr[T] + val right: Expr[T] + val args = List(left, right) + } + + trait ManyArg[T] extends Expr[T] + + case class Add[T](args: Iterable[Expr[T]])(implicit @nowarn num: NumericOps[T]) extends ManyArg[T] { + override def toString = "(" + args.mkString(" + ") + ")" + } + + case class Add2[T](left: Expr[T], right: Expr[T])(implicit @nowarn num: NumericOps[T]) extends TwoArg[T] { + override def toString = "(" + left + " + " + right + ")" + } + case class Add3[T](a1: Expr[T], a2: Expr[T], a3: Expr[T])(implicit @nowarn num: NumericOps[T]) extends ManyArg[T] { + val args = List(a1, a2, a3) + override def toString = "(" + a1 + " + " + a2 + " + " + a3 + ")" + } +} diff --git a/test/files/neg/unchecked-refinement.check b/test/files/neg/unchecked-refinement.check index 8a65d1a3b08a..8ef312f91ca5 100644 --- a/test/files/neg/unchecked-refinement.check +++ b/test/files/neg/unchecked-refinement.check @@ -5,7 +5,7 @@ unchecked-refinement.scala:21: warning: the type test for pattern Foo[Any,U,V] c /* warn */ case _: Foo[Any, U, V] if b => () ^ unchecked-refinement.scala:25: warning: a pattern match on a refinement type is unchecked - /* nowarn - todo */ case x: AnyRef { def bippy: Int } if b => x.bippy // this could/should do an instance check and not warn + /* nowarn - todo */ case x: AnyRef { def bippy: Int } if b => x.bippy // this could/should do an instance check and not warn // dotty warns under reflectiveSelectable ^ unchecked-refinement.scala:26: warning: a pattern match on a refinement type is unchecked /* nowarn - todo */ case x: AnyRef { def size: Int } if b => x.size // this could/should do a static conformance test and not warn diff --git a/test/files/neg/unchecked-refinement.scala b/test/files/neg/unchecked-refinement.scala index 5902a442ae13..2d3b27eda23a 100644 --- a/test/files/neg/unchecked-refinement.scala +++ b/test/files/neg/unchecked-refinement.scala @@ -22,7 +22,7 @@ class A { } def f4(xs: List[Int]) = xs match { - /* nowarn - todo */ case x: AnyRef { def bippy: Int } if b => x.bippy // this could/should do an instance check and not warn + /* nowarn - todo */ case x: AnyRef { def bippy: Int } if b => x.bippy // this could/should do an instance check and not warn // dotty warns under reflectiveSelectable /* nowarn - todo */ case x: AnyRef { def size: Int } if b => x.size // this could/should do a static conformance test and not warn /* nowarn */ case x: ((AnyRef { def size: Int }) @unchecked) if b => x.size } diff --git a/test/files/pos/patmat-exprs-b.scala b/test/files/pos/patmat-exprs-b.scala new file mode 100644 index 
000000000000..426419a0c8ee --- /dev/null +++ b/test/files/pos/patmat-exprs-b.scala @@ -0,0 +1,51 @@ + +import annotation.nowarn + +trait Pattern { + + trait NumericOps[T] extends Serializable { + + def zero: T + + def add(a: T, b: T): T + def add(a: T, b: T, c: T): T = add(a, add(b, c)) + + def sum(terms: Iterable[T]) = terms.foldLeft(zero)(add) + def sum(terms: Iterator[T]) = terms.foldLeft(zero)(add) + } + + trait Expr[T] { + + /** Returns arguments of this operator */ + def args: Iterable[Expr[_]] + + def + (other: Expr[T])(implicit n: NumericOps[T]) = Add(List(this, other)) + + def specialize(implicit num: NumericOps[T]): Expr[T] = + this match { + case Add(Seq(a, b)) => Add2(a, b) + case Add(Seq(a, b, c)) => Add3(a, b, c) + case x => x + } + } + + trait TwoArg[T] extends Expr[T] { + val left: Expr[T] + val right: Expr[T] + val args = List(left, right) + } + + trait ManyArg[T] extends Expr[T] + + case class Add[T](args: Iterable[Expr[T]])(implicit @nowarn num: NumericOps[T]) extends ManyArg[T] { + override def toString = "(" + args.mkString(" + ") + ")" + } + + case class Add2[T](left: Expr[T], right: Expr[T])(implicit @nowarn num: NumericOps[T]) extends TwoArg[T] { + override def toString = "(" + left + " + " + right + ")" + } + case class Add3[T](a1: Expr[T], a2: Expr[T], a3: Expr[T])(implicit @nowarn num: NumericOps[T]) extends ManyArg[T] { + val args = List(a1, a2, a3) + override def toString = "(" + a1 + " + " + a2 + " + " + a3 + ")" + } +} From c524cdd8ccf5b5de072ca96b863cc15a7aaf43eb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E6=A2=A6=E5=A2=83=E8=BF=B7=E7=A6=BB?= Date: Sun, 4 Jul 2021 17:13:30 +0800 Subject: [PATCH 281/769] scala/bug#12395 add default implement for `redrawLine` --- .../scala/tools/nsc/interpreter/jline/Reader.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/repl-frontend/scala/tools/nsc/interpreter/jline/Reader.scala b/src/repl-frontend/scala/tools/nsc/interpreter/jline/Reader.scala index 7302966ac16d..2f1c5fedf958 100644 --- a/src/repl-frontend/scala/tools/nsc/interpreter/jline/Reader.scala +++ b/src/repl-frontend/scala/tools/nsc/interpreter/jline/Reader.scala @@ -44,7 +44,7 @@ class Reader private ( case _: EndOfFileException | _: UserInterruptException => reader.getBuffer.delete() ; null } } - def redrawLine(): Unit = ??? + def redrawLine(): Unit = () //see https://github.com/scala/bug/issues/12395, SimpleReader#redrawLine also use `()` def reset(): Unit = accumulator.reset() override def close(): Unit = terminal.close() From 2b6cfc07ffb7d99bc1191c2e7bd9a69d2612f475 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Mon, 5 Jul 2021 19:07:31 -0700 Subject: [PATCH 282/769] Normalize partests for classpath and stderr DirectTest supplies `-usejavacp` in `extraSettings`; there are two reflect tests that turn it off. `-d` is supplied with settings, so it is not necessary to specify it. Capturing stderr to the log file was added in 2013, so remove old cruft. 
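
For illustration only (not part of the diff below), a hypothetical minimal test under the
new conventions reduces to roughly the following sketch; the class name `C` is a placeholder:

    import scala.tools.partest.DirectTest

    object Test extends DirectTest {
      // DirectTest now supplies -usejavacp via extraSettings and -d via settings,
      // so most tests need no settings override at all
      def code = "class C"

      // stderr is already captured to the log file, so no Console.withErr(System.out) wrapper
      def show(): Unit = compile()
    }

Tests that still need extra classpath entries override extraSettings and can use the new
pathOf helper, as in the t5717 and sd275 changes below.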
--- .../scala/tools/partest/DirectTest.scala | 4 +++- test/files/run/analyzerPlugins.scala | 2 -- test/files/run/annotatedRetyping.scala | 2 -- test/files/run/delambdafy_t6028.scala | 5 +---- test/files/run/delambdafy_t6555.scala | 5 +---- .../run/delambdafy_uncurry_byname_inline.scala | 5 +---- .../run/delambdafy_uncurry_byname_method.scala | 5 +---- test/files/run/delambdafy_uncurry_inline.scala | 6 +----- test/files/run/delambdafy_uncurry_method.scala | 6 +----- test/files/run/dynamic-applyDynamic.scala | 6 +----- test/files/run/dynamic-applyDynamicNamed.scala | 6 +----- test/files/run/dynamic-selectDynamic.scala | 6 +----- test/files/run/dynamic-updateDynamic.scala | 6 +----- test/files/run/existential-rangepos.scala | 2 +- test/files/run/icode-reader-dead-code.scala | 4 ++-- test/files/run/large_class.scala | 7 +------ test/files/run/large_code.scala | 7 +------ test/files/run/literals-parsing.scala | 4 +--- test/files/run/macroPlugins-namerHooks.scala | 1 - test/files/run/maxerrs.scala | 2 -- test/files/run/patmat-no-inline-isEmpty.scala | 6 +++--- test/files/run/patmat-no-inline-unapply.scala | 6 +++--- test/files/run/patmat-origtp-switch.scala | 6 +----- test/files/run/patmat-seq.scala | 6 +----- test/files/run/sbt-icode-interface.scala | 2 +- test/files/run/sd187.scala | 7 +------ test/files/run/sd275.scala | 2 +- test/files/run/string-switch-pos.scala | 2 +- test/files/run/t10203.scala | 6 +----- test/files/run/t10344.scala | 6 +----- test/files/run/t10751.scala | 6 +----- test/files/run/t11385.scala | 2 +- test/files/run/t11731.scala | 2 +- test/files/run/t12405.scala | 4 +--- test/files/run/t4841-no-plugin.scala | 2 -- test/files/run/t5463.scala | 2 +- test/files/run/t5545.scala | 8 ++------ test/files/run/t5603.scala | 10 ++-------- test/files/run/t5717.scala | 10 ++++------ test/files/run/t5905-features.scala | 2 -- test/files/run/t5905b-features.scala | 2 -- test/files/run/t5938.scala | 10 ++++------ test/files/run/t5940.scala | 6 +++--- test/files/run/t6028.scala | 6 +----- test/files/run/t6288.scala | 6 +----- test/files/run/t6440.scala | 4 ++-- test/files/run/t6440b.scala | 4 ++-- test/files/run/t6502.scala | 3 +-- test/files/run/t6555.scala | 6 +----- test/files/run/t7271.scala | 8 +------- test/files/run/t7876.scala | 1 - test/files/run/t8433.scala | 2 +- test/files/run/t8502.scala | 4 ++-- test/files/run/t8502b.scala | 4 ++-- test/files/run/t8907.scala | 2 +- test/files/run/t9097.scala | 2 +- test/files/run/t9437b.scala | 15 ++++----------- ..._without_scala_reflect_manifest_lookup.scala | 2 +- ...s_without_scala_reflect_typetag_lookup.scala | 13 ++++++++----- ...scala_reflect_typetag_manifest_interop.scala | 17 +++++++++-------- test/scaladoc/run/t5527.scala | 7 ++----- 61 files changed, 93 insertions(+), 219 deletions(-) diff --git a/src/partest/scala/tools/partest/DirectTest.scala b/src/partest/scala/tools/partest/DirectTest.scala index 229b2715df84..17de444bb7c4 100644 --- a/src/partest/scala/tools/partest/DirectTest.scala +++ b/src/partest/scala/tools/partest/DirectTest.scala @@ -42,8 +42,10 @@ abstract class DirectTest { def testPath = SFile(sys.props("partest.test-path")) def testOutput = Directory(sys.props("partest.output")) + protected def pathOf(locations: String*) = locations.mkString(sys.props("path.separator")) + // override to add additional settings besides -d testOutput.path - def extraSettings: String = "" + def extraSettings: String = "-usejavacp" // a default Settings object using only extraSettings def settings: Settings = 
newSettings(CommandLineParser.tokenize(extraSettings)) // settings factory using given args and also debug settings diff --git a/test/files/run/analyzerPlugins.scala b/test/files/run/analyzerPlugins.scala index 09c541366922..81b085d74fcb 100644 --- a/test/files/run/analyzerPlugins.scala +++ b/test/files/run/analyzerPlugins.scala @@ -3,8 +3,6 @@ import scala.tools.nsc._ object Test extends DirectTest { - override def extraSettings: String = "-usejavacp" - def code = """ class testAnn extends annotation.TypeConstraint diff --git a/test/files/run/annotatedRetyping.scala b/test/files/run/annotatedRetyping.scala index 556e2ffcedef..865b6aad1c71 100644 --- a/test/files/run/annotatedRetyping.scala +++ b/test/files/run/annotatedRetyping.scala @@ -2,8 +2,6 @@ import scala.tools.partest._ object Test extends DirectTest { - override def extraSettings: String = "-usejavacp" - def code = """ class testAnn extends annotation.Annotation diff --git a/test/files/run/delambdafy_t6028.scala b/test/files/run/delambdafy_t6028.scala index 2eee66eb3000..5e1ed7d294f8 100644 --- a/test/files/run/delambdafy_t6028.scala +++ b/test/files/run/delambdafy_t6028.scala @@ -12,8 +12,5 @@ object Test extends DirectTest { |} |""".stripMargin.trim - override def show(): Unit = - Console.withErr(System.out) { - compile() - } + override def show(): Unit = compile() } diff --git a/test/files/run/delambdafy_t6555.scala b/test/files/run/delambdafy_t6555.scala index 2ee5955883f6..93839ecf950c 100644 --- a/test/files/run/delambdafy_t6555.scala +++ b/test/files/run/delambdafy_t6555.scala @@ -6,8 +6,5 @@ object Test extends DirectTest { override def code = "class Foo { val f = (param: String) => param } " - override def show(): Unit = - Console.withErr(System.out) { - compile() - } + override def show(): Unit = compile() } diff --git a/test/files/run/delambdafy_uncurry_byname_inline.scala b/test/files/run/delambdafy_uncurry_byname_inline.scala index 6b33c921ea81..6e3507960d39 100644 --- a/test/files/run/delambdafy_uncurry_byname_inline.scala +++ b/test/files/run/delambdafy_uncurry_byname_inline.scala @@ -11,8 +11,5 @@ object Test extends DirectTest { |} |""".stripMargin.trim - override def show(): Unit = - Console.withErr(System.out) { - compile() - } + override def show(): Unit = compile() } diff --git a/test/files/run/delambdafy_uncurry_byname_method.scala b/test/files/run/delambdafy_uncurry_byname_method.scala index d12edfcf6bc4..ccef6d1cd3dc 100644 --- a/test/files/run/delambdafy_uncurry_byname_method.scala +++ b/test/files/run/delambdafy_uncurry_byname_method.scala @@ -11,8 +11,5 @@ object Test extends DirectTest { |} |""".stripMargin.trim - override def show(): Unit = - Console.withErr(System.out) { - compile() - } + override def show(): Unit = compile() } diff --git a/test/files/run/delambdafy_uncurry_inline.scala b/test/files/run/delambdafy_uncurry_inline.scala index 40c91814143f..4187909a1508 100644 --- a/test/files/run/delambdafy_uncurry_inline.scala +++ b/test/files/run/delambdafy_uncurry_inline.scala @@ -11,9 +11,5 @@ object Test extends DirectTest { |} |""".stripMargin.trim - override def show(): Unit = { - Console.withErr(System.out) { - compile() - } - } + override def show(): Unit = compile() } diff --git a/test/files/run/delambdafy_uncurry_method.scala b/test/files/run/delambdafy_uncurry_method.scala index d83446535357..849ed872f9c6 100644 --- a/test/files/run/delambdafy_uncurry_method.scala +++ b/test/files/run/delambdafy_uncurry_method.scala @@ -11,9 +11,5 @@ object Test extends DirectTest { |} |""".stripMargin.trim 
- override def show(): Unit = { - Console.withErr(System.out) { - compile() - } - } + override def show(): Unit = compile() } diff --git a/test/files/run/dynamic-applyDynamic.scala b/test/files/run/dynamic-applyDynamic.scala index 17efad94a797..25a7cf1dcfeb 100644 --- a/test/files/run/dynamic-applyDynamic.scala +++ b/test/files/run/dynamic-applyDynamic.scala @@ -13,11 +13,7 @@ object Test extends DirectTest { } """.trim - override def show(): Unit = { - Console.withErr(System.out) { - compile() - } - } + override def show(): Unit = compile() } import language.dynamics diff --git a/test/files/run/dynamic-applyDynamicNamed.scala b/test/files/run/dynamic-applyDynamicNamed.scala index de15a4857d82..d5185476ba1b 100644 --- a/test/files/run/dynamic-applyDynamicNamed.scala +++ b/test/files/run/dynamic-applyDynamicNamed.scala @@ -13,11 +13,7 @@ object Test extends DirectTest { } """.trim - override def show(): Unit = { - Console.withErr(System.out) { - compile() - } - } + override def show(): Unit = compile() } import language.dynamics diff --git a/test/files/run/dynamic-selectDynamic.scala b/test/files/run/dynamic-selectDynamic.scala index 392d7bd53c57..8383c1f45823 100644 --- a/test/files/run/dynamic-selectDynamic.scala +++ b/test/files/run/dynamic-selectDynamic.scala @@ -12,11 +12,7 @@ object Test extends DirectTest { } """.trim - override def show(): Unit = { - Console.withErr(System.out) { - compile() - } - } + override def show(): Unit = compile() } import language.dynamics diff --git a/test/files/run/dynamic-updateDynamic.scala b/test/files/run/dynamic-updateDynamic.scala index 237c4884884d..0c5914b61604 100644 --- a/test/files/run/dynamic-updateDynamic.scala +++ b/test/files/run/dynamic-updateDynamic.scala @@ -13,11 +13,7 @@ object Test extends DirectTest { } """.trim - override def show(): Unit = { - Console.withErr(System.out) { - compile() - } - } + override def show(): Unit = compile() } import language.dynamics diff --git a/test/files/run/existential-rangepos.scala b/test/files/run/existential-rangepos.scala index 2f56e8ebed87..d31a5e754f53 100644 --- a/test/files/run/existential-rangepos.scala +++ b/test/files/run/existential-rangepos.scala @@ -9,5 +9,5 @@ abstract class A[T] { val bar: Set[_ <: T] }""".trim - override def show(): Unit = Console.withErr(System.out)(compile()) + override def show(): Unit = compile() } diff --git a/test/files/run/icode-reader-dead-code.scala b/test/files/run/icode-reader-dead-code.scala index 31f5c06f388d..dd3934a0eef6 100644 --- a/test/files/run/icode-reader-dead-code.scala +++ b/test/files/run/icode-reader-dead-code.scala @@ -30,13 +30,13 @@ object Test extends DirectTest { |} """.stripMargin - compileString(newCompiler("-usejavacp", "-cp", testOutput.path))(aCode) + compileString(newCompiler("-cp", testOutput.path))(aCode) addDeadCode() // If inlining fails, the compiler will issue an inliner warning that is not present in the // check file - compileString(newCompiler("-usejavacp", "-cp", testOutput.path, "-opt:l:inline", "-opt-inline-from:**"))(bCode) + compileString(newCompiler("-cp", testOutput.path, "-opt:l:inline", "-opt-inline-from:**"))(bCode) } def readClass(file: String) = { diff --git a/test/files/run/large_class.scala b/test/files/run/large_class.scala index b10462aa5681..e422f653a2da 100644 --- a/test/files/run/large_class.scala +++ b/test/files/run/large_class.scala @@ -2,7 +2,6 @@ import scala.tools.partest._ // a cold run of partest takes about 15s for this test on my laptop object Test extends DirectTest { - override def 
extraSettings: String = "-usejavacp" def s(n: Int) = "\""+n+"\"" @@ -18,9 +17,5 @@ object Test extends DirectTest { s(n+60000)+")") mkString ";"} |}""".stripMargin.trim - override def show(): Unit = { - Console.withErr(System.out) { - compile() - } - } + override def show(): Unit = compile() } diff --git a/test/files/run/large_code.scala b/test/files/run/large_code.scala index e6104d2c062c..c3b0beac7876 100644 --- a/test/files/run/large_code.scala +++ b/test/files/run/large_code.scala @@ -2,7 +2,6 @@ import scala.tools.partest._ // a cold run of partest takes about 15s for this test on my laptop object Test extends DirectTest { - override def extraSettings: String = "-usejavacp" // test that we hit the code size limit and error out gracefully // 5958 is the magic number (2^16/11 -- each `a(1,2,3,4,5,6)` is 11 bytes of bytecode) @@ -15,9 +14,5 @@ object Test extends DirectTest { | } |}""".stripMargin.trim - override def show(): Unit = { - Console.withErr(System.out) { - compile() - } - } + override def show(): Unit = compile() } diff --git a/test/files/run/literals-parsing.scala b/test/files/run/literals-parsing.scala index eb94d5a260df..04a0c5f4d359 100644 --- a/test/files/run/literals-parsing.scala +++ b/test/files/run/literals-parsing.scala @@ -19,7 +19,5 @@ object Test extends DirectTest { } """.trim - override def show(): Unit = Console.withErr(System.out) { - compile() - } + override def show(): Unit = compile() } diff --git a/test/files/run/macroPlugins-namerHooks.scala b/test/files/run/macroPlugins-namerHooks.scala index ca049e78e9e6..89ee7756867a 100644 --- a/test/files/run/macroPlugins-namerHooks.scala +++ b/test/files/run/macroPlugins-namerHooks.scala @@ -2,7 +2,6 @@ import scala.tools.partest._ import scala.tools.nsc._ object Test extends DirectTest { - override def extraSettings: String = "-usejavacp" def code = """ case class C(x: Int, y: Int) diff --git a/test/files/run/maxerrs.scala b/test/files/run/maxerrs.scala index c04b3e5bad1f..fa2768ec6688 100644 --- a/test/files/run/maxerrs.scala +++ b/test/files/run/maxerrs.scala @@ -14,8 +14,6 @@ object Test extends DirectTest { } """.trim - override def extraSettings = "-usejavacp" - // a reporter that ignores all limits lazy val store = new UnfilteredStoreReporter diff --git a/test/files/run/patmat-no-inline-isEmpty.scala b/test/files/run/patmat-no-inline-isEmpty.scala index 3af510134c70..52fb76d1ccf4 100644 --- a/test/files/run/patmat-no-inline-isEmpty.scala +++ b/test/files/run/patmat-no-inline-isEmpty.scala @@ -24,8 +24,8 @@ object Test extends DirectTest { |} """.stripMargin - def show(): Unit = Console.withErr(System.out) { - compileString(newCompiler("-usejavacp"))(depCode) - compileString(newCompiler("-usejavacp", "-cp", testOutput.path, "-Vprint:patmat"))(code) + def show(): Unit = { + compileString(newCompiler())(depCode) + compileString(newCompiler("-cp", testOutput.path, "-Vprint:patmat"))(code) } } diff --git a/test/files/run/patmat-no-inline-unapply.scala b/test/files/run/patmat-no-inline-unapply.scala index bd6a5541cdd6..1ce9994c30d2 100644 --- a/test/files/run/patmat-no-inline-unapply.scala +++ b/test/files/run/patmat-no-inline-unapply.scala @@ -16,8 +16,8 @@ object Test extends DirectTest { |} """.stripMargin - def show(): Unit = Console.withErr(System.out) { - compileString(newCompiler("-usejavacp"))(depCode) - compileString(newCompiler("-usejavacp", "-cp", testOutput.path, "-Vprint:patmat"))(code) + def show(): Unit = { + compileString(newCompiler())(depCode) + compileString(newCompiler("-cp", testOutput.path, 
"-Vprint:patmat"))(code) } } diff --git a/test/files/run/patmat-origtp-switch.scala b/test/files/run/patmat-origtp-switch.scala index 8451d31bac83..c890ee13601c 100644 --- a/test/files/run/patmat-origtp-switch.scala +++ b/test/files/run/patmat-origtp-switch.scala @@ -12,9 +12,5 @@ object Test extends DirectTest { } """ - override def show(): Unit = { - Console.withErr(System.out) { - compile() - } - } + override def show(): Unit = compile() } diff --git a/test/files/run/patmat-seq.scala b/test/files/run/patmat-seq.scala index c0319298b45b..874656ab6d66 100644 --- a/test/files/run/patmat-seq.scala +++ b/test/files/run/patmat-seq.scala @@ -51,9 +51,5 @@ object Test extends DirectTest { |} """.stripMargin - override def show(): Unit = { - Console.withErr(System.out) { - compile() - } - } + override def show(): Unit = compile() } diff --git a/test/files/run/sbt-icode-interface.scala b/test/files/run/sbt-icode-interface.scala index f0281ccf63c2..1b7bd5a6acad 100644 --- a/test/files/run/sbt-icode-interface.scala +++ b/test/files/run/sbt-icode-interface.scala @@ -9,7 +9,7 @@ object Test extends DirectTest { """.trim def show(): Unit = { - val global = newCompiler("-usejavacp") + val global = newCompiler() import global._ val r = new Run r.compileSources(newSourceFile(code) :: Nil) diff --git a/test/files/run/sd187.scala b/test/files/run/sd187.scala index d8892be7f1f0..be475a15e0c3 100644 --- a/test/files/run/sd187.scala +++ b/test/files/run/sd187.scala @@ -32,10 +32,5 @@ object Test extends DirectTest { |} |""".stripMargin - - override def show(): Unit = { - Console.withErr(System.out) { - compile() - } - } + override def show(): Unit = compile() } diff --git a/test/files/run/sd275.scala b/test/files/run/sd275.scala index 519558d1a552..b150b59afebe 100644 --- a/test/files/run/sd275.scala +++ b/test/files/run/sd275.scala @@ -24,7 +24,7 @@ package p1 { """ override def extraSettings = { - val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator") + val classpath = pathOf(sys.props("partest.lib"), testOutput.path) s"-cp $classpath" } diff --git a/test/files/run/string-switch-pos.scala b/test/files/run/string-switch-pos.scala index db093bc93a55..b8d8c7ad1a9f 100644 --- a/test/files/run/string-switch-pos.scala +++ b/test/files/run/string-switch-pos.scala @@ -15,5 +15,5 @@ object Test extends DirectTest { |} """.stripMargin.trim - override def show(): Unit = Console.withErr(Console.out) { super.compile() } + override def show(): Unit = compile() } diff --git a/test/files/run/t10203.scala b/test/files/run/t10203.scala index 2ad060399d85..c718ee7995c7 100644 --- a/test/files/run/t10203.scala +++ b/test/files/run/t10203.scala @@ -14,11 +14,7 @@ object Test extends DirectTest { } """.trim - override def show(): Unit = { - Console.withErr(System.out) { - compile() - } - } + override def show(): Unit = compile() } import language.dynamics diff --git a/test/files/run/t10344.scala b/test/files/run/t10344.scala index dfcf1f442451..fbbc8a871c88 100644 --- a/test/files/run/t10344.scala +++ b/test/files/run/t10344.scala @@ -13,9 +13,5 @@ object t10344 { } """ - override def show(): Unit = { - Console.withErr(System.out) { - compile() - } - } + override def show(): Unit = compile() } diff --git a/test/files/run/t10751.scala b/test/files/run/t10751.scala index dd6fbbd5dcc0..bcef4e169a3f 100644 --- a/test/files/run/t10751.scala +++ b/test/files/run/t10751.scala @@ -23,11 +23,7 @@ object Test extends DirectTest { } """.trim - override def show(): Unit = { - 
Console.withErr(System.out) { - compile() - } - } + override def show(): Unit = compile() } class C { diff --git a/test/files/run/t11385.scala b/test/files/run/t11385.scala index 5f66a6ddeb5b..a46985706f70 100644 --- a/test/files/run/t11385.scala +++ b/test/files/run/t11385.scala @@ -12,7 +12,7 @@ object Test extends DirectTest { def show() = assert { val tmp = createTempDirectory("t11385") val pkg = createDirectories(tmp.resolve("acme").resolve("C").resolve("sub")) - compile("-usejavacp", "-classpath", tmp.toString) + compile("-classpath", tmp.toString) } } diff --git a/test/files/run/t11731.scala b/test/files/run/t11731.scala index d52a9dc94ab7..3973c5a8e682 100644 --- a/test/files/run/t11731.scala +++ b/test/files/run/t11731.scala @@ -35,7 +35,7 @@ object Test extends DirectTest { private def fakeSbt = new sbt.FakeSbt override def show() = { - val global = newCompiler("-usejavacp", "-feature") + val global = newCompiler("-feature") def checkMsg(): Unit = assert(global.reporter.asInstanceOf[StoreReporter].infos.head.msg.contains("postfix operator")) diff --git a/test/files/run/t12405.scala b/test/files/run/t12405.scala index f44e19fd99ea..f506bd062c38 100644 --- a/test/files/run/t12405.scala +++ b/test/files/run/t12405.scala @@ -24,7 +24,5 @@ object Test extends DirectTest { |} |""".stripMargin - override def show(): Unit = Console.withErr(System.out) { - compile() - } + override def show(): Unit = compile() } diff --git a/test/files/run/t4841-no-plugin.scala b/test/files/run/t4841-no-plugin.scala index 8105278ca3a0..d10cddc60ff1 100644 --- a/test/files/run/t4841-no-plugin.scala +++ b/test/files/run/t4841-no-plugin.scala @@ -7,8 +7,6 @@ import java.io.File object Test extends DirectTest { override def code = "class Code" - override def extraSettings = s"-usejavacp" - override def show() = { val tmp = new File(testOutput.jfile, "plugins.partest").getAbsolutePath compile("-Xdev", s"-Xplugin:$tmp", "-Xpluginsdir", tmp) diff --git a/test/files/run/t5463.scala b/test/files/run/t5463.scala index 30b8306156d3..db710beff70d 100644 --- a/test/files/run/t5463.scala +++ b/test/files/run/t5463.scala @@ -12,7 +12,7 @@ object Test extends DirectTest { val classpath = List(sys.props("partest.lib"), jarpath, testOutput.path) mkString sys.props("path.separator") try { - compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(code) + compileString(newCompiler("-cp", classpath))(code) throw new Error("Compilation should have failed"); } catch { case ex: FatalError => // this is expected diff --git a/test/files/run/t5545.scala b/test/files/run/t5545.scala index 3b46bbb6422c..0faf87a943af 100644 --- a/test/files/run/t5545.scala +++ b/test/files/run/t5545.scala @@ -3,9 +3,9 @@ import java.io._ object Test extends DirectTest { - override def extraSettings: String = "-usejavacp -d " + testOutput.path + " -cp " + testOutput.path + override def extraSettings: String = s"-usejavacp -cp ${testOutput.path}" - override def code = """ + override def code = s""" // scala/bug#5545 trait F[@specialized(Int) T1, R] { def f(v1: T1): R @@ -14,12 +14,8 @@ object Test extends DirectTest { """.trim override def show(): Unit = { - // redirect err to out, for logging - val prevErr = System.err - System.setErr(System.out) compile() // the bug manifests at the second compilation, when the bytecode is already there compile() - System.setErr(prevErr) } } diff --git a/test/files/run/t5603.scala b/test/files/run/t5603.scala index c047fe7896b5..2e36639931f8 100644 --- a/test/files/run/t5603.scala +++ 
b/test/files/run/t5603.scala @@ -7,7 +7,7 @@ import scala.tools.nsc.reporters.ConsoleReporter object Test extends DirectTest { - override def extraSettings: String = "-usejavacp -Vprint:parser -Ystop-after:parser -d " + testOutput.path + override def extraSettings: String = "-usejavacp -Vprint:parser -Ystop-after:parser" override def code = """ trait Greeting { @@ -24,13 +24,7 @@ object Test extends DirectTest { object Test extends App {} """.trim - override def show(): Unit = { - // redirect err to out, for logging - val prevErr = System.err - System.setErr(System.out) - compile() - System.setErr(prevErr) - } + override def show(): Unit = compile() override def newCompiler(args: String*): Global = { diff --git a/test/files/run/t5717.scala b/test/files/run/t5717.scala index 5e3b94656476..805e3f99203c 100644 --- a/test/files/run/t5717.scala +++ b/test/files/run/t5717.scala @@ -2,12 +2,10 @@ import scala.tools.partest._ import java.io.File object Test extends StoreReporterDirectTest { - def code = ??? + def code = "package a { class B }" + + override def extraSettings: String = s"-cp ${pathOf(sys.props("partest.lib"), testOutput.path)}" - def compileCode(code: String) = { - val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator") - compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(code) - } // TODO // Don't assume output is on physical disk // Let the compiler tell us output dir @@ -16,7 +14,7 @@ object Test extends StoreReporterDirectTest { def show(): Unit = { // Don't crash when we find a file 'a' where package 'a' should go. scala.reflect.io.File(testOutput.path + "/a").writeAll("a") - compileCode("package a { class B }") + compile() val List(i) = filteredInfos // for some reason, nio doesn't throw the same exception on windows and linux/mac import File.separator diff --git a/test/files/run/t5905-features.scala b/test/files/run/t5905-features.scala index 5d92961931ea..d411f7d92148 100644 --- a/test/files/run/t5905-features.scala +++ b/test/files/run/t5905-features.scala @@ -7,8 +7,6 @@ import tools.partest.DirectTest object Test extends DirectTest { override def code = "class Code { def f = (1 to 10) size }" // exercise a feature to sanity-check coverage of -language options - override def extraSettings = s"-usejavacp -d ${testOutput.path}" - override def show() = { val global = newCompiler("-Ystop-after:typer") compileString(global)("") // warm me up, scotty diff --git a/test/files/run/t5905b-features.scala b/test/files/run/t5905b-features.scala index 627df8334b89..938d26e4f247 100644 --- a/test/files/run/t5905b-features.scala +++ b/test/files/run/t5905b-features.scala @@ -5,8 +5,6 @@ import tools.partest.DirectTest object Test extends DirectTest { override def code = "class Code" - override def extraSettings = s"-usejavacp -d ${testOutput.path}" - override def show() = { //compile("-language", "--") // no error compile(s"-language:noob") diff --git a/test/files/run/t5938.scala b/test/files/run/t5938.scala index 7a3093102a70..58ce964f9d0c 100644 --- a/test/files/run/t5938.scala +++ b/test/files/run/t5938.scala @@ -3,7 +3,7 @@ import scala.tools.partest.DirectTest object Test extends DirectTest { override def extraSettings: String = - s"-usejavacp -d ${testOutput.path} -cp ${testOutput.path} -d ${testOutput.path}" + s"-usejavacp -cp ${testOutput.path}" override def code = """ object O extends C { @@ -15,11 +15,9 @@ object O extends C { override def show(): Unit = { val global = newCompiler() - Console.withErr(System.out) { - 
compileString(global)(code) - compileString(global)(code) - loadClass // was "duplicate name and signature in class X" - } + compileString(global)(code) + compileString(global)(code) + loadClass // was "duplicate name and signature in class X" } def loadClass: Class[_] = { diff --git a/test/files/run/t5940.scala b/test/files/run/t5940.scala index 0c0e39f603c0..d86585e8720a 100644 --- a/test/files/run/t5940.scala +++ b/test/files/run/t5940.scala @@ -17,8 +17,8 @@ object Test extends DirectTest { } """ def compileMacros() = { - val classpath = List(sys.props("partest.lib"), sys.props("partest.reflect")) mkString sys.props("path.separator") - compileString(newCompiler("-language:experimental.macros", "-cp", classpath, "-d", testOutput.path))(macros_1) + val classpath = pathOf(sys.props("partest.lib"), sys.props("partest.reflect")) + compileString(newCompiler("-language:experimental.macros", "-cp", classpath))(macros_1) } def test_2 = """ @@ -27,7 +27,7 @@ object Test extends DirectTest { } """ def compileTest() = { - val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator") + val classpath = pathOf(sys.props("partest.lib"), testOutput.path) compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(test_2) } diff --git a/test/files/run/t6028.scala b/test/files/run/t6028.scala index 6e4e179f1dfd..60517d5193e7 100644 --- a/test/files/run/t6028.scala +++ b/test/files/run/t6028.scala @@ -13,9 +13,5 @@ object Test extends DirectTest { |} |""".stripMargin.trim - override def show(): Unit = { - Console.withErr(System.out) { - compile() - } - } + override def show(): Unit = compile() } diff --git a/test/files/run/t6288.scala b/test/files/run/t6288.scala index 0565e848ea40..29ef3567a553 100644 --- a/test/files/run/t6288.scala +++ b/test/files/run/t6288.scala @@ -40,11 +40,7 @@ object Test extends DirectTest { |} |""".stripMargin.trim - override def show(): Unit = { + override def show(): Unit = compile() // Now: [84][84]Case3.unapply([84]x1); // Was: [84][84]Case3.unapply([64]x1); - Console.withErr(System.out) { - compile() - } - } } diff --git a/test/files/run/t6440.scala b/test/files/run/t6440.scala index 94eda3642ea6..f6fcc97d4870 100644 --- a/test/files/run/t6440.scala +++ b/test/files/run/t6440.scala @@ -5,8 +5,8 @@ object Test extends StoreReporterDirectTest { def code = ??? def compileCode(code: String) = { - val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator") - compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(code) + val classpath = pathOf(sys.props("partest.lib"), testOutput.path) + compileString(newCompiler("-cp", classpath))(code) } def library1 = """ diff --git a/test/files/run/t6440b.scala b/test/files/run/t6440b.scala index a1ad71716226..65a40edc40b4 100644 --- a/test/files/run/t6440b.scala +++ b/test/files/run/t6440b.scala @@ -5,8 +5,8 @@ object Test extends StoreReporterDirectTest { def code = ??? 
def compileCode(code: String) = { - val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator") - compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(code) + val classpath = pathOf(sys.props("partest.lib"), testOutput.path) + compileString(newCompiler("-cp", classpath))(code) } def library1 = """ diff --git a/test/files/run/t6502.scala b/test/files/run/t6502.scala index 68c0e8aa4e88..8664f5c96ade 100644 --- a/test/files/run/t6502.scala +++ b/test/files/run/t6502.scala @@ -6,7 +6,7 @@ object Test extends StoreReporterDirectTest { def code = ??? private def compileCode(code: String, jarFileName: String) = { - val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator") + val classpath = pathOf(sys.props("partest.lib"), testOutput.path) compileString(newCompiler("-cp", classpath, "-d", s"${testOutput.path}/$jarFileName"))(code) } private def runAdded(codeToRun: String): String = { @@ -16,7 +16,6 @@ object Test extends StoreReporterDirectTest { output.mkString("\n") } - def app1 = """ package test diff --git a/test/files/run/t6555.scala b/test/files/run/t6555.scala index e1db058da1d7..f4425d960113 100644 --- a/test/files/run/t6555.scala +++ b/test/files/run/t6555.scala @@ -7,9 +7,5 @@ object Test extends DirectTest { override def code = "class Foo { val f = (param: Int) => param } " - override def show(): Unit = { - Console.withErr(System.out) { - compile() - } - } + override def show(): Unit = compile() } diff --git a/test/files/run/t7271.scala b/test/files/run/t7271.scala index 6eadb7816c0e..c2801396d37e 100644 --- a/test/files/run/t7271.scala +++ b/test/files/run/t7271.scala @@ -16,13 +16,7 @@ object Test extends DirectTest { } """.trim - override def show(): Unit = { - // redirect err to out, for logging - val prevErr = System.err - System.setErr(System.out) - compile() - System.setErr(prevErr) - } + override def show(): Unit = compile() override def newCompiler(args: String*): Global = { diff --git a/test/files/run/t7876.scala b/test/files/run/t7876.scala index 8c41e2e3c34a..bc18f16b6dd8 100644 --- a/test/files/run/t7876.scala +++ b/test/files/run/t7876.scala @@ -2,7 +2,6 @@ import scala.tools.partest._ // Type constructors for FunctionN and TupleN should not be considered as function type / tuple types. object Test extends DirectTest { - override def extraSettings: String = "-usejavacp" def code = "" diff --git a/test/files/run/t8433.scala b/test/files/run/t8433.scala index 16144ffddb7c..c4757997c897 100644 --- a/test/files/run/t8433.scala +++ b/test/files/run/t8433.scala @@ -42,5 +42,5 @@ object Test extends DirectTest { ScalaClassLoader(getClass.getClassLoader).run("Main", Nil) } - override def extraSettings = s"-usejavacp -d ${testOutput.path} -cp ${testOutput.path}" + override def extraSettings = s"-usejavacp -cp ${testOutput.path}" } diff --git a/test/files/run/t8502.scala b/test/files/run/t8502.scala index a700eb521729..fa10a10d9c3a 100644 --- a/test/files/run/t8502.scala +++ b/test/files/run/t8502.scala @@ -5,8 +5,8 @@ object Test extends StoreReporterDirectTest { def code = ??? 
def compileCode(code: String) = { - val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator") - compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(code) + val classpath = pathOf(sys.props("partest.lib"), testOutput.path) + compileString(newCompiler("-cp", classpath))(code) } def show(): Unit = { diff --git a/test/files/run/t8502b.scala b/test/files/run/t8502b.scala index f1858fd88859..5113179c957e 100644 --- a/test/files/run/t8502b.scala +++ b/test/files/run/t8502b.scala @@ -10,8 +10,8 @@ object Test extends StoreReporterDirectTest { def code = ??? def compileCode(code: String) = { - val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator") - compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(code) + val classpath = pathOf(sys.props("partest.lib"), testOutput.path) + compileString(newCompiler("-cp", classpath))(code) } def show(): Unit = { diff --git a/test/files/run/t8907.scala b/test/files/run/t8907.scala index e425e93546e8..a20e9c552e7c 100644 --- a/test/files/run/t8907.scala +++ b/test/files/run/t8907.scala @@ -5,7 +5,7 @@ object Test extends StoreReporterDirectTest { def code = ??? def compileCode(code: String) = { - val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator") + val classpath = pathOf(sys.props("partest.lib"), testOutput.path) compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(code) } diff --git a/test/files/run/t9097.scala b/test/files/run/t9097.scala index a7ddc5cf917e..23c9c31a7a32 100644 --- a/test/files/run/t9097.scala +++ b/test/files/run/t9097.scala @@ -9,7 +9,7 @@ object Test extends StoreReporterDirectTest { "-Ydelambdafy:method", "-Vprint:delambdafy", s"-d ${testOutput.path}" - ) mkString " " + ).mkString(" ") override def code = """package o |package a { diff --git a/test/files/run/t9437b.scala b/test/files/run/t9437b.scala index 9278e02ec8d4..82a83dd093e8 100644 --- a/test/files/run/t9437b.scala +++ b/test/files/run/t9437b.scala @@ -12,7 +12,7 @@ import Opcodes._ // that uses the class with named arguments. // Any failure will be dumped to std out. 
object Test extends DirectTest { - override def extraSettings: String = "-usejavacp -d " + testOutput.path + " -cp " + testOutput.path + override def extraSettings: String = s"-usejavacp -cp ${testOutput.path}" def generateCode(): Unit = { val className = "Foo" @@ -78,15 +78,8 @@ class Driver { """ override def show(): Unit = { - // redirect err to out, for logging - val prevErr = System.err - System.setErr(System.out) - try { - generateCode() - compile() - Class.forName("Driver").getDeclaredConstructor().newInstance() - } - finally - System.setErr(prevErr) + generateCode() + compile() + Class.forName("Driver").getDeclaredConstructor().newInstance() } } diff --git a/test/files/run/typetags_without_scala_reflect_manifest_lookup.scala b/test/files/run/typetags_without_scala_reflect_manifest_lookup.scala index 6488c78d164b..820479cfd3be 100644 --- a/test/files/run/typetags_without_scala_reflect_manifest_lookup.scala +++ b/test/files/run/typetags_without_scala_reflect_manifest_lookup.scala @@ -2,7 +2,7 @@ import scala.tools.partest._ import scala.tools.nsc.Settings object Test extends DirectTest { - override def extraSettings = "-cp " + sys.props("partest.lib") + " -d \"" + testOutput.path + "\"" + override def extraSettings = "-cp " + sys.props("partest.lib") def code = """ object Test extends App { diff --git a/test/files/run/typetags_without_scala_reflect_typetag_lookup.scala b/test/files/run/typetags_without_scala_reflect_typetag_lookup.scala index dccb2af8f55e..fd0f2dee3d32 100644 --- a/test/files/run/typetags_without_scala_reflect_typetag_lookup.scala +++ b/test/files/run/typetags_without_scala_reflect_typetag_lookup.scala @@ -3,6 +3,9 @@ import scala.tools.partest._ object Test extends StoreReporterDirectTest { def code = ??? + // differs for two compilations + override def extraSettings: String = "" + def library = """ import scala.reflect.runtime.universe._ @@ -11,8 +14,8 @@ object Test extends StoreReporterDirectTest { } """ def compileLibrary() = { - val classpath = List(sys.props("partest.lib"), sys.props("partest.reflect")) mkString sys.props("path.separator") - compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(library) + val classpath = pathOf(sys.props("partest.lib"), sys.props("partest.reflect")) + compileString(newCompiler("-cp", classpath))(library) } def app = """ @@ -27,15 +30,15 @@ object Test extends StoreReporterDirectTest { } """ def compileApp() = { - val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator") - compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(app) + val classpath = pathOf(sys.props("partest.lib"), testOutput.path) + compileString(newCompiler("-cp", classpath))(app) } def show(): Unit = { compileLibrary(); println(filteredInfos.mkString("\n")) storeReporter.infos.clear() - compileApp(); + compileApp() // we should get "missing or invalid dependency detected" errors, because we're trying to use an implicit that can't be unpickled // but we don't know the number of these errors and their order, so I just ignore them all println(filteredInfos.filterNot(_.msg.contains("is missing from the classpath")).mkString("\n")) diff --git a/test/files/run/typetags_without_scala_reflect_typetag_manifest_interop.scala b/test/files/run/typetags_without_scala_reflect_typetag_manifest_interop.scala index c865759588c3..a62c5fe1e6d5 100644 --- a/test/files/run/typetags_without_scala_reflect_typetag_manifest_interop.scala +++ 
b/test/files/run/typetags_without_scala_reflect_typetag_manifest_interop.scala @@ -4,6 +4,9 @@ import scala.tools.nsc.Settings object Test extends StoreReporterDirectTest { def code = ??? + // differs for two compilations + override def extraSettings: String = "" + def library = """ import scala.reflect.runtime.universe._ @@ -13,8 +16,8 @@ object Test extends StoreReporterDirectTest { } """ def compileLibrary() = { - val classpath = List(sys.props("partest.lib"), sys.props("partest.reflect")) mkString sys.props("path.separator") - compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(library) + val classpath = pathOf(sys.props("partest.lib"), sys.props("partest.reflect")) + compileString(newCompiler("-cp", classpath))(library) } def app = """ @@ -29,19 +32,17 @@ object Test extends StoreReporterDirectTest { } """ def compileApp() = { - val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator") - val global = newCompiler("-cp", classpath, "-d", testOutput.path) - compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(app) - //global.reporter.ERROR.foreach(println) + val classpath = pathOf(sys.props("partest.lib"), testOutput.path) + compileString(newCompiler("-cp", classpath))(app) } def show(): Unit = { compileLibrary(); println(filteredInfos.mkString("\n")) storeReporter.infos.clear() - compileApp(); + compileApp() // we should get "missing or invalid dependency detected" errors, because we're trying to use an implicit that can't be unpickled // but we don't know the number of these errors and their order, so I just ignore them all - println(filteredInfos.filterNot (_.msg.contains("is missing from the classpath")).mkString("\n")) + println(filteredInfos.filterNot(_.msg.contains("is missing from the classpath")).mkString("\n")) } } diff --git a/test/scaladoc/run/t5527.scala b/test/scaladoc/run/t5527.scala index b4fdc70339a3..ec4ee4eeaa04 100644 --- a/test/scaladoc/run/t5527.scala +++ b/test/scaladoc/run/t5527.scala @@ -137,11 +137,8 @@ object Test extends DirectTest { } """.trim - // redirect err to out, for logging - override def show(): Unit = StreamCapture.savingSystem { - System.setErr(System.out) - compile() - } + override def show(): Unit = compile() + // doc.Settings override def newSettings(args: List[String]) = new doc.Settings(_ => ()).tap(_.processArguments(args, true)) // ScaladocGlobal yielded by DocFactory#compiler, requires doc.Settings From 41f6345a19e0e310c243090e93746097de07c496 Mon Sep 17 00:00:00 2001 From: Martijn Hoekstra Date: Tue, 6 Jul 2021 19:03:32 +0200 Subject: [PATCH 283/769] Update src/library-aux/scala/Any.scala Co-authored-by: Dale Wijnand --- src/library-aux/scala/Any.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/library-aux/scala/Any.scala b/src/library-aux/scala/Any.scala index 0f769be8e99a..188d68e756e3 100644 --- a/src/library-aux/scala/Any.scala +++ b/src/library-aux/scala/Any.scala @@ -110,7 +110,7 @@ abstract class Any { */ final def ## : Int = sys.error("##") - /** Test whether the dynamic type of the receiver object has the same erasure to `T0`. + /** Test whether the dynamic type of the receiver object has the same erasure as `T0`. 
* * Depending on what `T0` is, the test is done in one of the below ways: * From 32ac837feced96c3ae24397adbf94438aad3cf9c Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 2 Jul 2021 12:09:01 +1000 Subject: [PATCH 284/769] [backport] Update to SBT 1.5.4 and its slash DSL --- build.sbt | 408 +++++++++++++++--------------- project/AutomaticModuleName.scala | 2 +- project/JitWatch.scala | 10 +- project/License.scala | 2 +- project/Osgi.scala | 8 +- project/ScaladocSettings.scala | 2 +- project/ScriptCommands.scala | 44 ++-- project/VersionUtil.scala | 10 +- project/build.properties | 2 +- scripts/common | 2 +- 10 files changed, 246 insertions(+), 244 deletions(-) diff --git a/build.sbt b/build.sbt index 70128525bba0..4bded6e96dbb 100644 --- a/build.sbt +++ b/build.sbt @@ -56,7 +56,7 @@ val diffUtilsDep = "com.googlecode.java-diff-utils" % "diffutils" % " * real publishing should be done with sbt's standard `publish` task. */ lazy val publishDists = taskKey[Unit]("Publish to ./dists/maven-sbt.") -credentials in Global ++= { +(Global / credentials) ++= { val file = Path.userHome / ".credentials" if (file.exists && !file.isDirectory) List(Credentials(file)) else Nil @@ -64,7 +64,7 @@ credentials in Global ++= { lazy val publishSettings : Seq[Setting[_]] = Seq( publishDists := { - val artifacts = (packagedArtifacts in publish).value + val artifacts = (publish / packagedArtifacts).value val ver = VersionUtil.versionProperties.value.canonicalVersion val log = streams.value.log val mappings = artifacts.toSeq.map { case (a, f) => @@ -95,14 +95,14 @@ lazy val publishSettings : Seq[Setting[_]] = Seq( // should not be set directly. It is the same as the Maven version and derived automatically from `baseVersion` and // `baseVersionSuffix`. globalVersionSettings -baseVersion in Global := "2.12.15" -baseVersionSuffix in Global := "SNAPSHOT" -organization in ThisBuild := "org.scala-lang" -homepage in ThisBuild := Some(url("https://www.scala-lang.org")) -startYear in ThisBuild := Some(2002) -licenses in ThisBuild += (("Apache-2.0", url("https://www.apache.org/licenses/LICENSE-2.0"))) -headerLicense in ThisBuild := Some(HeaderLicense.Custom( - s"""Scala (${(homepage in ThisBuild).value.get}) +(Global / baseVersion) := "2.12.15" +(Global / baseVersionSuffix) := "SNAPSHOT" +(ThisBuild / organization) := "org.scala-lang" +(ThisBuild / homepage) := Some(url("https://www.scala-lang.org")) +(ThisBuild / startYear) := Some(2002) +(ThisBuild / licenses) += (("Apache-2.0", url("https://www.apache.org/licenses/LICENSE-2.0"))) +(ThisBuild / headerLicense) := Some(HeaderLicense.Custom( + s"""Scala (${(ThisBuild / homepage).value.get}) | |Copyright EPFL and Lightbend, Inc. 
| @@ -114,7 +114,7 @@ headerLicense in ThisBuild := Some(HeaderLicense.Custom( |""".stripMargin )) -scalaVersion in Global := versionProps("starr.version") +(Global / scalaVersion) := versionProps("starr.version") lazy val instanceSettings = Seq[Setting[_]]( // we don't cross build Scala itself @@ -124,12 +124,12 @@ lazy val instanceSettings = Seq[Setting[_]]( // Avoid circular dependencies for scalaInstance (see https://github.com/sbt/sbt/issues/1872) managedScalaInstance := false, scalaInstance := { - val s = (scalaInstance in bootstrap).value + val s = (bootstrap / scalaInstance).value // sbt claims that s.isManagedVersion is false even though s was resolved by Ivy // We create a managed copy to prevent sbt from putting it on the classpath where we don't want it if(s.isManagedVersion) s else { import sbt.internal.inc.ScalaInstance - val s2 = new ScalaInstance(s.version, s.loader, s.loaderLibraryOnly, s.libraryJars, s.compilerJar, s.allJars, Some(s.actualVersion)) + val s2 = new ScalaInstance(s.version, s.loader, s.loaderCompilerOnly, s.loaderLibraryOnly, s.libraryJars, s.compilerJars, s.allJars, Some(s.actualVersion)) assert(s2.isManagedVersion) s2 } @@ -146,36 +146,36 @@ lazy val commonSettings = instanceSettings ++ clearSourceAndResourceDirectories // we always assume that Java classes are standalone and do not have any dependency // on Scala classes compileOrder := CompileOrder.JavaThenScala, - javacOptions in Compile ++= Seq("-g", "-source", "1.8", "-target", "1.8", "-Xlint:unchecked"), - unmanagedJars in Compile := Seq.empty, // no JARs in version control! - sourceDirectory in Compile := baseDirectory.value, - unmanagedSourceDirectories in Compile := List(baseDirectory.value), - unmanagedResourceDirectories in Compile += (baseDirectory in ThisBuild).value / "src" / thisProject.value.id, + (Compile / javacOptions) ++= Seq("-g", "-source", "1.8", "-target", "1.8", "-Xlint:unchecked"), + (Compile / unmanagedJars) := Seq.empty, // no JARs in version control! 
+ (Compile / sourceDirectory) := baseDirectory.value, + (Compile / unmanagedSourceDirectories) := List(baseDirectory.value), + (Compile / unmanagedResourceDirectories) += (ThisBuild / baseDirectory).value / "src" / thisProject.value.id, sourcesInBase := false, - scalaSource in Compile := (sourceDirectory in Compile).value, - javaSource in Compile := (sourceDirectory in Compile).value, + (Compile / scalaSource) := (Compile / sourceDirectory).value, + (Compile / javaSource) := (Compile / sourceDirectory).value, // resources are stored along source files in our current layout - resourceDirectory in Compile := (sourceDirectory in Compile).value, + (Compile / resourceDirectory) := (Compile / sourceDirectory).value, // each subproject has to ask specifically for files they want to include - includeFilter in unmanagedResources in Compile := NothingFilter, - target := (baseDirectory in ThisBuild).value / "target" / thisProject.value.id, - classDirectory in Compile := buildDirectory.value / "quick/classes" / thisProject.value.id, - target in Compile in doc := buildDirectory.value / "scaladoc" / thisProject.value.id, + (Compile / unmanagedResources / includeFilter) := NothingFilter, + target := (ThisBuild / baseDirectory).value / "target" / thisProject.value.id, + (Compile / classDirectory) := buildDirectory.value / "quick/classes" / thisProject.value.id, + (Compile / doc / target) := buildDirectory.value / "scaladoc" / thisProject.value.id, // given that classDirectory and doc target are overridden to be _outside_ of target directory, we have // to make sure they are being cleaned properly - cleanFiles += (classDirectory in Compile).value, - cleanFiles += (target in Compile in doc).value, - fork in run := true, - connectInput in run := true, - scalacOptions in Compile += "-Ywarn-unused:imports", - scalacOptions in Compile in doc ++= Seq( + cleanFiles += (Compile / classDirectory).value, + cleanFiles += (Compile / doc / target).value, + (run / fork) := true, + (run / connectInput) := true, + (Compile / scalacOptions) += "-Ywarn-unused:imports", + (Compile / doc / scalacOptions) ++= Seq( "-doc-footer", "epfl", "-diagrams", "-implicits", "-groups", "-doc-version", versionProperties.value.canonicalVersion, "-doc-title", description.value, - "-sourcepath", (baseDirectory in ThisBuild).value.toString, + "-sourcepath", (ThisBuild / baseDirectory).value.toString, "-doc-source-url", s"https://github.com/scala/scala/tree/${versionProperties.value.githubTree}€{FILE_PATH_EXT}#L€{FILE_LINE}" ), setIncOptions, @@ -201,20 +201,20 @@ lazy val commonSettings = instanceSettings ++ clearSourceAndResourceDirectories }, - headerLicense := (headerLicense in ThisBuild).value, + headerLicense := (ThisBuild / headerLicense).value, // Remove auto-generated manifest attributes - packageOptions in Compile in packageBin := Seq.empty, - packageOptions in Compile in packageSrc := Seq.empty, + (Compile / packageBin / packageOptions) := Seq.empty, + (Compile / packageSrc / packageOptions) := Seq.empty, // Lets us CTRL-C partest without exiting SBT entirely - cancelable in Global := true, + (Global / cancelable) := true, // Don't pick up source files from the project root. sourcesInBase := false, // Don't log process output (e.g. of forked `compiler/runMain ...Main`), just pass it // directly to stdout - outputStrategy in run := Some(StdoutOutput) + (run / outputStrategy) := Some(StdoutOutput) ) ++ removePomDependencies ++ setForkedWorkingDirectory /** Extra post-processing for the published POM files. 
These are needed to create POMs that @@ -247,7 +247,7 @@ def fixPom(extra: (String, scala.xml.Node)*): Setting[_] = { val pomDependencyExclusions = settingKey[Seq[(String, String)]]("List of (groupId, artifactId) pairs to exclude from the POM and ivy.xml") -pomDependencyExclusions in Global := Nil +(Global / pomDependencyExclusions) := Nil /** Remove unwanted dependencies from the POM and ivy.xml. */ lazy val removePomDependencies: Seq[Setting[_]] = Seq( @@ -290,8 +290,8 @@ lazy val removePomDependencies: Seq[Setting[_]] = Seq( ) val disableDocs = Seq[Setting[_]]( - sources in (Compile, doc) := Seq.empty, - publishArtifact in (Compile, packageDoc) := false + (Compile / doc / sources) := Seq.empty, + (Compile / packageDoc / publishArtifact) := false ) val disablePublishing = Seq[Setting[_]]( @@ -303,7 +303,7 @@ val disablePublishing = Seq[Setting[_]]( ) lazy val setJarLocation: Setting[_] = - artifactPath in packageBin in Compile := { + (Compile / packageBin / artifactPath) := { // two lines below are copied over from sbt's sources: // https://github.com/sbt/sbt/blob/0.13/main/src/main/scala/sbt/Defaults.scala#L628 //val resolvedScalaVersion = ScalaVersion((scalaVersion in artifactName).value, (scalaBinaryVersion in artifactName).value) @@ -317,14 +317,14 @@ lazy val setJarLocation: Setting[_] = lazy val scalaSubprojectSettings: Seq[Setting[_]] = commonSettings :+ setJarLocation def filterDocSources(ff: FileFilter): Seq[Setting[_]] = Seq( - sources in (Compile, doc) ~= (_.filter(ff.accept)), + (Compile / doc / sources) ~= (_.filter(ff.accept)), // Excluded sources may still be referenced by the included sources, so we add the compiler // output to the scaladoc classpath to resolve them. For the `library` project this is // always required because otherwise the compiler cannot even initialize Definitions without // binaries of the library on the classpath. Specifically, we get this error: // (library/compile:doc) scala.reflect.internal.FatalError: package class scala does not have a member Int - dependencyClasspath in (Compile, doc) += (classDirectory in Compile).value, - doc in Compile := (doc in Compile).dependsOn(compile in Compile).value + (Compile / doc / dependencyClasspath) += (Compile / classDirectory).value, + (Compile / doc) := (Compile / doc).dependsOn((Compile / compile)).value ) def regexFileFilter(s: String): FileFilter = new FileFilter { @@ -335,7 +335,7 @@ def regexFileFilter(s: String): FileFilter = new FileFilter { def setForkedWorkingDirectory: Seq[Setting[_]] = { // When we fork subprocesses, use the base directory as the working directory. 
// This“ enables `sbt> partest test/files/run/t1.scala` or `sbt> scalac sandbox/test.scala` - val setting = (forkOptions in Compile) := (forkOptions in Compile).value.withWorkingDirectory((baseDirectory in ThisBuild).value) + val setting = (Compile / forkOptions) := (Compile / forkOptions).value.withWorkingDirectory((ThisBuild / baseDirectory).value) setting ++ inTask(run)(setting) } @@ -350,19 +350,19 @@ lazy val library = configureAsSubproject(project) name := "scala-library", description := "Scala Standard Library", compileOrder := CompileOrder.Mixed, // needed for JFunction classes in scala.runtime.java8 - scalacOptions in Compile ++= Seq[String]("-sourcepath", (scalaSource in Compile).value.toString), - scalacOptions in Compile in doc ++= { - val libraryAuxDir = (baseDirectory in ThisBuild).value / "src/library-aux" + (Compile / scalacOptions) ++= Seq[String]("-sourcepath", (Compile / scalaSource).value.toString), + (Compile / doc/ scalacOptions) ++= { + val libraryAuxDir = (ThisBuild / baseDirectory).value / "src/library-aux" Seq( "-doc-no-compile", libraryAuxDir.toString, "-skip-packages", "scala.concurrent.impl", - "-doc-root-content", (sourceDirectory in Compile).value + "/rootdoc.txt" + "-doc-root-content", (Compile / sourceDirectory).value + "/rootdoc.txt" ) }, - includeFilter in unmanagedResources in Compile := "*.tmpl" | "*.xml" | "*.js" | "*.css" | "rootdoc.txt", + (Compile / unmanagedResources / includeFilter) := "*.tmpl" | "*.xml" | "*.js" | "*.css" | "rootdoc.txt", // Include *.txt files in source JAR: - mappings in Compile in packageSrc ++= { - val base = (unmanagedResourceDirectories in Compile).value + (Compile / packageSrc / mappings) ++= { + val base = (Compile / unmanagedResourceDirectories).value base ** "*.txt" pair Path.relativeTo(base) }, Osgi.headers += "Import-Package" -> "sun.misc;resolution:=optional, *", @@ -388,7 +388,7 @@ lazy val reflect = configureAsSubproject(project) name := "scala-reflect", description := "Scala Reflection Library", Osgi.bundleName := "Scala Reflect", - scalacOptions in Compile in doc ++= Seq( + (Compile / doc / scalacOptions) ++= Seq( "-skip-packages", "scala.reflect.macros.internal:scala.reflect.internal:scala.reflect.io" ), Osgi.headers += @@ -433,42 +433,42 @@ lazy val compiler = configureAsSubproject(project) // for WhiteSource purposes; the JLine JAR contains a shaded jansi, but WhiteSource // won't know about that unless we tell it.) libraryDependencies ++= Seq(scalaXmlDep, jlineDep % "optional", jansiDep % "optional"), - buildCharacterPropertiesFile := (resourceManaged in Compile).value / "scala-buildcharacter.properties", - resourceGenerators in Compile += generateBuildCharacterPropertiesFile.map(file => Seq(file)).taskValue, + buildCharacterPropertiesFile := (Compile / resourceManaged).value / "scala-buildcharacter.properties", + (Compile / resourceGenerators) += generateBuildCharacterPropertiesFile.map(file => Seq(file)).taskValue, // this a way to make sure that classes from interactive and scaladoc projects // end up in compiler jar. 
note that we need to use LocalProject references // (with strings) to deal with mutual recursion - products in Compile in packageBin := - (products in Compile in packageBin).value ++ - Seq((dependencyClasspath in Compile).value.find(_.get(moduleID.key).map(id => (id.organization, id.name, id.revision)).contains((asmDep.organization, asmDep.name, asmDep.revision))).get.data) ++ - (products in Compile in packageBin in LocalProject("interactive")).value ++ - (products in Compile in packageBin in LocalProject("scaladoc")).value ++ - (products in Compile in packageBin in LocalProject("repl")).value ++ - (products in Compile in packageBin in LocalProject("repl-jline")).value ++ - (products in Compile in packageBin in LocalProject("repl-jline-embedded")).value, - includeFilter in unmanagedResources in Compile := + (Compile / packageBin / products) := + (Compile / packageBin / products).value ++ + Seq((Compile / dependencyClasspath).value.find(_.get(moduleID.key).map(id => (id.organization, id.name, id.revision)).contains((asmDep.organization, asmDep.name, asmDep.revision))).get.data) ++ + (LocalProject("interactive") / Compile / packageBin / products).value ++ + (LocalProject("scaladoc") / Compile / packageBin / products).value ++ + (LocalProject("repl") / Compile / packageBin / products).value ++ + (LocalProject("repl-jline") / Compile / packageBin / products).value ++ + (LocalProject("repl-jline-embedded") / Compile / packageBin / products).value, + (Compile / unmanagedResources / includeFilter) := "*.tmpl" | "*.xml" | "*.js" | "*.css" | "*.html" | "*.properties" | "*.swf" | "*.png" | "*.gif" | "*.gif" | "*.txt", // Also include the selected unmanaged resources and source files from the additional projects in the source JAR: - mappings in Compile in packageSrc ++= { - val base = (unmanagedResourceDirectories in Compile).value ++ - (unmanagedResourceDirectories in Compile in LocalProject("interactive")).value ++ - (unmanagedResourceDirectories in Compile in LocalProject("scaladoc")).value ++ - (unmanagedResourceDirectories in Compile in LocalProject("repl")).value - base ** ((includeFilter in unmanagedResources in Compile).value || "*.scala" || "*.psd" || "*.ai" || "*.java") pair Path.relativeTo(base) + (Compile / packageSrc / mappings) ++= { + val base = (Compile / unmanagedResourceDirectories).value ++ + (LocalProject("interactive") / Compile / unmanagedResourceDirectories).value ++ + (LocalProject("scaladoc") / Compile / unmanagedResourceDirectories).value ++ + (LocalProject("repl")/ Compile / unmanagedResourceDirectories).value + base ** ((Compile / unmanagedResources / includeFilter).value || "*.scala" || "*.psd" || "*.ai" || "*.java") pair Path.relativeTo(base) }, // Include the additional projects in the scaladoc JAR: - sources in Compile in doc ++= { + (Compile / doc / sources) ++= { val base = - (unmanagedSourceDirectories in Compile in LocalProject("interactive")).value ++ - (unmanagedSourceDirectories in Compile in LocalProject("scaladoc")).value ++ - (unmanagedSourceDirectories in Compile in LocalProject("repl")).value + (LocalProject("interactive") / Compile / unmanagedSourceDirectories).value ++ + (LocalProject("scaladoc") / Compile / unmanagedSourceDirectories).value ++ + (LocalProject("repl") / Compile / unmanagedSourceDirectories).value ((base ** ("*.scala" || "*.java")) --- (base ** "Scaladoc*ModelTest.scala") // exclude test classes that depend on partest ).get }, - scalacOptions in Compile in doc ++= Seq( - "-doc-root-content", (sourceDirectory in Compile).value + 
"/rootdoc.txt" + (Compile / doc / scalacOptions) ++= Seq( + "-doc-root-content", (Compile / sourceDirectory).value + "/rootdoc.txt" ), Osgi.headers ++= Seq( "Import-Package" -> ("jline.*;resolution:=optional," + @@ -481,7 +481,7 @@ lazy val compiler = configureAsSubproject(project) // Generate the ScriptEngineFactory service definition. The Ant build does this when building // the JAR but sbt has no support for it and it is easier to do as a resource generator: generateServiceProviderResources("javax.script.ScriptEngineFactory" -> "scala.tools.nsc.interpreter.Scripted$Factory"), - managedResourceDirectories in Compile := Seq((resourceManaged in Compile).value), + (Compile / managedResourceDirectories) := Seq((Compile / resourceManaged).value), fixPom( "/project/name" -> Scala Compiler, "/project/description" -> Compiler for the Scala Programming Language, @@ -505,8 +505,8 @@ lazy val repl = configureAsSubproject(project) .settings(disableDocs) .settings(disablePublishing) .settings( - connectInput in run := true, - run := (run in Compile).partialInput(" -usejavacp").evaluated // Automatically add this so that `repl/run` works without additional arguments. + (run / connectInput) := true, + run := (Compile / run).partialInput(" -usejavacp").evaluated // Automatically add this so that `repl/run` works without additional arguments. ) .dependsOn(compiler, interactive) @@ -529,12 +529,12 @@ lazy val replJlineEmbedded = Project("repl-jline-embedded", file(".") / "target" // quick/repl-jline and quick/repl-jline-shaded on the classpath for quick/bin scripts. // This is different from the Ant build where all parts are combined into quick/repl, but // it is cleaner because it avoids circular dependencies. - compile in Compile := (compile in Compile).dependsOn(Def.task { + (Compile / compile) := (Compile / compile).dependsOn(Def.task { import java.util.jar._ import collection.JavaConverters._ val inputs: Iterator[JarJar.Entry] = { - val repljlineClasses = (products in Compile in replJline).value.flatMap(base => Path.allSubpaths(base).map(x => (base, x._1))) - val jlineJAR = findJar((dependencyClasspath in Compile).value, jlineDep).get.data + val repljlineClasses = (replJline / Compile/ products).value.flatMap(base => Path.allSubpaths(base).map(x => (base, x._1))) + val jlineJAR = findJar((Compile / dependencyClasspath).value, jlineDep).get.data val jarFile = new JarFile(jlineJAR) val jarEntries = jarFile.entries.asScala.filterNot(_.isDirectory).map(entry => JarJar.JarEntryInput(jarFile, entry)) def compiledClasses = repljlineClasses.iterator.map { case (base, file) => JarJar.FileInput(base, file) } @@ -549,10 +549,10 @@ lazy val replJlineEmbedded = Project("repl-jline-embedded", file(".") / "target" Rule("scala.tools.nsc.interpreter.jline.**", "scala.tools.nsc.interpreter.jline_embedded.@1"), Keep("scala.tools.**") ) - val outdir = (classDirectory in Compile).value + val outdir = (Compile / classDirectory).value JarJar(inputs, outdir, config) }).value, - connectInput in run := true + (run / connectInput) := true ) .dependsOn(replJline) @@ -564,9 +564,9 @@ lazy val scaladoc = configureAsSubproject(project) name := "scala-compiler-doc", description := "Scala Documentation Generator", libraryDependencies ++= Seq(scalaXmlDep), - includeFilter in unmanagedResources in Compile := "*.html" | "*.css" | "*.gif" | "*.png" | "*.js" | "*.txt" | "*.svg" | "*.eot" | "*.woff" | "*.ttf", + (Compile / unmanagedResources / includeFilter) := "*.html" | "*.css" | "*.gif" | "*.png" | "*.js" | "*.txt" | "*.svg" | "*.eot" 
| "*.woff" | "*.ttf", libraryDependencies ++= ScaladocSettings.webjarResources, - resourceGenerators in Compile += ScaladocSettings.extractResourcesFromWebjar + (Compile / resourceGenerators) += ScaladocSettings.extractResourcesFromWebjar ) .dependsOn(compiler) @@ -574,14 +574,14 @@ lazy val scalap = configureAsSubproject(project) .settings( description := "Scala Bytecode Parser", // Include decoder.properties - includeFilter in unmanagedResources in Compile := "*.properties", + (Compile / unmanagedResources / includeFilter) := "*.properties", fixPom( "/project/name" -> Scalap, "/project/description" -> bytecode analysis tool, "/project/properties" -> scala.xml.Text("") ), headerLicense := Some(HeaderLicense.Custom( - s"""Scala classfile decoder (${(homepage in ThisBuild).value.get}) + s"""Scala classfile decoder (${(ThisBuild / homepage).value.get}) | |Copyright EPFL and Lightbend, Inc. | @@ -591,11 +591,11 @@ lazy val scalap = configureAsSubproject(project) |See the NOTICE file distributed with this work for |additional information regarding copyright ownership. |""".stripMargin)), - (headerSources in Compile) ~= { xs => + (Compile / headerSources) ~= { xs => val excluded = Set("Memoisable.scala", "Result.scala", "Rule.scala", "Rules.scala", "SeqRule.scala") xs filter { x => !excluded(x.getName) } }, - (headerResources in Compile) := Nil + (Compile / headerResources) := Nil ) .dependsOn(compiler) @@ -623,10 +623,10 @@ lazy val specLib = project.in(file("test") / "instrumented") .settings(disableDocs) .settings(disablePublishing) .settings( - sourceGenerators in Compile += Def.task { + (Compile / sourceGenerators) += Def.task { import scala.collection.JavaConverters._ - val srcBase = (sourceDirectories in Compile in library).value.head / "scala/runtime" - val targetBase = (sourceManaged in Compile).value / "scala/runtime" + val srcBase = (library / Compile / sourceDirectories).value.head / "scala/runtime" + val targetBase = (Compile / sourceManaged).value / "scala/runtime" def patch(srcFile: String, patchFile: String): File = try { val patchLines: List[String] = IO.readLines(baseDirectory.value / patchFile) val origLines: List[String] = IO.readLines(srcBase / srcFile) @@ -683,14 +683,14 @@ lazy val junit = project.in(file("test") / "junit") .settings(disableDocs) .settings(disablePublishing) .settings( - fork in Test := true, - javaOptions in Test ++= "-Xss1M" +: addOpensForTesting, - (forkOptions in Test) := (forkOptions in Test).value.withWorkingDirectory((baseDirectory in ThisBuild).value), - (forkOptions in Test in testOnly) := (forkOptions in Test in testOnly).value.withWorkingDirectory((baseDirectory in ThisBuild).value), + (Test / fork) := true, + (Test / javaOptions) ++= "-Xss1M" +: addOpensForTesting, + (Test / forkOptions) := (Test / forkOptions).value.withWorkingDirectory((ThisBuild / baseDirectory).value), + (Test / testOnly / forkOptions) := (Test / testOnly / forkOptions).value.withWorkingDirectory((ThisBuild / baseDirectory).value), libraryDependencies ++= Seq(junitDep, junitInterfaceDep, jolDep), testOptions += Tests.Argument(TestFrameworks.JUnit, "-a", "-v"), - unmanagedSourceDirectories in Compile := Nil, - unmanagedSourceDirectories in Test := List(baseDirectory.value) + (Compile / unmanagedSourceDirectories) := Nil, + (Test / unmanagedSourceDirectories) := List(baseDirectory.value) ) lazy val scalacheck = project.in(file("test") / "scalacheck") @@ -701,20 +701,20 @@ lazy val scalacheck = project.in(file("test") / "scalacheck") .settings(disablePublishing) .settings( // 
enable forking to workaround https://github.com/sbt/sbt/issues/4009 - fork in Test := true, - javaOptions in Test ++= "-Xss1M" +: addOpensForTesting, + (Test / fork) := true, + (Test / javaOptions) ++= "-Xss1M" +: addOpensForTesting, testOptions ++= { - if ((fork in Test).value) Nil + if ((Test / fork).value) Nil else List(Tests.Cleanup { loader => sbt.internal.inc.ModuleUtilities.getObject("scala.TestCleanup", loader).asInstanceOf[Runnable].run() }) }, libraryDependencies ++= Seq(scalacheckDep), - unmanagedSourceDirectories in Compile := Nil, - unmanagedSourceDirectories in Test := List(baseDirectory.value) + (Compile / unmanagedSourceDirectories) := Nil, + (Test / unmanagedSourceDirectories) := List(baseDirectory.value) ).settings( // Workaround for https://github.com/sbt/sbt/pull/3985 - List(Keys.test, Keys.testOnly).map(task => parallelExecution in task := false) : _* + List(Keys.test, Keys.testOnly).map(task => (task / parallelExecution) := false) : _* ) lazy val osgiTestFelix = osgiTestProject( @@ -732,8 +732,8 @@ def osgiTestProject(p: Project, framework: ModuleID) = p .settings(disableDocs) .settings(disablePublishing) .settings( - fork in Test := true, - parallelExecution in Test := false, + (Test / fork) := true, + (Test / parallelExecution) := false, libraryDependencies ++= { val paxExamVersion = "4.11.0" // Last version which supports Java 9+ Seq( @@ -750,23 +750,23 @@ def osgiTestProject(p: Project, framework: ModuleID) = p framework % "test" ) }, - Keys.test in Test := (Keys.test in Test).dependsOn(packageBin in Compile).value, - Keys.testOnly in Test := (Keys.testOnly in Test).dependsOn(packageBin in Compile).evaluated, + (Test / Keys.test) := (Test / Keys.test).dependsOn((Compile / packageBin)).value, + (Test / Keys.testOnly) := (Test / Keys.testOnly).dependsOn((Compile / packageBin)).evaluated, testOptions += Tests.Argument(TestFrameworks.JUnit, "-a", "-v", "-q"), - javaOptions in Test ++= ("-Dscala.bundle.dir=" + (buildDirectory in ThisBuild).value / "osgi") +: addOpensForTesting, - (forkOptions in Test in test) := (forkOptions in Test in test).value.withWorkingDirectory((baseDirectory in ThisBuild).value), - unmanagedSourceDirectories in Test := List((baseDirectory in ThisBuild).value / "test" / "osgi" / "src"), - unmanagedResourceDirectories in Compile := (unmanagedSourceDirectories in Test).value, - includeFilter in unmanagedResources in Compile := "*.xml", - packageBin in Compile := { // Put the bundle JARs required for the tests into build/osgi - val targetDir = (buildDirectory in ThisBuild).value / "osgi" - val mappings = ((mkPack in dist).value / "lib").listFiles.collect { + (Test / javaOptions) ++= ("-Dscala.bundle.dir=" + (ThisBuild / buildDirectory).value / "osgi") +: addOpensForTesting, + (test / Test / forkOptions) := (test / Test / forkOptions).value.withWorkingDirectory((ThisBuild / baseDirectory).value), + (Test / unmanagedSourceDirectories) := List((ThisBuild / baseDirectory).value / "test" / "osgi" / "src"), + (Compile / unmanagedResourceDirectories) := (Test / unmanagedSourceDirectories).value, + (Compile / unmanagedResources / includeFilter) := "*.xml", + (Compile / packageBin) := { // Put the bundle JARs required for the tests into build/osgi + val targetDir = (ThisBuild / buildDirectory).value / "osgi" + val mappings = ((dist / mkPack).value / "lib").listFiles.collect { case f if f.getName.startsWith("scala-") && f.getName.endsWith(".jar") => (f, targetDir / f.getName) } IO.copy(mappings, CopyOptions() withOverwrite true) targetDir }, - cleanFiles += 
(buildDirectory in ThisBuild).value / "osgi" + cleanFiles += (ThisBuild / buildDirectory).value / "osgi" ) lazy val partestJavaAgent = Project("partest-javaagent", file(".") / "src" / "partest-javaagent") @@ -781,7 +781,7 @@ lazy val partestJavaAgent = Project("partest-javaagent", file(".") / "src" / "pa name := "scala-partest-javaagent", description := "Scala Compiler Testing Tool (compiler-specific java agent)", // add required manifest entry - previously included from file - packageOptions in (Compile, packageBin) += + (Compile / packageBin / packageOptions) += Package.ManifestAttributes( "Premain-Class" -> "scala.tools.partest.javaagent.ProfilingAgent" ), // we need to build this to a JAR exportJars := true @@ -798,29 +798,29 @@ lazy val test = project .settings( libraryDependencies ++= Seq(asmDep, scalaXmlDep), // no main sources - sources in Compile := Seq.empty, + (Compile / sources) := Seq.empty, // test sources are compiled in partest run, not here - sources in IntegrationTest := Seq.empty, - fork in IntegrationTest := true, + (IntegrationTest / sources) := Seq.empty, + (IntegrationTest / fork) := true, // enable this in 2.13, when tests pass //scalacOptions in Compile += "-Yvalidate-pos:parser,typer", - scalacOptions in Compile -= "-Ywarn-unused:imports", - javaOptions in IntegrationTest ++= List("-Xmx2G", "-Dpartest.exec.in.process=true", "-Dfile.encoding=UTF-8", "-Duser.language=en", "-Duser.country=US") ++ addOpensForTesting, - testOptions in IntegrationTest += Tests.Argument("-Dfile.encoding=UTF-8", "-Duser.language=en", "-Duser.country=US"), + (Compile / scalacOptions) -= "-Ywarn-unused:imports", + (IntegrationTest / javaOptions) ++= List("-Xmx2G", "-Dpartest.exec.in.process=true", "-Dfile.encoding=UTF-8", "-Duser.language=en", "-Duser.country=US") ++ addOpensForTesting, + (IntegrationTest / testOptions) += Tests.Argument("-Dfile.encoding=UTF-8", "-Duser.language=en", "-Duser.country=US"), testFrameworks += new TestFramework("scala.tools.partest.sbt.Framework"), - testOptions in IntegrationTest += Tests.Argument(s"""-Dpartest.java_opts=-Xmx1024M -Xms64M ${addOpensForTesting.mkString(" ")}"""), - testOptions in IntegrationTest += Tests.Argument("-Dpartest.scalac_opts=" + (scalacOptions in Compile).value.mkString(" ")), - (forkOptions in IntegrationTest) := (forkOptions in IntegrationTest).value.withWorkingDirectory((baseDirectory in ThisBuild).value), - testOptions in IntegrationTest += { - val cp = (dependencyClasspath in Test).value - val baseDir = (baseDirectory in ThisBuild).value - val instrumentedJar = (packagedArtifact in (LocalProject("specLib"), Compile, packageBin)).value._2 + (IntegrationTest / testOptions) += Tests.Argument(s"""-Dpartest.java_opts=-Xmx1024M -Xms64M ${addOpensForTesting.mkString(" ")}"""), + (IntegrationTest / testOptions) += Tests.Argument("-Dpartest.scalac_opts=" + (Compile / scalacOptions).value.mkString(" ")), + (IntegrationTest / forkOptions) := (IntegrationTest / forkOptions).value.withWorkingDirectory((ThisBuild / baseDirectory).value), + (IntegrationTest / testOptions) += { + val cp = (Test / dependencyClasspath).value + val baseDir = (ThisBuild / baseDirectory).value + val instrumentedJar = (LocalProject("specLib") / Compile / packageBin / packagedArtifact).value._2 Tests.Setup { () => // Copy code.jar (resolved in the otherwise unused scope "test") and instrumented.jar (from specLib)to the location where partest expects them IO.copyFile(instrumentedJar, baseDir / "test/files/speclib/instrumented.jar") } }, - definedTests in 
IntegrationTest += new sbt.TestDefinition( + (IntegrationTest / definedTests) += new sbt.TestDefinition( "partest", // marker fingerprint since there are no test classes // to be discovered by sbt: @@ -829,10 +829,10 @@ lazy val test = project def annotationName = "partest" }, true, Array() ), - executeTests in IntegrationTest := { + (IntegrationTest / executeTests) := { val log = streams.value.log - val result = (executeTests in IntegrationTest).value - val result2 = (executeTests in Test).value + val result = (IntegrationTest / executeTests).value + val result2 = (Test / executeTests).value if (result.overall != TestResult.Error && result.events.isEmpty) { // workaround for https://github.com/sbt/sbt/issues/2722 log.error("No test events found") @@ -840,7 +840,7 @@ lazy val test = project } else result }, - testListeners in IntegrationTest += new PartestTestListener(target.value) + (IntegrationTest / testListeners) += new PartestTestListener(target.value) ) lazy val manual = configureAsSubproject(project) @@ -848,7 +848,7 @@ lazy val manual = configureAsSubproject(project) .settings(disablePublishing) .settings( libraryDependencies ++= Seq(scalaXmlDep, antDep, "org.scala-lang" % "scala-library" % scalaVersion.value), - classDirectory in Compile := (target in Compile).value / "classes" + (Compile / classDirectory) := (Compile / target).value / "classes" ) lazy val libraryAll = Project("library-all", file(".") / "target" / "library-all-src-dummy") @@ -856,8 +856,8 @@ lazy val libraryAll = Project("library-all", file(".") / "target" / "library-all .settings(disableDocs) .settings( name := "scala-library-all", - publishArtifact in (Compile, packageBin) := false, - publishArtifact in (Compile, packageSrc) := false, + (Compile / packageBin / publishArtifact) := false, + (Compile / packageSrc / publishArtifact) := false, libraryDependencies ++= Seq(scalaXmlDep, scalaParserCombinatorsDep, scalaSwingDep), apiURL := None, fixPom( @@ -871,26 +871,26 @@ lazy val scalaDist = Project("scala-dist", file(".") / "target" / "scala-dist-di .settings(commonSettings) .settings(disableDocs) .settings( - mappings in Compile in packageBin ++= { + (Compile / packageBin / mappings) ++= { val binBaseDir = buildDirectory.value / "pack" - val binMappings = (mkBin in dist).value.pair(Path.relativeTo(binBaseDir), errorIfNone = false) + val binMappings = (dist / mkBin).value.pair(Path.relativeTo(binBaseDir), errorIfNone = false) // With the way the resource files are spread out over the project sources we can't just add // an unmanagedResourceDirectory, so we generate the mappings manually: - val docBaseDir = (baseDirectory in ThisBuild).value + val docBaseDir = (ThisBuild / baseDirectory).value val docMappings = (docBaseDir / "doc").allPaths pair Path.relativeTo(docBaseDir) - val resBaseDir = (baseDirectory in ThisBuild).value / "src/manual/scala/tools/docutil/resources" + val resBaseDir = (ThisBuild / baseDirectory).value / "src/manual/scala/tools/docutil/resources" val resMappings = resBaseDir ** ("*.html" | "*.css" | "*.gif" | "*.png") pair (p => Path.relativeTo(resBaseDir)(p).map("doc/tools/" + _)) docMappings ++ resMappings ++ binMappings }, - resourceGenerators in Compile += Def.task { + (Compile / resourceGenerators) += Def.task { val command = "fsc, scala, scalac, scaladoc, scalap" - val htmlOut = (resourceManaged in Compile).value / "doc/tools" - val manOut = (resourceManaged in Compile).value / "genman" - val fixedManOut = (resourceManaged in Compile).value / "man" + val htmlOut = (Compile / 
resourceManaged).value / "doc/tools" + val manOut = (Compile / resourceManaged).value / "genman" + val fixedManOut = (Compile / resourceManaged).value / "man" IO.createDirectory(htmlOut) IO.createDirectory(manOut / "man1") runner.value.run("scala.tools.docutil.ManMaker", - (fullClasspath in Compile in manual).value.files, + (manual / Compile / fullClasspath).value.files, Seq(command, htmlOut.getAbsolutePath, manOut.getAbsolutePath), streams.value.log).failed foreach (sys error _.getMessage) (manOut ** "*.1" pair Path.rebase(manOut, fixedManOut)).foreach { case (in, out) => @@ -901,7 +901,7 @@ lazy val scalaDist = Project("scala-dist", file(".") / "target" / "scala-dist-di } (htmlOut ** "*.html").get ++ (fixedManOut ** "*.1").get }.taskValue, - managedResourceDirectories in Compile := Seq((resourceManaged in Compile).value), + (Compile / managedResourceDirectories) := Seq((Compile / resourceManaged).value), libraryDependencies += jlineDep, apiURL := None, fixPom( @@ -909,7 +909,7 @@ lazy val scalaDist = Project("scala-dist", file(".") / "target" / "scala-dist-di "/project/description" -> The Artifacts Distributed with Scala, "/project/packaging" -> jar ), - publishArtifact in (Compile, packageSrc) := false + (Compile / packageSrc / publishArtifact) := false ) .dependsOn(libraryAll, compiler, scalap) @@ -920,7 +920,7 @@ lazy val root: Project = (project in file(".")) .settings( commands ++= ScriptCommands.all, extractBuildCharacterPropertiesFile := { - val jar = (scalaInstance in bootstrap).value.allJars.find(_.getName contains "-compiler").get + val jar = (bootstrap / scalaInstance).value.allJars.find(_.getName contains "-compiler").get val bc = buildCharacterPropertiesFile.value val packagedName = "scala-buildcharacter.properties" IO.withTemporaryDirectory { tmp => @@ -936,7 +936,7 @@ lazy val root: Project = (project in file(".")) // source links (could be fixed by shipping these sources with the scaladoc bundles) and scala-js source maps // rely on them being on github. 
commands += Command.command("generateSources") { state => - val dir = (((baseDirectory in ThisBuild).value) / "src" / "library" / "scala") + val dir = (((ThisBuild / baseDirectory).value) / "src" / "library" / "scala") genprod.main(Array(dir.getPath)) GenerateAnyVals.run(dir.getAbsoluteFile) state @@ -944,24 +944,24 @@ lazy val root: Project = (project in file(".")) testAll := { val results = ScriptCommands.sequence[(Result[Unit], String)](List( - (Keys.test in Test in junit).result map (_ -> "junit/test"), - (Keys.test in Test in scalacheck).result map (_ -> "scalacheck/test"), - (testOnly in IntegrationTest in testP).toTask(" -- run").result map (_ -> "partest run"), - (testOnly in IntegrationTest in testP).toTask(" -- pos neg jvm").result map (_ -> "partest pos neg jvm"), - (testOnly in IntegrationTest in testP).toTask(" -- res scalap specialized").result map (_ -> "partest res scalap specialized"), - (testOnly in IntegrationTest in testP).toTask(" -- instrumented presentation").result map (_ -> "partest instrumented presentation"), - (testOnly in IntegrationTest in testP).toTask(" -- --srcpath scaladoc").result map (_ -> "partest --srcpath scaladoc"), - (testOnly in IntegrationTest in testP).toTask(" -- --srcpath async").result map (_ -> "partest --srcpath async"), - (Keys.test in Test in osgiTestFelix).result map (_ -> "osgiTestFelix/test"), - (Keys.test in Test in osgiTestEclipse).result map (_ -> "osgiTestEclipse/test"), + (junit / Test / Keys.test).result map (_ -> "junit/test"), + (scalacheck / Test / Keys.test).result map (_ -> "scalacheck/test"), + (testP / IntegrationTest / testOnly).toTask(" -- run").result map (_ -> "partest run"), + (testP / IntegrationTest / testOnly).toTask(" -- pos neg jvm").result map (_ -> "partest pos neg jvm"), + (testP / IntegrationTest / testOnly).toTask(" -- res scalap specialized").result map (_ -> "partest res scalap specialized"), + (testP / IntegrationTest / testOnly).toTask(" -- instrumented presentation").result map (_ -> "partest instrumented presentation"), + (testP / IntegrationTest / testOnly).toTask(" -- --srcpath scaladoc").result map (_ -> "partest --srcpath scaladoc"), + (testP / IntegrationTest / testOnly).toTask(" -- --srcpath async").result map (_ -> "partest --srcpath async"), + (osgiTestFelix / Test / Keys.test).result map (_ -> "osgiTestFelix/test"), + (osgiTestEclipse / Test / Keys.test).result map (_ -> "osgiTestEclipse/test"), (library / mimaReportBinaryIssues).result.map(_ -> "library/mimaReportBinaryIssues"), // doesn't aggregate.. (reflect / mimaReportBinaryIssues).result.map(_ -> "reflect/mimaReportBinaryIssues"), // ..so specify both - (compile in Compile in bench).map(_ => ()).result map (_ -> "bench/compile"), + (bench / Compile / compile).map(_ => ()).result map (_ -> "bench/compile"), Def.task(()).dependsOn( // Run these in parallel: - doc in Compile in library, - doc in Compile in reflect, - doc in Compile in compiler, - doc in Compile in scalap + (library / Compile / doc), + (reflect / Compile / doc), + (compiler / Compile / doc), + (scalap / Compile / doc) ).result map (_ -> "doc") )).value val log = streams.value.log @@ -1008,7 +1008,7 @@ lazy val root: Project = (project in file(".")) ) .aggregate(library, reflect, compiler, compilerOptionsExporter, interactive, repl, replJline, replJlineEmbedded, scaladoc, scalap, partest, junit, libraryAll, scalaDist).settings( - sources in Compile := Seq.empty, + (Compile / sources) := Seq.empty, onLoadMessage := s"""|*** Welcome to the sbt build definition for Scala! 
*** |version=${(Global / version).value} scalaVersion=${(Global / scalaVersion).value} |Check README.md for more information.""".stripMargin @@ -1028,33 +1028,33 @@ lazy val dist = (project in file("dist")) libraryDependencies ++= Seq(scalaSwingDep, jlineDep), mkBin := mkBinImpl.value, mkQuick := Def.task { - val cp = (fullClasspath in IntegrationTest in LocalProject("test")).value - val propsFile = (buildDirectory in ThisBuild).value / "quick" / "partest.properties" + val cp = (LocalProject("test") / IntegrationTest / fullClasspath).value + val propsFile = (ThisBuild / buildDirectory).value / "quick" / "partest.properties" val props = new java.util.Properties() props.setProperty("partest.classpath", cp.map(_.data.getAbsolutePath).mkString(sys.props("path.separator"))) IO.write(props, null, propsFile) - (buildDirectory in ThisBuild).value / "quick" - }.dependsOn((distDependencies.map(products in Runtime in _) :+ mkBin): _*).value, - mkPack := Def.task { (buildDirectory in ThisBuild).value / "pack" }.dependsOn(packagedArtifact in (Compile, packageBin), mkBin).value, - target := (baseDirectory in ThisBuild).value / "target" / thisProject.value.id, - packageBin in Compile := { + (ThisBuild / buildDirectory).value / "quick" + }.dependsOn((distDependencies.map((_ / Runtime / products)) :+ mkBin): _*).value, + mkPack := Def.task { (ThisBuild / buildDirectory).value / "pack" }.dependsOn((Compile / packageBin / packagedArtifact), mkBin).value, + target := (ThisBuild / baseDirectory).value / "target" / thisProject.value.id, + (Compile / packageBin) := { val extraDeps = Set(scalaSwingDep, scalaParserCombinatorsDep, scalaXmlDep) - val targetDir = (buildDirectory in ThisBuild).value / "pack" / "lib" + val targetDir = (ThisBuild / buildDirectory).value / "pack" / "lib" def uniqueModule(m: ModuleID) = (m.organization, m.name.replaceFirst("_.*", "")) val extraModules = extraDeps.map(uniqueModule) - val extraJars = (externalDependencyClasspath in Compile).value.map(a => (a.get(moduleID.key), a.data)).collect { + val extraJars = (Compile / externalDependencyClasspath).value.map(a => (a.get(moduleID.key), a.data)).collect { case (Some(m), f) if extraModules contains uniqueModule(m) => f } - val jlineJAR = findJar((dependencyClasspath in Compile).value, jlineDep).get.data + val jlineJAR = findJar((Compile / dependencyClasspath).value, jlineDep).get.data val mappings = extraJars.map(f => (f, targetDir / f.getName)) :+ ((jlineJAR, targetDir / "jline.jar")) IO.copy(mappings, CopyOptions() withOverwrite true) targetDir }, - cleanFiles += (buildDirectory in ThisBuild).value / "quick", - cleanFiles += (buildDirectory in ThisBuild).value / "pack", - packagedArtifact in (Compile, packageBin) := - (packagedArtifact in (Compile, packageBin)) - .dependsOn(distDependencies.map(packagedArtifact in (Compile, packageBin) in _): _*) + cleanFiles += (ThisBuild / buildDirectory).value / "quick", + cleanFiles += (ThisBuild / buildDirectory).value / "pack", + (Compile / packageBin / packagedArtifact) := + (Compile / packageBin / packagedArtifact) + .dependsOn(distDependencies.map((_ / Runtime / packageBin/ packagedArtifact)): _*) .value ) .dependsOn(distDependencies.map(p => p: ClasspathDep[ProjectReference]): _*) @@ -1099,7 +1099,7 @@ lazy val mkBinImpl: Def.Initialize[Task[Seq[File]]] = Def.task { properties = Map.empty, javaOpts = "-Xmx256M -Xms32M", toolFlags = "") - val rootDir = (classDirectory in Compile in compiler).value + val rootDir = (compiler / Compile / classDirectory).value val quickOutDir = buildDirectory.value 
/ "quick/bin" val packOutDir = buildDirectory.value / "pack/bin" def writeScripts(scalaTool: ScalaTool, file: String, outDir: File): Seq[File] = { @@ -1122,32 +1122,32 @@ lazy val mkBinImpl: Def.Initialize[Task[Seq[File]]] = Def.task { streams.value.log.info(s"Creating scripts in $quickOutDir and $packOutDir") - mkBin("scala" , "scala.tools.nsc.MainGenericRunner", (fullClasspath in Compile in replJlineEmbedded).value) ++ - mkBin("scalac" , "scala.tools.nsc.Main", (fullClasspath in Compile in compiler).value) ++ - mkBin("fsc" , "scala.tools.nsc.CompileClient", (fullClasspath in Compile in compiler).value) ++ - mkBin("scaladoc" , "scala.tools.nsc.ScalaDoc", (fullClasspath in Compile in scaladoc).value) ++ - mkBin("scalap" , "scala.tools.scalap.Main", (fullClasspath in Compile in scalap).value) + mkBin("scala" , "scala.tools.nsc.MainGenericRunner", (replJlineEmbedded / Compile / fullClasspath).value) ++ + mkBin("scalac" , "scala.tools.nsc.Main", (compiler / Compile / fullClasspath).value) ++ + mkBin("fsc" , "scala.tools.nsc.CompileClient", (compiler / Compile / fullClasspath).value) ++ + mkBin("scaladoc" , "scala.tools.nsc.ScalaDoc", (scaladoc / Compile / fullClasspath).value) ++ + mkBin("scalap" , "scala.tools.scalap.Main", (scalap / Compile / fullClasspath).value) } /** Generate service provider definition files under META-INF/services */ def generateServiceProviderResources(services: (String, String)*): Setting[_] = - resourceGenerators in Compile += Def.task { + (Compile / resourceGenerators) += Def.task { services.map { case (k, v) => - val f = (resourceManaged in Compile).value / "META-INF/services" / k + val f = (Compile / resourceManaged).value / "META-INF/services" / k IO.write(f, v + "\n") f } }.taskValue -buildDirectory in ThisBuild := (baseDirectory in ThisBuild).value / "build" +(ThisBuild / buildDirectory) := (ThisBuild / baseDirectory).value / "build" // Add tab completion to partest -commands += Command("partest")(_ => PartestUtil.partestParser((baseDirectory in ThisBuild).value, (baseDirectory in ThisBuild).value / "test")) { (state, parsed) => +commands += Command("partest")(_ => PartestUtil.partestParser((ThisBuild / baseDirectory).value, (ThisBuild / baseDirectory).value / "test")) { (state, parsed) => ("test/it:testOnly -- " + parsed) :: state } // Watch the test files also so ~partest triggers on test case changes -watchSources ++= PartestUtil.testFilePaths((baseDirectory in ThisBuild).value, (baseDirectory in ThisBuild).value / "test") +watchSources ++= PartestUtil.testFilePaths((ThisBuild / baseDirectory).value, (ThisBuild / baseDirectory).value / "test") // Add tab completion to scalac et al. 
commands ++= { @@ -1158,7 +1158,7 @@ commands ++= { commands.map { case (entryPoint, projectRef, mainClassName) => - Command(entryPoint)(_ => ScalaOptionParser.scalaParser(entryPoint, (baseDirectory in ThisBuild).value)) { (state, parsedOptions) => + Command(entryPoint)(_ => ScalaOptionParser.scalaParser(entryPoint, (ThisBuild / baseDirectory).value)) { (state, parsedOptions) => (projectRef + "/runMain " + mainClassName + " -usejavacp " + parsedOptions) :: state } } @@ -1168,7 +1168,7 @@ addCommandAlias("scalap", "scalap/compile:runMain scala.tools.sca lazy val intellij = taskKey[Unit]("Update the library classpaths in the IntelliJ project files.") -def moduleDeps(p: Project, config: Configuration = Compile) = (externalDependencyClasspath in config in p).map(a => (p.id, a.map(_.data))) +def moduleDeps(p: Project, config: Configuration = Compile) = (p / config / externalDependencyClasspath).map(a => (p.id, a.map(_.data))) // aliases to projects to prevent name clashes def compilerP = compiler @@ -1179,7 +1179,7 @@ intellij := { import xml.transform._ val s = streams.value - val compilerScalaInstance = (scalaInstance in LocalProject("compiler")).value + val compilerScalaInstance = (LocalProject("compiler") / scalaInstance).value val modules: List[(String, Seq[File])] = { // for the sbt build module, the dependencies are fetched from the project's build using sbt-buildinfo @@ -1261,11 +1261,11 @@ intellij := { r } - val intellijDir = (baseDirectory in ThisBuild).value / "src/intellij" + val intellijDir = (ThisBuild / baseDirectory).value / "src/intellij" val ipr = intellijDir / "scala.ipr" backupIdea(intellijDir) if (!ipr.exists) { - intellijCreateFromSample((baseDirectory in ThisBuild).value) + intellijCreateFromSample((ThisBuild / baseDirectory).value) } s.log.info("Updating library classpaths in src/intellij/scala.ipr.") val content = XML.loadFile(ipr) @@ -1302,10 +1302,10 @@ def backupIdea(ideaDir: File): Unit = { intellijFromSample := { val s = streams.value - val intellijDir = (baseDirectory in ThisBuild).value / "src/intellij" + val intellijDir = (ThisBuild / baseDirectory).value / "src/intellij" val ipr = intellijDir / "scala.ipr" backupIdea(intellijDir) - intellijCreateFromSample((baseDirectory in ThisBuild).value) + intellijCreateFromSample((ThisBuild / baseDirectory).value) } def intellijCreateFromSample(basedir: File): Unit = { @@ -1318,7 +1318,7 @@ lazy val intellijToSample = taskKey[Unit]("Update src/intellij/*.SAMPLE using th intellijToSample := { val s = streams.value - val intellijDir = (baseDirectory in ThisBuild).value / "src/intellij" + val intellijDir = (ThisBuild / baseDirectory).value / "src/intellij" val ipr = intellijDir / "scala.ipr" backupIdea(intellijDir) val existing =intellijDir * "*.SAMPLE" @@ -1339,6 +1339,8 @@ whitesourceProduct := "Lightbend Reactive Platform" whitesourceAggregateProjectName := "scala-2.12-stable" whitesourceIgnoredScopes := Vector("test", "scala-tool") +Global / excludeLintKeys := (Global / excludeLintKeys).value ++ Set(scalaSource, javaSource, resourceDirectory) + { scala.build.TravisOutput.installIfOnTravis() Nil diff --git a/project/AutomaticModuleName.scala b/project/AutomaticModuleName.scala index 8a70c67adae6..9e9bb74ea3bc 100644 --- a/project/AutomaticModuleName.scala +++ b/project/AutomaticModuleName.scala @@ -15,7 +15,7 @@ object AutomaticModuleName { def settings(name: String): Seq[Def.Setting[_]] = { val pair = ("Automatic-Module-Name" -> name) Seq( - packageOptions in (Compile, packageBin) += Package.ManifestAttributes(pair), + 
(Compile / packageBin / packageOptions) += Package.ManifestAttributes(pair), Osgi.headers += pair ) } diff --git a/project/JitWatch.scala b/project/JitWatch.scala index 8bd483cc618f..84037d6067e8 100644 --- a/project/JitWatch.scala +++ b/project/JitWatch.scala @@ -34,14 +34,14 @@ object JitWatchFilePlugin extends AutoPlugin { // Transitive sources from the projects that contribute to this classpath. val projects: Seq[ProjectRef] = buildDependencies.value.classpathTransitiveRefs(thisProjectRef.value) :+ thisProjectRef.value - val projectArtifacts: Map[ProjectRef, Seq[Artifact]] = projects.map(project => (project -> (Keys.artifacts in project get settingsData.value).getOrElse(Nil))).toMap - val artifactNameToProject: Map[String, Seq[ProjectRef]] = projects.groupBy(project => (Keys.name in project get settingsData.value).getOrElse("")) + val projectArtifacts: Map[ProjectRef, Seq[Artifact]] = projects.map(project => (project -> ((project / Keys.artifacts) get settingsData.value).getOrElse(Nil))).toMap + val artifactNameToProject: Map[String, Seq[ProjectRef]] = projects.groupBy(project => ((project / Keys.name) get settingsData.value).getOrElse("")) val transitiveSourceDirectories = projects.flatMap { project => - val projectArtifacts: Seq[Artifact] = (Keys.artifacts in project get settingsData.value).getOrElse(Nil) + val projectArtifacts: Seq[Artifact] = ((project / Keys.artifacts) get settingsData.value).getOrElse(Nil) val matching = projectArtifacts.filter(artifacts.contains(_)) val configs = matching.flatMap(artifact => artifact.configurations).distinct val sourceDirectories: Seq[File] = configs.flatMap { configRef => - (Keys.sourceDirectories in project in sbt.Configuration.of(configRef.name.capitalize, configRef.name)).get(settingsData.value).toList.flatten + (project / sbt.Configuration.of(configRef.name.capitalize, configRef.name) / Keys.sourceDirectories).get(settingsData.value).toList.flatten } sourceDirectories }.distinct @@ -50,7 +50,7 @@ object JitWatchFilePlugin extends AutoPlugin { projects.flatMap { project: ProjectRef => val configs = artifact.configurations val sourceDirectories: Seq[File] = configs.toList.flatMap { configRef => - (Keys.sourceDirectories in project in sbt.Configuration.of(configRef.name.capitalize, configRef.name)).get(settingsData.value).toList.flatten + (project / sbt.Configuration.of(configRef.name.capitalize, configRef.name) / Keys.sourceDirectories).get(settingsData.value).toList.flatten } sourceDirectories } diff --git a/project/License.scala b/project/License.scala index 13c8a2778544..baa5fded751c 100644 --- a/project/License.scala +++ b/project/License.scala @@ -10,7 +10,7 @@ object License extends AutoPlugin { override def projectSettings: Seq[Def.Setting[_]] = List(packageSrc, packageBin, packageDoc) - .map(pkg => mappings in (Compile, pkg) ++= licenseMapping.value) + .map(pkg => (Compile / pkg / mappings) ++= licenseMapping.value) override def buildSettings: Seq[Def.Setting[_]] = Seq( licenseMapping := List("LICENSE", "NOTICE").map(fn => (baseDirectory.value / fn) -> fn) diff --git a/project/Osgi.scala b/project/Osgi.scala index c77866145505..636a7f1e8aba 100644 --- a/project/Osgi.scala +++ b/project/Osgi.scala @@ -38,14 +38,14 @@ object Osgi { }, jarlist := false, bundle := Def.task { - val cp = (products in Compile in packageBin).value + val cp = (Compile / packageBin / products).value val licenseFiles = License.licenseMapping.value.map(_._1) bundleTask(headers.value.toMap, jarlist.value, cp, - (artifactPath in (Compile, packageBin)).value, cp ++ 
licenseFiles, streams.value) + (Compile / packageBin / artifactPath).value, cp ++ licenseFiles, streams.value) }.value, - packagedArtifact in (Compile, packageBin) := (((artifact in (Compile, packageBin)).value, bundle.value)), + (Compile / packageBin / packagedArtifact) := (((Compile / packageBin / artifact).value, bundle.value)), // Also create OSGi source bundles: - packageOptions in (Compile, packageSrc) += Package.ManifestAttributes( + (Compile / packageSrc / packageOptions) += Package.ManifestAttributes( "Bundle-Name" -> (description.value + " Sources"), "Bundle-SymbolicName" -> (bundleSymbolicName.value + ".source"), "Bundle-Version" -> versionProperties.value.osgiVersion, diff --git a/project/ScaladocSettings.scala b/project/ScaladocSettings.scala index 634b3856a847..1ac6ed7a1916 100644 --- a/project/ScaladocSettings.scala +++ b/project/ScaladocSettings.scala @@ -15,7 +15,7 @@ object ScaladocSettings { s.get(artifact.key).isDefined && s.get(moduleID.key).exists(_.organization == "org.webjars") val dest = (resourceManaged.value / "webjars").getAbsoluteFile IO.createDirectory(dest) - val classpathes = (dependencyClasspath in Compile).value + val classpathes = (Compile / dependencyClasspath).value val files: Seq[File] = classpathes.filter(isWebjar).flatMap { classpathEntry => val jarFile = classpathEntry.data IO.unzip(jarFile, dest) diff --git a/project/ScriptCommands.scala b/project/ScriptCommands.scala index 12cd37e34156..8716be837e4a 100644 --- a/project/ScriptCommands.scala +++ b/project/ScriptCommands.scala @@ -26,7 +26,7 @@ object ScriptCommands { * The optional argument is the Artifactory snapshot repository URL. */ def setupPublishCoreNonOpt = setup("setupPublishCoreNonOpt") { args => Seq( - baseVersionSuffix in Global := "SHA-SNAPSHOT" + (Global / baseVersionSuffix) := "SHA-SNAPSHOT" ) ++ (args match { case Seq(url) => publishTarget(url) case Nil => Nil @@ -37,7 +37,7 @@ object ScriptCommands { * The optional argument is the Artifactory snapshot repository URL. */ def setupPublishCore = setup("setupPublishCore") { args => Seq( - baseVersionSuffix in Global := "SHA-SNAPSHOT" + (Global / baseVersionSuffix) := "SHA-SNAPSHOT" ) ++ (args match { case Seq(url) => publishTarget(url) case Nil => Nil @@ -48,9 +48,9 @@ object ScriptCommands { * The optional argument is the Artifactory snapshot repository URL. 
*/ def setupValidateTest = setup("setupValidateTest") { args => Seq( - testOptions in IntegrationTest in LocalProject("test") ++= Seq(Tests.Argument("--show-log"), Tests.Argument("--show-diff")) + LocalProject("test") / IntegrationTest / testOptions ++= Seq(Tests.Argument("--show-log"), Tests.Argument("--show-diff")) ) ++ (args match { - case Seq(url) => Seq(resolvers in Global += "scala-pr" at url) + case Seq(url) => Seq((Global / resolvers) += "scala-pr" at url) case Nil => Nil }) ++ enableOptimizer } @@ -61,8 +61,8 @@ object ScriptCommands { def setupBootstrapStarr = setup("setupBootstrapStarr") { case Seq(fileOrUrl, ver) => val url = fileToUrl(fileOrUrl) Seq( - baseVersion in Global := ver, - baseVersionSuffix in Global := "SPLIT" + (Global / baseVersion) := ver, + (Global / baseVersionSuffix) := "SPLIT" ) ++ publishTarget(url) ++ noDocs ++ enableOptimizer } @@ -72,9 +72,9 @@ object ScriptCommands { def setupBootstrapLocker = setup("setupBootstrapLocker") { case Seq(fileOrUrl, ver) => val url = fileToUrl(fileOrUrl) Seq( - baseVersion in Global := ver, - baseVersionSuffix in Global := "SPLIT", - resolvers in Global += "scala-pr" at url + (Global / baseVersion) := ver, + (Global / baseVersionSuffix) := "SPLIT", + (Global / resolvers) += "scala-pr" at url ) ++ publishTarget(url) ++ noDocs ++ enableOptimizer } @@ -88,10 +88,10 @@ object ScriptCommands { val targetUrl = fileToUrl(targetFileOrUrl) val resolverUrl = fileToUrl(resolverFileOrUrl) Seq( - baseVersion in Global := ver, - baseVersionSuffix in Global := "SPLIT", - resolvers in Global += "scala-pr" at resolverUrl, - testOptions in IntegrationTest in LocalProject("test") ++= Seq(Tests.Argument("--show-log"), Tests.Argument("--show-diff")) + (Global / baseVersion) := ver, + (Global / baseVersionSuffix) := "SPLIT", + (Global / resolvers) += "scala-pr" at resolverUrl, + LocalProject("test") / IntegrationTest / testOptions ++= Seq(Tests.Argument("--show-log"), Tests.Argument("--show-diff")) ) ++ publishTarget(targetUrl) ++ enableOptimizer } @@ -102,11 +102,11 @@ object ScriptCommands { def setupBootstrapPublish = setup("setupBootstrapPublish") { case Seq(fileOrUrl, ver) => val url = fileToUrl(fileOrUrl) Seq( - baseVersion in Global := ver, - baseVersionSuffix in Global := "SPLIT", - resolvers in Global += "scala-pr" at url, - publishTo in Global := Some("sonatype-releases" at "https://oss.sonatype.org/service/local/staging/deploy/maven2"), - credentials in Global ++= { + (Global / baseVersion) := ver, + (Global / baseVersionSuffix) := "SPLIT", + (Global / resolvers) += "scala-pr" at url, + (Global / publishTo) := Some("sonatype-releases" at "https://oss.sonatype.org/service/local/staging/deploy/maven2"), + (Global / credentials) ++= { val user = env("SONA_USER") val pass = env("SONA_PASS") if (user != "" && pass != "") @@ -152,11 +152,11 @@ object ScriptCommands { } val enableOptimizer = Seq( - scalacOptions in Compile in ThisBuild ++= Seq("-opt:l:inline", "-opt-inline-from:scala/**") + ThisBuild / Compile / scalacOptions ++= Seq("-opt:l:inline", "-opt-inline-from:scala/**") ) val noDocs = Seq( - publishArtifact in (Compile, packageDoc) in ThisBuild := false + ThisBuild / Compile / packageDoc / publishArtifact := false ) private[this] def publishTarget(url: String) = { @@ -164,8 +164,8 @@ object ScriptCommands { val url2 = if(url.startsWith("file:")) url else url.replaceAll("/$", "") + ";build.timestamp=" + System.currentTimeMillis Seq( - publishTo in Global := Some("scala-pr-publish" at url2), - credentials in Global ++= { + (Global / 
publishTo) := Some("scala-pr-publish" at url2), + (Global / credentials) ++= { val pass = env("PRIVATE_REPO_PASS") if (pass != "") List(Credentials("Artifactory Realm", "scala-ci.typesafe.com", "scala-ci", env("PRIVATE_REPO_PASS"))) diff --git a/project/VersionUtil.scala b/project/VersionUtil.scala index 9bcc24953b39..6f1c90ffb0be 100644 --- a/project/VersionUtil.scala +++ b/project/VersionUtil.scala @@ -24,9 +24,9 @@ object VersionUtil { lazy val globalVersionSettings = Seq[Setting[_]]( // Set the version properties globally (they are the same for all projects) - versionProperties in Global := versionPropertiesImpl.value, + (Global / versionProperties) := versionPropertiesImpl.value, gitProperties := gitPropertiesImpl.value, - version in Global := versionProperties.value.mavenVersion + (Global / version) := versionProperties.value.mavenVersion ) lazy val generatePropertiesFileSettings = Seq[Setting[_]]( @@ -37,12 +37,12 @@ object VersionUtil { | __\ \/ /__/ __ |/ /__/ __ | | /____/\___/_/ |_/____/_/ | | | |/ %s""".stripMargin.linesIterator.drop(1).map(s => s"${ "%n" }${ s }").mkString, - resourceGenerators in Compile += generateVersionPropertiesFile.map(file => Seq(file)).taskValue, + (Compile / resourceGenerators) += generateVersionPropertiesFile.map(file => Seq(file)).taskValue, generateVersionPropertiesFile := generateVersionPropertiesFileImpl.value ) lazy val generateBuildCharacterFileSettings = Seq[Setting[_]]( - buildCharacterPropertiesFile := ((baseDirectory in ThisBuild).value / "buildcharacter.properties"), + buildCharacterPropertiesFile := ((ThisBuild / baseDirectory).value / "buildcharacter.properties"), generateBuildCharacterPropertiesFile := generateBuildCharacterPropertiesFileImpl.value ) @@ -161,7 +161,7 @@ object VersionUtil { "copyright.string" -> copyrightString.value, "shell.welcome" -> shellWelcomeString.value ), - (resourceManaged in Compile).value / s"${thisProject.value.id}.properties") + (Compile / resourceManaged).value / s"${thisProject.value.id}.properties") } private lazy val generateBuildCharacterPropertiesFileImpl: Def.Initialize[Task[File]] = Def.task { diff --git a/project/build.properties b/project/build.properties index 0837f7a132de..9edb75b77c28 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.3.13 +sbt.version=1.5.4 diff --git a/scripts/common b/scripts/common index 4f869dfe6eca..106d96cc2296 100644 --- a/scripts/common +++ b/scripts/common @@ -15,7 +15,7 @@ rm -rf "$WORKSPACE/resolutionScratch_" mkdir -p "$WORKSPACE/resolutionScratch_" SBT_CMD=${SBT_CMD-sbt} -SBT_CMD="$SBT_CMD -sbt-version 1.3.13" +SBT_CMD="$SBT_CMD -sbt-version 1.5.4" # repo to publish builds integrationRepoUrl=${integrationRepoUrl-"https://scala-ci.typesafe.com/artifactory/scala-integration/"} From fc94fbcba764b14b2c0848e07b82170a8ceb87a5 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Tue, 6 Jul 2021 16:12:21 -0700 Subject: [PATCH 285/769] Emphasize PartialFunction.applyOrElse --- src/library/scala/PartialFunction.scala | 61 +++++++++++++++++-------- 1 file changed, 43 insertions(+), 18 deletions(-) diff --git a/src/library/scala/PartialFunction.scala b/src/library/scala/PartialFunction.scala index 625285fd93fd..c9c67ca5e7ef 100644 --- a/src/library/scala/PartialFunction.scala +++ b/src/library/scala/PartialFunction.scala @@ -14,17 +14,16 @@ package scala import scala.annotation.nowarn - /** A partial function of type `PartialFunction[A, B]` is a unary function * where the domain does not necessarily include all values of type `A`. 
- * The function `isDefinedAt` allows to test dynamically if a value is in + * The function [[isDefinedAt]] allows to test dynamically if a value is in * the domain of the function. * * Even if `isDefinedAt` returns true for an `a: A`, calling `apply(a)` may * still throw an exception, so the following code is legal: * * {{{ - * val f: PartialFunction[Int, Any] = { case _ => 1/0 } + * val f: PartialFunction[Int, Any] = { case x => x / 0 } // ArithmeticException: / by zero * }}} * * It is the responsibility of the caller to call `isDefinedAt` before @@ -32,26 +31,52 @@ import scala.annotation.nowarn * `apply` will throw an exception to indicate an error condition. If an * exception is not thrown, evaluation may result in an arbitrary value. * + * The usual way to respect this contract is to call [[applyOrElse]], + * which is expected to be more efficient than calling both `isDefinedAt` + * and `apply`. + * * The main distinction between `PartialFunction` and [[scala.Function1]] is * that the user of a `PartialFunction` may choose to do something different * with input that is declared to be outside its domain. For example: * * {{{ * val sample = 1 to 10 - * val isEven: PartialFunction[Int, String] = { - * case x if x % 2 == 0 => x+" is even" + * def isEven(n: Int) = n % 2 == 0 + * val eveningNews: PartialFunction[Int, String] = { + * case x if isEven(x) => s"\$x is even" + * } + * + * // The method collect is described as "filter + map" + * // because it uses a PartialFunction to select elements + * // to which the function is applied. + * val evenNumbers = sample.collect(eveningNews) + * + * val oddlyEnough: PartialFunction[Int, String] = { + * case x if !isEven(x) => s"\$x is odd" * } * - * // the method collect can use isDefinedAt to select which members to collect - * val evenNumbers = sample collect isEven + * // The method orElse allows chaining another PartialFunction + * // to handle input outside the declared domain. + * val numbers = sample.map(eveningNews orElse oddlyEnough) * - * val isOdd: PartialFunction[Int, String] = { - * case x if x % 2 == 1 => x+" is odd" + * // same as + * val numbers = sample.map(n => eveningNews.applyOrElse(n, oddlyEnough)) + * + * val half: PartialFunction[Int, Int] = { + * case x if isEven(x) => x / 2 * } * - * // the method orElse allows chaining another partial function to handle - * // input outside the declared domain - * val numbers = sample map (isEven orElse isOdd) + * // Calculating the domain of a composition can be expensive. + * val oddByHalf = half.andThen(oddlyEnough) + * + * // Invokes `half.apply` on even elements! + * val oddBalls = sample.filter(oddByHalf.isDefinedAt) + * + * // Better than filter(oddByHalf.isDefinedAt).map(oddByHalf) + * val oddBalls = sample.collect(oddByHalf) + * + * // Providing "default" values. + * val oddsAndEnds = sample.map(n => oddByHalf.applyOrElse(n, (i: Int) => s"[\$i]")) * }}} * * @note Optional [[Function]]s, [[PartialFunction]]s and extractor objects @@ -63,6 +88,10 @@ import scala.annotation.nowarn * | from optional [[Function]] | [[Function1.UnliftOps#unlift]] or [[Function.unlift]] | [[Predef.identity]] | [[Function1.UnliftOps#unlift]] | * | from an extractor | `{ case extractor(x) => x }` | `extractor.unapply _` | [[Predef.identity]] | *   + * + * @define applyOrElseOrElse Note that calling [[isDefinedAt]] on the resulting partial function + * may apply the first partial function and execute its side effect. 
+ * For efficiency, it is recommended to call [[applyOrElse]] instead of [[isDefinedAt]] or [[apply]].
  */
 trait PartialFunction[-A, +B] extends (A => B) { self =>
   import PartialFunction._
@@ -125,9 +154,7 @@ trait PartialFunction[-A, +B] extends (A => B) { self =>
    * Composes this partial function with another partial function that
    * gets applied to results of this partial function.
    *
-   * Note that calling [[isDefinedAt]] on the resulting partial function may apply the first
-   * partial function and execute its side effect. It is highly recommended to call [[applyOrElse]]
-   * instead of [[isDefinedAt]] / [[apply]] for efficiency.
+   * $applyOrElseOrElse
    *
    * @param k the transformation function
    * @tparam C the result type of the transformation function.
@@ -141,9 +168,7 @@ trait PartialFunction[-A, +B] extends (A => B) { self =>
    * Composes another partial function `k` with this partial function so that this
    * partial function gets applied to results of `k`.
    *
-   * Note that calling [[isDefinedAt]] on the resulting partial function may apply the first
-   * partial function and execute its side effect. It is highly recommended to call [[applyOrElse]]
-   * instead of [[isDefinedAt]] / [[apply]] for efficiency.
+   * $applyOrElseOrElse
    *
    * @param k the transformation function
    * @tparam R the parameter type of the transformation function.

From 956dc4f662d4ca9c2bcfdd29451ca82729058a26 Mon Sep 17 00:00:00 2001
From: Jason Zaugg
Date: Sat, 3 Jul 2021 15:02:10 +1000
Subject: [PATCH 286/769] Configure OSGI bnd tool for reproducible builds

We already use the Git timestamp for the OSGi version number, but we were
still getting diffs in the META-INF/MANIFEST.MF file in the
`Bnd-LastModified` header.

This commit sets the REPRODUCIBLE flag to tell the OSGi bundler tool to omit
that header and ensure that the current timestamp does not leak into other
Zip entry metadata. Together with the upgrade to SBT 1.5.x this seems to make
successive builds of the resulting JARs from a given commit identical.

There have been some behaviour changes in bnd that affect our output. These
seem to be valid bug fixes, but I have opted to stay with our status quo with
a little extra configuration.
---
 project/Osgi.scala  | 31 +++++++++++++++++++++++++------
 project/plugins.sbt | 10 +++++-----
 2 files changed, 30 insertions(+), 11 deletions(-)

diff --git a/project/Osgi.scala b/project/Osgi.scala
index 636a7f1e8aba..016c19c422cd 100644
--- a/project/Osgi.scala
+++ b/project/Osgi.scala
@@ -8,11 +8,11 @@ import sbt.Keys._
 import collection.JavaConverters._
 import VersionUtil.versionProperties

-/** OSGi packaging for the Scala build, distilled from sbt-osgi. We do not use sbt-osgi because it
- * depends on a newer version of BND which gives slightly different output (probably OK to upgrade
- * in the future, now that the Ant build has been removed) and does not allow a crucial bit of
+/** OSGi packaging for the Scala build, distilled from sbt-osgi.
+ *
+ * We don't use sbt-osgi (yet) because it does not allow a crucial bit of
  * configuration that we need: Setting the classpath for BND. In sbt-osgi this is always
- * `fullClasspath in Compile` whereas we want `products in Compile in packageBin`.
*/ object Osgi { val bundle = TaskKey[File]("osgiBundle", "Create an OSGi bundle.") val bundleName = SettingKey[String]("osgiBundleName", "The Bundle-Name for the manifest.") @@ -29,11 +29,30 @@ object Osgi { "Bundle-Name" -> bundleName.value, "Bundle-SymbolicName" -> bundleSymbolicName.value, "ver" -> v, - "Export-Package" -> "*;version=${ver};-split-package:=merge-first", + + // bnd 3.0 fixes for https://github.com/bndtools/bnd/issues/971. This changes our OSGi + // metadata by adding Import-Package automatically for all of our exported packages. + // Supposedly this is the right thing to do: https://blog.osgi.org/2007/04/importance-of-exporting-nd-importing.html + // but I'm disabling the feature (`-noimport:=true`) to avoid changing this detail of + // our little understood OSGi metadata for now. + "Export-Package" -> "*;version=${ver};-noimport:=true;-split-package:=merge-first", + "Import-Package" -> "scala.*;version=\"${range;[==,=+);${ver}}\",*", "Bundle-Version" -> v, "Bundle-RequiredExecutionEnvironment" -> "JavaSE-1.8", - "-eclipse" -> "false" + "-eclipse" -> "false", + + // Great new feature in modern bnd versions: reproducible builds. + // Omits the Bundle-LastModified header and avoids using System.currentTimeMillis + // for ZIP metadata. + "-reproducible" -> "true", + + // https://github.com/bndtools/bnd/commit/2f1d89428559d21857b87b6d5b465a18a300becc (bndlib 4.2.0) + // seems to have fixed a bug in its detection class references in Class.forName("some.Class") + // For our build, this adds an import on the package "com.cloudius.util" (referred to by an optional + // part of JLine. This directive disables the Class.forName scanning. An alternative fix would be + // direct this to be an optional dependency (as we do for jline itself with `"Import-Package" -> ("jline.*;resolution:=optional," + ... 
)`) + "-noclassforname" -> "true" // ) }, jarlist := false, diff --git a/project/plugins.sbt b/project/plugins.sbt index 17b1a733e101..17332913bed6 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -1,10 +1,10 @@ scalacOptions ++= Seq("-unchecked", "-feature"/*, "-deprecation", "-Xlint" , "-Xfatal-warnings"*/) -libraryDependencies += "org.apache.commons" % "commons-lang3" % "3.3.2" +libraryDependencies += "org.apache.commons" % "commons-lang3" % "3.12.0" -libraryDependencies += "org.pantsbuild" % "jarjar" % "1.6.5" +libraryDependencies += "org.pantsbuild" % "jarjar" % "1.7.2" -libraryDependencies += "biz.aQute.bnd" % "biz.aQute.bnd" % "2.4.1" +libraryDependencies += "biz.aQute.bnd" % "biz.aQute.bndlib" % "5.3.0" enablePlugins(BuildInfoPlugin) @@ -22,9 +22,9 @@ addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.8.1") libraryDependencies ++= Seq( "org.eclipse.jgit" % "org.eclipse.jgit" % "4.6.0.201612231935-r", - "org.slf4j" % "slf4j-nop" % "1.7.23", + "org.slf4j" % "slf4j-nop" % "1.7.31", "com.googlecode.java-diff-utils" % "diffutils" % "1.3.0" -) + ) concurrentRestrictions in Global := Seq( Tags.limitAll(1) // workaround for https://github.com/sbt/sbt/issues/2970 From 5ee8292e4158600b30f946ddecf7c96a80d9464e Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 7 Jul 2021 16:21:38 +1000 Subject: [PATCH 287/769] Make contents of scala-buildcharacter.properties deterministic Avoid the timestamp and order by key --- project/VersionUtil.scala | 23 ++++++++++++++--------- 1 file changed, 14 insertions(+), 9 deletions(-) diff --git a/project/VersionUtil.scala b/project/VersionUtil.scala index 6f1c90ffb0be..d330c6877adf 100644 --- a/project/VersionUtil.scala +++ b/project/VersionUtil.scala @@ -2,13 +2,13 @@ package scala.build import sbt._ import Keys._ + import java.util.{Date, Locale, Properties, TimeZone} -import java.io.{File, FileInputStream} +import java.io.{File, FileInputStream, StringWriter} import java.text.SimpleDateFormat import java.time.Instant import java.time.format.DateTimeFormatter import java.time.temporal.{TemporalAccessor, TemporalQueries, TemporalQuery} - import scala.collection.JavaConverters._ import BuildSettings.autoImport._ @@ -173,13 +173,18 @@ object VersionUtil { } private def writeProps(m: Map[String, String], propFile: File): File = { - val props = new Properties - m.foreach { case (k, v) => props.put(k, v) } - // unfortunately, this will write properties in arbitrary order - // this makes it harder to test for stability of generated artifacts - // consider using https://github.com/etiennestuder/java-ordered-properties - // instead of java.util.Properties - IO.write(props, null, propFile) + // Like: + // IO.write(props, null, propFile) + // But with deterministic key ordering and no timestamp + val fullWriter = new StringWriter() + for (k <- m.keySet.toVector.sorted) { + val writer = new StringWriter() + val props = new Properties() + props.put(k, m(k)) + props.store(writer, null) + writer.toString.linesIterator.drop(1).foreach{line => fullWriter.write(line); fullWriter.write("\n")} + } + IO.write(propFile, fullWriter.toString) propFile } From b73caeaf9f14c685fa20ad520036c1c60f2ece96 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Wed, 7 Jul 2021 09:56:40 +0100 Subject: [PATCH 288/769] Apply suggestions from code review --- src/library-aux/scala/Any.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/library-aux/scala/Any.scala b/src/library-aux/scala/Any.scala index 188d68e756e3..d514aea60a2f 100644 --- 
a/src/library-aux/scala/Any.scala +++ b/src/library-aux/scala/Any.scala @@ -124,7 +124,7 @@ abstract class Any { * - `T0` is an intersection `X with Y` or `X & Y: this method is equivalent to `x.isInstanceOf[X] && x.isInstanceOf[Y]` * - `T0` is a union `X | Y`: this method is equivalent to `x.isInstanceOf[X] || x.isInstanceOf[Y]` * - `T0` is a type parameter or an abstract type member: this method is equivalent - * to `isInstanceOf[U]` where `U` is `A`'s upper bound, `Any` if `A` is unbounded. + * to `isInstanceOf[U]` where `U` is `T0`'s upper bound, `Any` if `T0` is unbounded. * For example, `x.isInstanceOf[A]` where `A` is an unbounded type parameter * will return true for any value of `x`. * From f9431696923b3222f4581e9fb7a8fe53e857d516 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 8 Jul 2021 14:49:07 +1000 Subject: [PATCH 289/769] Make a home for Java reserved identifiers and fix a comment --- src/compiler/scala/tools/nsc/javac/JavaParsers.scala | 8 +++++--- src/reflect/scala/reflect/internal/StdNames.scala | 10 ++++++++++ 2 files changed, 15 insertions(+), 3 deletions(-) diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala index d14aacad9e68..8bccad6b8250 100644 --- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala +++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala @@ -572,7 +572,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { * "record" class, it is much more convenient to promote it to a token. */ def adaptRecordIdentifier(): Unit = { - if (in.token == IDENTIFIER && in.name.toString == "record") + if (in.token == IDENTIFIER && in.name == nme.javaRestrictedIdentifiers.RECORD) in.token = RECORD } @@ -836,7 +836,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { val interfaces = interfacesOpt() val (statics, body) = typeBody(RECORD) - // Records generate a canonical constructor and accessors, unless they are manually specified + // Generate accessors, if not already manually specified var generateAccessors = header .view .map { case ValDef(mods, name, tpt, _) => (name, (tpt, mods.annotations)) } @@ -844,12 +844,14 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { for (DefDef(_, name, List(), List(params), _, _) <- body if generateAccessors.contains(name) && params.isEmpty) generateAccessors -= name - // Generate canonical constructor and accessors, if not already manually specified val accessors = generateAccessors .map { case (name, (tpt, annots)) => DefDef(Modifiers(Flags.JAVA) withAnnotations annots, name, List(), List(), tpt.duplicate, blankExpr) } .toList + + // Generate canonical constructor. During parsing this is done unconditionally but the symbol + // is unlinked in Namer if it is found to clash with a manually specified constructor. 
val canonicalCtor = DefDef( mods | Flags.SYNTHETIC, nme.CONSTRUCTOR, diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala index 87eeb58b0c95..0480aa15ed8e 100644 --- a/src/reflect/scala/reflect/internal/StdNames.scala +++ b/src/reflect/scala/reflect/internal/StdNames.scala @@ -236,6 +236,7 @@ trait StdNames { final val keywords = kw.result } with CommonNames { final val javaKeywords = new JavaKeywords() + final val javaRestrictedIdentifiers = new JavaRestrictedIdentifiers() } abstract class TypeNames extends Keywords with TypeNamesApi { @@ -1256,6 +1257,15 @@ trait StdNames { final val keywords = kw.result } + // "The identifiers var, yield, and record are restricted identifiers because they are not allowed in some contexts" + // A type identifier is an identifier that is not the character sequence var, yield, or record. + // An unqualified method identifier is an identifier that is not the character sequence yield. + class JavaRestrictedIdentifiers { + final val RECORD: TermName = TermName("record") + final val VAR: TermName = TermName("var") + final val YIELD: TermName = TermName("yield") + } + sealed abstract class SymbolNames { protected def nameType(s: String): TypeName = newTypeNameCached(s) From 615599faf4a6ff51c7a4d7476f14aa041d70530e Mon Sep 17 00:00:00 2001 From: Georgi Krastev Date: Thu, 8 Jul 2021 22:24:33 +0200 Subject: [PATCH 290/769] When checking bounds fails, return original undetermined parameters `inferMethodInstance` assumes that it doesn't matter what is returned when `checkBounds` fails because it issues errors. However that is not the case when typechecking in silent mode, e.g. in `tryTypedApply` which can recover with implicit conversions. When typechecking the inserted conversions we should see the yet undetermined type parameters and try to infer them again. 
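For illustration, a condensed form of the new neg test added below
(test/files/neg/t12413.scala); per the test's own comments, the second call is
only rejected while typechecking the implicit conversion attempted in silent
mode, whereas the first is rejected later, at refchecks:

    class Open
    class Door[State] {
      def close[Phantom >: State <: Open]: Int = 0
    }
    object Example {
      val door = new Door[AnyRef]
      println(door.close.toString)   // error reported at refchecks
      println(door.close.toString()) // error reported while typing the inserted conversion
    }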
--- .../scala/tools/nsc/typechecker/Infer.scala | 12 ++++++------ test/files/neg/leibniz-liskov.check | 12 ++++++------ test/files/neg/t12413.check | 16 ++++++++++++++++ test/files/neg/t12413.scala | 18 ++++++++++++++++++ test/files/neg/t7509.check | 5 +---- test/files/neg/t8463.check | 7 +------ 6 files changed, 48 insertions(+), 22 deletions(-) create mode 100644 test/files/neg/t12413.check create mode 100644 test/files/neg/t12413.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index 3457e2326bc5..d2296acec6c0 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -1076,7 +1076,7 @@ trait Infer extends Checkable { */ def inferMethodInstance(fn: Tree, undetParams: List[Symbol], args: List[Tree], pt0: Type): List[Symbol] = fn.tpe match { - case mt @ MethodType(params0, _) => + case mt @ MethodType(_, _) => try { val pt = if (pt0.typeSymbol == UnitClass) WildcardType else pt0 val formals = formalTypes(mt.paramTypes, args.length) @@ -1094,17 +1094,17 @@ trait Infer extends Checkable { adjusted.undetParams match { case Nil => Nil case xs => - // #3890 + // scala/bug#3890 val xs1 = treeSubst.typeMap mapOver xs if (xs ne xs1) new TreeSymSubstTraverser(xs, xs1) traverseTrees fn :: args enhanceBounds(adjusted.okParams, adjusted.okArgs, xs1) xs1 } - } else Nil - } - catch ifNoInstance { msg => - NoMethodInstanceError(fn, args, msg); List() + } else undetParams + } catch ifNoInstance { msg => + NoMethodInstanceError(fn, args, msg) + undetParams } case x => throw new MatchError(x) } diff --git a/test/files/neg/leibniz-liskov.check b/test/files/neg/leibniz-liskov.check index c760861dbbf6..ad2ff6f6d4df 100644 --- a/test/files/neg/leibniz-liskov.check +++ b/test/files/neg/leibniz-liskov.check @@ -55,8 +55,8 @@ leibniz-liskov.scala:21: error: type mismatch; required: F[U] def convert1[T, U](l: List[T])(ev: T =:= U): List[U] = ev.substituteContra(l) ^ -leibniz-liskov.scala:21: error: type mismatch; - found : F[T] +leibniz-liskov.scala:21: error: polymorphic expression cannot be instantiated to expected type; + found : [F[_]]F[T] required: List[U] def convert1[T, U](l: List[T])(ev: T =:= U): List[U] = ev.substituteContra(l) ^ @@ -72,8 +72,8 @@ leibniz-liskov.scala:22: error: type mismatch; required: F[T] def convert2[T, U](l: List[U])(ev: T =:= U): List[T] = ev.substituteCo(l) ^ -leibniz-liskov.scala:22: error: type mismatch; - found : F[U] +leibniz-liskov.scala:22: error: polymorphic expression cannot be instantiated to expected type; + found : [F[_]]F[U] required: List[T] def convert2[T, U](l: List[U])(ev: T =:= U): List[T] = ev.substituteCo(l) ^ @@ -111,8 +111,8 @@ leibniz-liskov.scala:35: error: type mismatch; required: F[U] def convertConsume2[U, T](c: Consumes[T])(ev: U <:< T): Consumes[U] = ev.substituteCo(c) ^ -leibniz-liskov.scala:35: error: type mismatch; - found : F[T] +leibniz-liskov.scala:35: error: polymorphic expression cannot be instantiated to expected type; + found : [F[+_]]F[T] required: LeibnizLiskov.this.Consumes[U] (which expands to) U => Unit def convertConsume2[U, T](c: Consumes[T])(ev: U <:< T): Consumes[U] = ev.substituteCo(c) diff --git a/test/files/neg/t12413.check b/test/files/neg/t12413.check new file mode 100644 index 000000000000..fefa9a3e8a80 --- /dev/null +++ b/test/files/neg/t12413.check @@ -0,0 +1,16 @@ +t12413.scala:13: error: inferred type arguments [AnyRef] do not conform to method close's type parameter bounds 
[Phantom >: AnyRef <: Open] + println(door.close.toString()) + ^ +t12413.scala:14: error: inferred type arguments [AnyRef] do not conform to method close's type parameter bounds [Phantom >: AnyRef <: Open] + println(door.close == 0) + ^ +t12413.scala:15: error: inferred type arguments [AnyRef] do not conform to method open's type parameter bounds [Phantom >: AnyRef <: Open] + println(door.open().toString) + ^ +t12413.scala:16: error: inferred type arguments [AnyRef] do not conform to method open's type parameter bounds [Phantom >: AnyRef <: Open] + println(door.open().toString()) + ^ +t12413.scala:17: error: inferred type arguments [AnyRef] do not conform to method open's type parameter bounds [Phantom >: AnyRef <: Open] + println(door.open() == 0) + ^ +5 errors diff --git a/test/files/neg/t12413.scala b/test/files/neg/t12413.scala new file mode 100644 index 000000000000..505c04f6b33b --- /dev/null +++ b/test/files/neg/t12413.scala @@ -0,0 +1,18 @@ +class Open + +class Door[State] { + def close[Phantom >: State <: Open]: Int = 0 + def open[Phantom >: State <: Open](): Int = 0 +} + +class Test { + val door = new Door[AnyRef] + // the error here happens later (at refchecks) + println(door.close.toString) + // the errors below happen when typing implicit conversions + println(door.close.toString()) + println(door.close == 0) + println(door.open().toString) + println(door.open().toString()) + println(door.open() == 0) +} diff --git a/test/files/neg/t7509.check b/test/files/neg/t7509.check index 03ec8ef282fa..30a01e840b3d 100644 --- a/test/files/neg/t7509.check +++ b/test/files/neg/t7509.check @@ -6,7 +6,4 @@ t7509.scala:3: error: type mismatch; required: R crash(42) ^ -t7509.scala:3: error: could not find implicit value for parameter ev: R - crash(42) - ^ -3 errors +2 errors diff --git a/test/files/neg/t8463.check b/test/files/neg/t8463.check index fe3f19aa4606..572a460728ed 100644 --- a/test/files/neg/t8463.check +++ b/test/files/neg/t8463.check @@ -19,9 +19,4 @@ t8463.scala:5: error: type mismatch; required: T[Long] insertCell(Foo(5)) ^ -t8463.scala:5: error: type mismatch; - found : Test.Foo[T] - required: Test.Foo[Test.Cell] - insertCell(Foo(5)) - ^ -4 errors +3 errors From fa86e9cc13c6e674719d3b210a066788775cdad3 Mon Sep 17 00:00:00 2001 From: Georgi Krastev Date: Sun, 11 Jul 2021 19:49:39 +0200 Subject: [PATCH 291/769] Remove special case in maybeRewrap When we call `dealias` or `normalize` we do want to rewrap the underlying type even when it's `=:=` (e.g. type aliases). --- .../scala/reflect/internal/Types.scala | 14 +-------- test/files/neg/t12324.check | 5 ++- test/files/neg/t7636.check | 14 ++++----- test/files/neg/t7636.scala | 10 +++--- test/files/neg/t8127a.check | 2 +- test/files/neg/wildcards-future.check | 4 +-- test/files/run/analyzerPlugins.check | 2 +- test/files/run/t10363.scala | 31 +++++++++++++++++++ test/files/run/t6329_repl.check | 2 +- test/files/run/t6329_repl_bug.check | 2 +- test/files/run/t6329_vanilla.check | 2 +- test/files/run/t6329_vanilla_bug.check | 2 +- 12 files changed, 56 insertions(+), 34 deletions(-) create mode 100644 test/files/run/t10363.scala diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 1cefcf355dfd..66ac394a8f14 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -176,19 +176,7 @@ trait Types * forwarded here. Some operations are rewrapped again. 
*/ trait RewrappingTypeProxy extends SimpleTypeProxy { - protected def maybeRewrap(newtp: Type) = ( - if (newtp eq underlying) this - else { - // - BoundedWildcardTypes reach here during erroneous compilation: neg/t6258 - // - Higher-kinded exclusion is because [x]CC[x] compares =:= to CC: pos/t3800 - // - Avoid reusing the existing Wrapped(RefinedType) when we've be asked to wrap an =:= RefinementTypeRef, the - // distinction is important in base type sequences. See TypesTest.testExistentialRefinement - // - Otherwise, if newtp =:= underlying, don't rewrap it. - val hasSpecialMeaningBeyond_=:= = newtp.isWildcard || newtp.isHigherKinded || newtp.isInstanceOf[RefinementTypeRef] - if (!hasSpecialMeaningBeyond_=:= && (newtp =:= underlying)) this - else rewrap(newtp) - } - ) + protected def maybeRewrap(newtp: Type) = if (newtp eq underlying) this else rewrap(newtp) protected def rewrap(newtp: Type): Type // the following are all operations in class Type that are overridden in some subclass diff --git a/test/files/neg/t12324.check b/test/files/neg/t12324.check index 5a7bbb7119f6..3ade85f310a8 100644 --- a/test/files/neg/t12324.check +++ b/test/files/neg/t12324.check @@ -13,4 +13,7 @@ t12324.scala:12: error: `@throws` only allowed for methods and constructors t12324.scala:14: error: `@throws` only allowed for methods and constructors def g(): Unit = (): @throws[Exception] ^ -5 errors +t12324.scala:16: error: `@throws` only allowed for methods and constructors + def n(i: Int) = i match { case 42 => 27: @throws[Exception] } // not all cruft reaches refchecks + ^ +6 errors diff --git a/test/files/neg/t7636.check b/test/files/neg/t7636.check index c05996ef4010..59f474ca90fb 100644 --- a/test/files/neg/t7636.check +++ b/test/files/neg/t7636.check @@ -1,10 +1,10 @@ t7636.scala:3: error: illegal inheritance; - self-type Main.C does not conform to Main.ResultTable[_$3]'s selftype Main.ResultTable[_$3] - class C extends ResultTable(Left(5):Either[_,_])(5) - ^ + self-type Main.bar.type does not conform to Main.Foo[T]'s selftype Main.Foo[T] + object bar extends Foo(5: T forSome { type T }) + ^ t7636.scala:3: error: type mismatch; - found : Either[_$2,_$3(in constructor C)] where type _$3(in constructor C), type _$2 - required: Either[_, _$3(in value )] where type _$3(in value ) - class C extends ResultTable(Left(5):Either[_,_])(5) - ^ + found : T(in constructor bar) where type T(in constructor bar) + required: T(in value ) where type T(in value ) + object bar extends Foo(5: T forSome { type T }) + ^ 2 errors diff --git a/test/files/neg/t7636.scala b/test/files/neg/t7636.scala index 5d5d56a5efd3..27d4c060932f 100644 --- a/test/files/neg/t7636.scala +++ b/test/files/neg/t7636.scala @@ -1,7 +1,7 @@ -object Main extends App{ - class ResultTable[E]( query : Either[_,E] )( columns : Int ) - class C extends ResultTable(Left(5):Either[_,_])(5) +object Main extends App { + class Foo[A](x: A) + object bar extends Foo(5: T forSome { type T }) } -// Inference of the existential type for the parent type argument -// E still fails. That looks tricky to fix, see the comments in scala/bug#7636. +// Inference of the existential type for the parent type argument A still fails. +// That looks tricky to fix, see the comments in scala/bug#7636. // But we at least prevent a cascading NPE. 
diff --git a/test/files/neg/t8127a.check b/test/files/neg/t8127a.check index ce75d28cf265..4518affe0ae3 100644 --- a/test/files/neg/t8127a.check +++ b/test/files/neg/t8127a.check @@ -1,4 +1,4 @@ -t8127a.scala:7: error: The result type of an unapplySeq method must contain a member `get` to be used as an extractor pattern, no such member exists in Seq[_$1] +t8127a.scala:7: error: The result type of an unapplySeq method must contain a member `get` to be used as an extractor pattern, no such member exists in Seq[Any] case H(v) => ^ 1 error diff --git a/test/files/neg/wildcards-future.check b/test/files/neg/wildcards-future.check index 0aedb6dd8b01..31f116c7e547 100644 --- a/test/files/neg/wildcards-future.check +++ b/test/files/neg/wildcards-future.check @@ -1,10 +1,10 @@ wildcards-future.scala:7: error: type mismatch; - found : Map[_$1,_$2] where type _$2 >: Null, type _$1 <: AnyRef + found : scala.collection.immutable.Map[_$1,Any] where type _$1 <: AnyRef required: Map[String,String] underscores : Map[String, String] // error wildcard variables starting with `_` ^ wildcards-future.scala:9: error: type mismatch; - found : Map[?$1,?$2] where type ?$2 >: Null, type ?$1 <: AnyRef + found : scala.collection.immutable.Map[?$1,Any] where type ?$1 <: AnyRef required: Map[String,String] qmarks : Map[String, String] // error – wildcard variables should start with `?` to differentiate from the old syntax ^ diff --git a/test/files/run/analyzerPlugins.check b/test/files/run/analyzerPlugins.check index a3c7d4cc4319..2659fd3b3e6e 100644 --- a/test/files/run/analyzerPlugins.check +++ b/test/files/run/analyzerPlugins.check @@ -19,7 +19,7 @@ canAdaptAnnotations(Trees$TypeTree, ?) [7] canAdaptAnnotations(Trees$Typed, ?) [3] canAdaptAnnotations(Trees$Typed, Any) [1] canAdaptAnnotations(Trees$Typed, Int) [1] -lub(List(Int(1) @testAnn, 2)) [1] +lub(List(1 @testAnn, 2)) [1] pluginsPt(?, Trees$Annotated) [6] pluginsPt(?, Trees$Apply) [17] pluginsPt(?, Trees$ApplyImplicitView) [2] diff --git a/test/files/run/t10363.scala b/test/files/run/t10363.scala new file mode 100644 index 000000000000..7d1462c9ced8 --- /dev/null +++ b/test/files/run/t10363.scala @@ -0,0 +1,31 @@ +trait Foo[A, B] +object Foo { + type Bar[A] = Foo[A, _] +} + +trait Base[M[_]] { + def method(in: M[_]): Unit +} + +class Concrete extends Base[Foo.Bar] { + def method(in: Foo.Bar[_]): Unit = {} +} + +trait Template[M[_]] { + def toBeImplemented: Base[M] + def mark[A]: M[A] + + def method2(): Unit = { + toBeImplemented.method(mark[Nothing]) + } +} + +class Impl extends Template[Foo.Bar] { + def toBeImplemented: Base[Foo.Bar] = new Concrete + def mark[A]: Foo.Bar[A] = new Foo[A, Nothing] {} +} + +object Test { + def main(args: Array[String]): Unit = + (new Impl).method2() +} diff --git a/test/files/run/t6329_repl.check b/test/files/run/t6329_repl.check index e6b94db0f589..7035b4569747 100644 --- a/test/files/run/t6329_repl.check +++ b/test/files/run/t6329_repl.check @@ -4,7 +4,7 @@ import scala.reflect.{ClassManifest, classTag} scala> implicitly[ClassManifest[scala.List[_]]] warning: 1 deprecation (since 2.10.0); for details, enable `:setting -deprecation` or `:replay -deprecation` -val res0: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List[] +val res0: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List[Any] scala> classTag[scala.List[_]] val res1: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List diff --git a/test/files/run/t6329_repl_bug.check b/test/files/run/t6329_repl_bug.check index 
f82398194821..8e89bd2ed33f 100644
--- a/test/files/run/t6329_repl_bug.check
+++ b/test/files/run/t6329_repl_bug.check
@@ -7,7 +7,7 @@ import scala.reflect.runtime._
 
 scala> implicitly[scala.reflect.ClassManifest[List[_]]]
 warning: 1 deprecation (since 2.10.0); for details, enable `:setting -deprecation` or `:replay -deprecation`
-val res0: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List[]
+val res0: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List[java.lang.Class]
 
 scala> scala.reflect.classTag[List[_]]
 val res1: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List
diff --git a/test/files/run/t6329_vanilla.check b/test/files/run/t6329_vanilla.check
index ad8f4b5c7720..74481cabf49c 100644
--- a/test/files/run/t6329_vanilla.check
+++ b/test/files/run/t6329_vanilla.check
@@ -1,4 +1,4 @@
-scala.collection.immutable.List[]
+scala.collection.immutable.List[Any]
 scala.collection.immutable.List
 scala.collection.immutable.List[]
 scala.collection.immutable.List
diff --git a/test/files/run/t6329_vanilla_bug.check b/test/files/run/t6329_vanilla_bug.check
index 56b075b2e658..8282afaeba13 100644
--- a/test/files/run/t6329_vanilla_bug.check
+++ b/test/files/run/t6329_vanilla_bug.check
@@ -1,2 +1,2 @@
-scala.collection.immutable.List[]
+scala.collection.immutable.List[Any]
 scala.collection.immutable.List

From bc1f606655024bd8104aca215fe517c77ff85870 Mon Sep 17 00:00:00 2001
From: Georgi Krastev
Date: Sun, 11 Jul 2021 20:25:02 +0200
Subject: [PATCH 292/769] Add test for scala/bug#10016

Copied from #5613, credit goes to @adriaanm
---
 test/files/run/t10016.check |  8 ++++++++
 test/files/run/t10016.scala | 11 +++++++++++
 2 files changed, 19 insertions(+)
 create mode 100644 test/files/run/t10016.check
 create mode 100644 test/files/run/t10016.scala

diff --git a/test/files/run/t10016.check b/test/files/run/t10016.check
new file mode 100644
index 000000000000..7457fcc9b22c
--- /dev/null
+++ b/test/files/run/t10016.check
@@ -0,0 +1,8 @@
+
+scala> def existWith(x: (List[T] forSome {type T}) with Int {def xxx: Int}) = ???
+def existWith(x: List[_] with Int{def xxx: Int}): Nothing
+
+scala> def existKeepsAnnot(x: (List[T] forSome {type T})@SerialVersionUID(1L) with Int {def xxx: Int}) = ???
+def existKeepsAnnot(x: List[Any] @SerialVersionUID(value = 1L) with Int{def xxx: Int}): Nothing
+
+scala> :quit
diff --git a/test/files/run/t10016.scala b/test/files/run/t10016.scala
new file mode 100644
index 000000000000..113046527a04
--- /dev/null
+++ b/test/files/run/t10016.scala
@@ -0,0 +1,11 @@
+import scala.tools.partest.ReplTest
+
+// check that we don't lose the annotation on the existential type nested in an intersection type
+// it's okay that List[_] is represented as List[Any] -- they are equivalent due to variance (existential extrapolation)
+// (The above comment should not be construed as an endorsement of rewrapping as a great way to implement a bunch of different type "proxies")
+object Test extends ReplTest {
+  def code = """
+    |def existWith(x: (List[T] forSome {type T}) with Int {def xxx: Int}) = ???
+    |def existKeepsAnnot(x: (List[T] forSome {type T})@SerialVersionUID(1L) with Int {def xxx: Int}) = ???
+  """.stripMargin
+}

From 92610b8ac90a8f2ef98e00548cb92d5f7a814238 Mon Sep 17 00:00:00 2001
From: Jason Zaugg
Date: Mon, 12 Jul 2021 16:23:51 +1000
Subject: [PATCH 293/769] Avoid deprecated DSL in SBT build in two more places

---
 project/build.sbt   | 2 +-
 project/plugins.sbt | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/project/build.sbt b/project/build.sbt
index a604896dedc4..0cfcc9fd4bcb 100644
--- a/project/build.sbt
+++ b/project/build.sbt
@@ -1,2 +1,2 @@
 // Add genprod to the build; It should be moved from `src/build` to `project` now that the Ant build is gone
-sources in Compile += ((baseDirectory).value.getParentFile / "src" / "build" / "genprod.scala")
+Compile / sources += ((baseDirectory).value.getParentFile / "src" / "build" / "genprod.scala")
diff --git a/project/plugins.sbt b/project/plugins.sbt
index 17332913bed6..2fc7b95e8495 100644
--- a/project/plugins.sbt
+++ b/project/plugins.sbt
@@ -12,7 +12,7 @@ enablePlugins(BuildInfoPlugin)
 
 lazy val buildClasspath = taskKey[String]("Colon-separated (or semicolon-separated in case of Windows) list of entries on the sbt build classpath.")
 
-buildClasspath := (externalDependencyClasspath in Compile).value.map(_.data).mkString(java.io.File.pathSeparator)
+buildClasspath := (Compile / externalDependencyClasspath).value.map(_.data).mkString(java.io.File.pathSeparator)
 
 buildInfoKeys := Seq[BuildInfoKey](buildClasspath)

From fe2fdac879f35b3b1c06c9080cd17c6ebf3acf92 Mon Sep 17 00:00:00 2001
From: Lukas Rytz
Date: Mon, 12 Jul 2021 11:55:50 +0200
Subject: [PATCH 294/769] Test case for 7970

---
 test/files/run/t7970.scala | 15 +++++++++++++++
 1 file changed, 15 insertions(+)
 create mode 100644 test/files/run/t7970.scala

diff --git a/test/files/run/t7970.scala b/test/files/run/t7970.scala
new file mode 100644
index 000000000000..5f90186c5b87
--- /dev/null
+++ b/test/files/run/t7970.scala
@@ -0,0 +1,15 @@
+object Test {
+  {
+    val session: Object = null
+    trait Outer{
+      trait Inner{
+        assert(session == null)
+      }
+    }
+    val o = new Outer{}
+    new o.Inner { }
+  }
+
+  def main(args: Array[String]): Unit = {
+  }
+}

From cb27f163c9f5c578376793fb79f837e45c258435 Mon Sep 17 00:00:00 2001
From: Georgi Krastev
Date: Mon, 12 Jul 2021 12:25:28 +0200
Subject: [PATCH 295/769] Avoid entering synthetic trees during specialization

because the duplicated trees already contain them. When we try to retype both
the existing synthetic trees and the newly entered ones it breaks
(e.g. for default getters).
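
For illustration, the shape of code affected is a definition nested inside a
@specialized method that needs a compiler-synthesized default getter, as in the
new pos test below. A minimal, self-contained sketch (the object name and main
method are illustrative additions, not part of the patch; the method body
mirrors test/files/pos/t9014.scala):

  object SpecializedDefaultGetterSketch {
    def spec[@specialized(Int, Long) T: Integral](t: T): T = {
      // the compiler synthesizes a default getter for `default`; duplicating the
      // specialized body used to re-enter that synthetic symbol and fail retyping
      def inner(default: T = t): T = default
      inner()
    }

    def main(args: Array[String]): Unit = {
      assert(spec(3) == 3)
      assert(spec(3L) == 3L)
    }
  }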
---
 .../scala/tools/nsc/transform/SpecializeTypes.scala | 11 ++++++++++-
 test/files/neg/t9014.check                          |  4 ++++
 test/files/neg/t9014.scala                          |  7 +++++++
 test/files/pos/t9014.scala                          |  6 ++++++
 4 files changed, 27 insertions(+), 1 deletion(-)
 create mode 100644 test/files/neg/t9014.check
 create mode 100644 test/files/neg/t9014.scala
 create mode 100644 test/files/pos/t9014.scala

diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index c9d9f2c9162d..221702f865b1 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -1401,7 +1401,16 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
       }
     }
 
-    protected override def newBodyDuplicator(context: Context): SpecializeBodyDuplicator = new SpecializeBodyDuplicator(context)
+    private class SpecializeNamer(context: Context) extends Namer(context) {
+      // Avoid entering synthetic trees during specialization because the duplicated trees already contain them.
+      override def enterSyntheticSym(tree: Tree): Symbol = tree.symbol
+    }
+
+    protected override def newBodyDuplicator(context: Context): SpecializeBodyDuplicator =
+      new SpecializeBodyDuplicator(context)
+
+    override def newNamer(context: Context): Namer =
+      new SpecializeNamer(context)
   }
 
   /** Introduced to fix scala/bug#7343: Phase ordering problem between Duplicators and Specialization.
diff --git a/test/files/neg/t9014.check b/test/files/neg/t9014.check
new file mode 100644
index 000000000000..650093881062
--- /dev/null
+++ b/test/files/neg/t9014.check
@@ -0,0 +1,4 @@
+t9014.scala:4: error: Inner is already defined as case class Inner
+    case class Inner(default: T)
+               ^
+1 error
diff --git a/test/files/neg/t9014.scala b/test/files/neg/t9014.scala
new file mode 100644
index 000000000000..32465c3c7dcd
--- /dev/null
+++ b/test/files/neg/t9014.scala
@@ -0,0 +1,7 @@
+object Test {
+  def spec[@specialized(Byte, Short, Int, Long) T : Integral](t: T) = {
+    // still broken - specialize can't deal with the synthetic companion object
+    case class Inner(default: T)
+    t
+  }
+}
diff --git a/test/files/pos/t9014.scala b/test/files/pos/t9014.scala
new file mode 100644
index 000000000000..8af97634c488
--- /dev/null
+++ b/test/files/pos/t9014.scala
@@ -0,0 +1,6 @@
+object Test {
+  def spec[@specialized(Byte, Short, Int, Long) T : Integral](t: T) = {
+    def inner(default: T = t): T = t
+    inner()
+  }
+}

From a9c6e165a5914ab1a9a37661e12858c76e77717e Mon Sep 17 00:00:00 2001
From: Jamie Thompson
Date: Tue, 13 Jul 2021 10:51:02 +0200
Subject: [PATCH 296/769] refactor tasty reader logging

---
 .../scala/tools/nsc/tasty/TreeUnpickler.scala | 53 ++++++++------
 .../nsc/tasty/bridge/AnnotationOps.scala      |  2 +-
 .../tools/nsc/tasty/bridge/ContextOps.scala   | 16 ++---
 .../tools/nsc/tasty/bridge/SymbolOps.scala    | 40 ++++++++---
 .../tools/nsc/tasty/bridge/TreeOps.scala      | 13 ++++
 .../tools/nsc/tasty/bridge/TypeOps.scala      | 70 ++++++++++++++++---
 6 files changed, 144 insertions(+), 50 deletions(-)

diff --git a/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala b/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala
index 45ae91f1fc67..1764cadaa334 100644
--- a/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala
+++ b/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala
@@ -72,7 +72,7 @@ class TreeUnpickler[Tasty <: TastyUniverse](
   private def registerSym(addr: Addr, sym: Symbol, rejected: Boolean)(implicit ctx: Context) = {
assert(!(rejected && isSymbol(sym)), "expected no symbol when rejected") ctx.log( - if (isSymbol(sym)) s"$addr registered ${showSym(sym)} in ${location(sym.owner)}" + if (isSymbol(sym)) s"$addr registered ${showSym(sym)}" else s"$addr registering symbol was rejected" ) symAtAddr(addr) = sym @@ -92,8 +92,8 @@ class TreeUnpickler[Tasty <: TastyUniverse]( /** A completer that captures the current position and context, which then uses the position to discover the symbol * to compute the info for. */ - class Completer(isClass: Boolean, reader: TastyReader, originalFlagSet: TastyFlagSet)(implicit ctx: Context) - extends TastyCompleter(isClass, originalFlagSet) { self => + class Completer(isClass: Boolean, reader: TastyReader, tflags: TastyFlagSet)(implicit ctx: Context) + extends TastyCompleter(isClass, tflags) { self => private val symAddr = reader.currentAddr @@ -494,8 +494,7 @@ class TreeUnpickler[Tasty <: TastyUniverse]( def isTypeTag = tag === TYPEDEF || tag === TYPEPARAM val end = readEnd() val parsedName: TastyName = readTastyName() - def debugSymCreate: String = s"${astTagToString(tag)} ${parsedName.debug}" - ctx.log(s"$start ::: => create $debugSymCreate") + ctx.log(s"${astTagToString(tag)} ${parsedName.debug} in ${location(ctx.owner)}") skipParams() val ttag = nextUnsharedTag val isAbsType = isAbstractType(ttag) @@ -512,11 +511,20 @@ class TreeUnpickler[Tasty <: TastyUniverse]( def isTypeParameter = flags.is(Param) && isTypeTag def canEnterInClass = !isTypeParameter ctx.log { - val privateFlag = if (isSymbol(privateWithin)) s"private[$privateWithin] " else "" + val privateFlag = { + if (isSymbol(privateWithin)) { + if (flags.is(Protected)) s"Protected[$privateWithin]" + else s"Private[$privateWithin]" + } + else { + "" + } + } val debugFlags = { if (privateFlag.nonEmpty) { - val given = if (!flags) "" else " " + (flags &~ Private).debug - privateFlag + given + val flags0 = flags &~ Protected + val rest = if (!flags0) "" else s" ${flags0.debug}" + privateFlag + rest } else flags.debug } @@ -527,7 +535,9 @@ class TreeUnpickler[Tasty <: TastyUniverse]( if (tag === TYPEPARAM && ctx.owner.isConstructor) { // TASTy encodes type parameters for constructors // nsc only has class type parameters - ctx.findOuterClassTypeParameter(name.toTypeName) + val tparam = ctx.findOuterClassTypeParameter(name.toTypeName) + ctx.log(s"$start reusing class type param ${showSym(tparam)}") + tparam } else { ctx.findRootSymbol(roots, name) match { @@ -539,7 +549,7 @@ class TreeUnpickler[Tasty <: TastyUniverse]( } else { ctx.redefineSymbol(rootd, flags, mkCompleter, privateWithin) - ctx.log(s"$start replaced info of ${showSym(rootd)}") + ctx.log(s"$start replaced info of root ${showSym(rootd)}") rootd } case _ => @@ -556,6 +566,7 @@ class TreeUnpickler[Tasty <: TastyUniverse]( if (canEnterInClass && ctx.owner.isClass) ctx.enterIfUnseen(sym) if (isClass) { + ctx.log(s"$templateStart indexing params (may be empty):") val localCtx = ctx.withOwner(sym) forkAt(templateStart).indexTemplateParams()(localCtx) } @@ -743,8 +754,8 @@ class TreeUnpickler[Tasty <: TastyUniverse]( } def DefDef(repr: TastyRepr, localCtx: Context)(implicit ctx: Context): Unit = { - val isMacro = repr.originalFlagSet.is(Erased | Macro) - checkUnsupportedFlags(repr.tastyOnlyFlags &~ (Extension | Exported | Infix | optFlag(isMacro)(Erased))) + val isMacro = repr.tflags.is(Erased | Macro) + checkUnsupportedFlags(repr.unsupportedFlags &~ (Extension | Exported | Infix | optFlag(isMacro)(Erased))) val isCtor = sym.isConstructor val paramDefss = 
readParamss()(localCtx).map(_.map(symFromNoCycle)) val typeParams = { @@ -759,7 +770,7 @@ class TreeUnpickler[Tasty <: TastyUniverse]( unsupportedWhen(hasTypeParams, { val noun = ( if (isCtor) "constructor" - else if (repr.tastyOnlyFlags.is(Extension)) "extension method" + else if (repr.unsupportedFlags.is(Extension)) "extension method" else "method" ) s"$noun with unmergeable type parameters: $tname" @@ -783,10 +794,10 @@ class TreeUnpickler[Tasty <: TastyUniverse]( def ValDef(repr: TastyRepr, localCtx: Context)(implicit ctx: Context): Unit = { // valdef in TASTy is either a singleton object or a method forwarder to a local value. - checkUnsupportedFlags(repr.tastyOnlyFlags &~ (Enum | Extension | Exported)) + checkUnsupportedFlags(repr.unsupportedFlags &~ (Enum | Extension | Exported)) val tpe = readTpt()(localCtx).tpe ctx.setInfo(sym, - if (repr.originalFlagSet.is(FlagSets.SingletonEnum)) { + if (repr.tflags.is(FlagSets.SingletonEnum)) { ctx.completeEnumSingleton(sym, tpe) prefixedRef(sym.owner.thisPrefix, sym.objectImplementation) } @@ -801,15 +812,15 @@ class TreeUnpickler[Tasty <: TastyUniverse]( val allowedTypeFlags = allowedShared | Exported val allowedClassFlags = allowedShared | Open | Transparent if (sym.isClass) { - checkUnsupportedFlags(repr.tastyOnlyFlags &~ allowedClassFlags) + checkUnsupportedFlags(repr.unsupportedFlags &~ allowedClassFlags) sym.owner.ensureCompleted() readTemplate()(localCtx) } else { - checkUnsupportedFlags(repr.tastyOnlyFlags &~ allowedTypeFlags) - val rhs = readTpt()(if (repr.originalFlagSet.is(Opaque)) localCtx.addMode(OpaqueTypeDef) else localCtx) + checkUnsupportedFlags(repr.unsupportedFlags &~ allowedTypeFlags) + val rhs = readTpt()(if (repr.tflags.is(Opaque)) localCtx.addMode(OpaqueTypeDef) else localCtx) val info = - if (repr.originalFlagSet.is(Opaque)) { + if (repr.tflags.is(Opaque)) { val (info, alias) = defn.OpaqueTypeToBounds(rhs.tpe) ctx.markAsOpaqueType(sym, alias) info @@ -821,7 +832,7 @@ class TreeUnpickler[Tasty <: TastyUniverse]( } def TermParam(repr: TastyRepr, localCtx: Context)(implicit ctx: Context): Unit = { - checkUnsupportedFlags(repr.tastyOnlyFlags &~ (ParamAlias | Exported)) + checkUnsupportedFlags(repr.unsupportedFlags &~ (ParamAlias | Exported)) val tpt = readTpt()(localCtx) ctx.setInfo(sym, if (nothingButMods(end) && sym.not(ParamSetter)) tpt.tpe @@ -845,7 +856,7 @@ class TreeUnpickler[Tasty <: TastyUniverse]( try { initialize() - ctx.log(s"$symAddr @@@ ${showSym(sym)}.tpe =:= '[${if (sym.isType) sym.tpe else sym.info}]; owned by ${location(sym.owner)}") + ctx.log(s"$symAddr @@@ ${showSym(sym)}.tpe =:= ${showType(sym.info)}; owned by ${location(sym.owner)}") NoCycle(at = symAddr) } catch ctx.onCompletionError(sym) diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/AnnotationOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/AnnotationOps.scala index 948bbc2868e7..901ccf7fcc0f 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/AnnotationOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/AnnotationOps.scala @@ -47,7 +47,7 @@ trait AnnotationOps { self: TastyUniverse => class FromTree(tree: Symbol => Context => Tree) extends DeferredAnnotation { private[bridge] def eager(annotee: Symbol)(implicit ctx: Context): u.AnnotationInfo = { val atree = tree(annotee)(ctx) - ctx.log(s"annotation on $annotee: $atree") + ctx.log(s"annotation on $annotee: ${showTree(atree)}") val annot = mkAnnotation(atree) val annotSym = annot.tpe.typeSymbol if ((annotSym eq defn.TargetNameAnnotationClass) || (annotSym eq 
defn.StaticMethodAnnotationClass)) { diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala index ca1052bb7241..1079456db6d8 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala @@ -158,7 +158,7 @@ trait ContextOps { self: TastyUniverse => if (u.settings.YdebugTasty) u.reporter.echo( pos = u.NoPosition, - msg = str.linesIterator.map(line => s"#[$classRoot]: $line").mkString(System.lineSeparator) + msg = str.linesIterator.map(line => s"${showSymStable(classRoot)}: $line").mkString(System.lineSeparator) ) } @@ -256,11 +256,11 @@ trait ContextOps { self: TastyUniverse => /** Guards the creation of an object val by checking for an existing definition in the owner's scope */ final def delayCompletion(owner: Symbol, name: TastyName, completer: TastyCompleter, privateWithin: Symbol = noSymbol): Symbol = { - def default() = unsafeNewSymbol(owner, name, completer.originalFlagSet, completer, privateWithin) - if (completer.originalFlagSet.is(Object)) { + def default() = unsafeNewSymbol(owner, name, completer.tflags, completer, privateWithin) + if (completer.tflags.is(Object)) { val sourceObject = findObject(owner, encodeTermName(name)) if (isSymbol(sourceObject)) - redefineSymbol(sourceObject, completer.originalFlagSet, completer, privateWithin) + redefineSymbol(sourceObject, completer.tflags, completer, privateWithin) else default() } @@ -272,11 +272,11 @@ trait ContextOps { self: TastyUniverse => /** Guards the creation of an object class by checking for an existing definition in the owner's scope */ final def delayClassCompletion(owner: Symbol, typeName: TastyName.TypeName, completer: TastyCompleter, privateWithin: Symbol): Symbol = { - def default() = unsafeNewClassSymbol(owner, typeName, completer.originalFlagSet, completer, privateWithin) - if (completer.originalFlagSet.is(Object)) { + def default() = unsafeNewClassSymbol(owner, typeName, completer.tflags, completer, privateWithin) + if (completer.tflags.is(Object)) { val sourceObject = findObject(owner, encodeTermName(typeName.toTermName)) if (isSymbol(sourceObject)) - redefineSymbol(sourceObject.objectImplementation, completer.originalFlagSet, completer, privateWithin) + redefineSymbol(sourceObject.objectImplementation, completer.tflags, completer, privateWithin) else default() } @@ -412,7 +412,7 @@ trait ContextOps { self: TastyUniverse => val moduleCls = sym.moduleClass val moduleClsFlags = FlagSets.withAccess( flags = FlagSets.Creation.ObjectClassDef, - inheritedAccess = sym.repr.originalFlagSet + inheritedAccess = sym.repr.tflags ) val selfTpe = defn.SingleType(sym.owner.thisPrefix, sym) val ctor = newConstructor(moduleCls, selfTpe) diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/SymbolOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/SymbolOps.scala index 21afc92da34f..8acf83ec2bdf 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/SymbolOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/SymbolOps.scala @@ -12,9 +12,8 @@ package scala.tools.nsc.tasty.bridge -import scala.tools.nsc.tasty.SafeEq - -import scala.tools.nsc.tasty.{TastyUniverse, TastyModes}, TastyModes._ +import scala.annotation.tailrec +import scala.tools.nsc.tasty.{SafeEq, TastyUniverse, TastyModes}, TastyModes._ import scala.tools.tasty.{TastyName, Signature, TastyFlags}, TastyName.SignedName, Signature.MethodSignature, TastyFlags._ import scala.tools.tasty.ErasedTypeRef import 
scala.util.chaining._ @@ -45,14 +44,29 @@ trait SymbolOps { self: TastyUniverse => } } + /** Fetch the symbol of a path type without forcing the symbol, + * `NoSymbol` if not a path. + */ + @tailrec + private[bridge] final def symOfType(tpe: Type): Symbol = tpe match { + case tpe: u.TypeRef => tpe.sym + case tpe: u.SingleType => tpe.sym + case tpe: u.ThisType => tpe.sym + case tpe: u.ConstantType => symOfType(tpe.value.tpe) + case tpe: u.ClassInfoType => tpe.typeSymbol + case tpe: u.RefinedType0 => tpe.typeSymbol + case tpe: u.ExistentialType => symOfType(tpe.underlying) + case _ => u.NoSymbol + } + implicit final class SymbolDecorator(val sym: Symbol) { - def isScala3Inline: Boolean = repr.originalFlagSet.is(Inline) - def isScala2Macro: Boolean = repr.originalFlagSet.is(FlagSets.Scala2Macro) - def isTraitParamAccessor: Boolean = sym.owner.isTrait && repr.originalFlagSet.is(FieldAccessor|ParamSetter) + def isScala3Inline: Boolean = repr.tflags.is(Inline) + def isScala2Macro: Boolean = repr.tflags.is(FlagSets.Scala2Macro) + def isTraitParamAccessor: Boolean = sym.owner.isTrait && repr.tflags.is(FieldAccessor|ParamSetter) def isParamGetter: Boolean = - sym.isMethod && sym.repr.originalFlagSet.is(FlagSets.ParamGetter) + sym.isMethod && sym.repr.tflags.is(FlagSets.ParamGetter) /** A computed property that should only be called on a symbol which is known to have been initialised by the * Tasty Unpickler and is not yet completed. @@ -70,8 +84,13 @@ trait SymbolOps { self: TastyUniverse => } def ensureCompleted(): Unit = { - sym.info - sym.annotations.foreach(_.completeInfo()) + val raw = sym.rawInfo + if (raw.isInstanceOf[u.LazyType]) { + sym.info + sym.annotations.foreach(_.completeInfo()) + } else { + assert(!raw.isInstanceOf[TastyRepr], s"${showSym(sym)} has incorrectly initialised info $raw") + } } def objectImplementation: Symbol = sym.moduleClass def sourceObject: Symbol = sym.sourceModule @@ -195,5 +214,6 @@ trait SymbolOps { self: TastyUniverse => } def showSig(sig: MethodSignature[ErasedTypeRef]): String = sig.map(_.signature).show - def showSym(sym: Symbol): String = s"Symbol(${sym.accurateKindString} ${sym.name}, #${sym.id})" + def showSym(sym: Symbol): String = s"`(#${sym.id}) ${sym.accurateKindString} ${sym.name}`" + def showSymStable(sym: Symbol): String = s"#[${sym.id}, ${sym.name}]" } diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/TreeOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/TreeOps.scala index 6f6edd0de981..f8cb55181005 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/TreeOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/TreeOps.scala @@ -35,6 +35,19 @@ trait TreeOps { self: TastyUniverse => } } + def showTree(tree: Tree): String = { + // here we want to avoid forcing the symbols of type trees, + // so instead substitute the type tree with an Identifier + // of the `showType`, which does not force. 
+ val tree1 = tree.transform(new u.Transformer { + override def transform(tree: Tree) = tree match { + case tree: u.TypeTree => u.Ident(s"${showType(tree.tpe, wrap = false)}") // ident prints its name directly + case tree => super.transform(tree) + } + }) + u.show(tree1) + } + object tpd { @inline final def Constant(value: Any): Constant = diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala index f553f3a6b030..5cc28daa8b25 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala @@ -52,9 +52,59 @@ trait TypeOps { self: TastyUniverse => } } - def lzyShow(tpe: Type): String = tpe match { - case u.TypeRef(_, sym, args) => s"$sym${if (args.nonEmpty) args.map(lzyShow).mkString("[", ",","]") else ""}" - case tpe => tpe.typeSymbolDirect.toString + def lzyShow(tpe: Type): String = { + val sym = symOfType(tpe) + if (isSymbol(sym)) { + val args = tpe.typeArgs + s"${sym.fullName}${if (args.nonEmpty) args.map(lzyShow).mkString("[", ",", "]") else ""}" + } + else { + s"${tpe.typeSymbolDirect.fullName}" + } + } + + def showType(tpe: Type, wrap: Boolean = true): String = { + def prefixed(prefix: String)(op: => String) = { + val raw = op + if (wrap) s"""$prefix"$raw"""" + else raw + } + def parameterised(tparams: List[Symbol], prefix: String)(f: String => String) = prefixed(prefix) { + f(if (tparams.isEmpty) "" else tparams.map(p => s"${p.name}").mkString("[", ", ", "]")) + } + def cls(tparams: List[Symbol], tpe: u.ClassInfoType) = parameterised(tparams, "cls") { paramStr => + s"$paramStr${tpe.typeSymbol.fullName}$paramStr" + } + def meth(tparams: List[Symbol], tpe: u.MethodType) = parameterised(tparams, "meth") { paramStr => + s"$paramStr$tpe" + } + def preStr(pre: Type): String = { + val preSym = symOfType(pre) + if (isSymbol(preSym)) s"${preSym.fullName}." 
else "" + } + tpe match { + case tpe: u.ClassInfoType => cls(Nil, tpe) + case u.PolyType(tparams, tpe: u.ClassInfoType) => cls(tparams, tpe) + case u.PolyType(tparams, tpe: u.MethodType) => meth(tparams, tpe) + case tpe: u.MethodType => meth(Nil, tpe) + case tpe: u.ThisType => prefixed("path") { s"${tpe.sym.fullName}.this" } + + case tpe: u.SingleType => + prefixed("path") { s"${preStr(tpe.prefix)}${tpe.sym.name}.type" } + + case tpe: u.TypeRef => + val pre = preStr(tpe.prefix) + if (tpe.sym.is(Object)) prefixed("path") { + s"$pre${tpe.sym.name}.type" + } + else prefixed("tpelazy") { + val argsStrs = tpe.args.map(showType(_, wrap = false)) + val argsStr = if (argsStrs.nonEmpty) argsStrs.mkString("[", ", ", "]") else "" + s"$pre${tpe.sym.name}$argsStr" + } + + case tpe => prefixed("tpe") { s"$tpe" } + } } def fnResult(fn: Type): Type = fn.dealiasWiden.finalResultType @@ -98,11 +148,11 @@ trait TypeOps { self: TastyUniverse => */ object DefaultInfo extends TastyRepr { override def isTrivial: Boolean = true - def originalFlagSet: TastyFlagSet = EmptyTastyFlags + def tflags: TastyFlagSet = EmptyTastyFlags } - private[bridge] def CopyInfo(underlying: u.TermSymbol, originalFlagSet: TastyFlagSet): TastyRepr = - new CopyCompleter(underlying, originalFlagSet) + private[bridge] def CopyInfo(underlying: u.TermSymbol, tflags: TastyFlagSet): TastyRepr = + new CopyCompleter(underlying, tflags) def OpaqueTypeToBounds(tpe: Type): (Type, Type) = tpe match { case u.PolyType(tparams, tpe) => @@ -393,11 +443,11 @@ trait TypeOps { self: TastyUniverse => private[TypeOps] val NoSymbolFn = (_: Context) => u.NoSymbol sealed abstract trait TastyRepr extends u.Type { - def originalFlagSet: TastyFlagSet - final def tastyOnlyFlags: TastyFlagSet = originalFlagSet & FlagSets.TastyOnlyFlags + def tflags: TastyFlagSet + final def unsupportedFlags: TastyFlagSet = tflags & FlagSets.TastyOnlyFlags } - abstract class TastyCompleter(isClass: Boolean, final val originalFlagSet: TastyFlagSet)(implicit + abstract class TastyCompleter(isClass: Boolean, final val tflags: TastyFlagSet)(implicit capturedCtx: Context) extends u.LazyType with TastyRepr with u.FlagAgnosticCompleter { override final val decls: u.Scope = if (isClass) u.newScope else u.EmptyScope @@ -415,7 +465,7 @@ trait TypeOps { self: TastyUniverse => def computeInfo(sym: Symbol)(implicit ctx: Context): Unit } - private[TypeOps] class CopyCompleter(underlying: u.TermSymbol, final val originalFlagSet: TastyFlagSet) + private[TypeOps] class CopyCompleter(underlying: u.TermSymbol, final val tflags: TastyFlagSet) extends u.LazyType with TastyRepr with u.FlagAgnosticCompleter { override final def complete(sym: Symbol): Unit = { underlying.ensureCompleted() From fa95bbc3bde924c918569b77a42138940d90a1f9 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Tue, 13 Jul 2021 10:19:33 +0200 Subject: [PATCH 297/769] add tasty unpickling tracing framework --- .../tools/nsc/tasty/bridge/ContextOps.scala | 70 +++++++++++++++++-- .../scala/tools/nsc/tasty/package.scala | 7 ++ 2 files changed, 71 insertions(+), 6 deletions(-) diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala index 1079456db6d8..630691a73b74 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala @@ -13,12 +13,17 @@ package scala.tools.nsc.tasty.bridge import scala.annotation.tailrec + +import scala.collection.mutable import scala.reflect.io.AbstractFile 
+import scala.reflect.internal.MissingRequirementError import scala.tools.tasty.{TastyName, TastyFlags}, TastyFlags._, TastyName.ObjectName import scala.tools.nsc.tasty.{TastyUniverse, TastyModes, SafeEq}, TastyModes._ -import scala.reflect.internal.MissingRequirementError -import scala.collection.mutable +import scala.tools.nsc.tasty.{cyan, yellow, magenta, blue, green} + +import scala.util.chaining._ + /**This contains the definition for `Context`, along with standard error throwing capabilities with user friendly * formatted errors that can change their output depending on the context mode. @@ -121,6 +126,8 @@ trait ContextOps { self: TastyUniverse => } } + final case class TraceInfo[-T](query: String, qual: String, res: T => String, modifiers: List[String] = Nil) + /**Maintains state through traversal of a TASTy file, such as the outer scope of the defintion being traversed, the * traversal mode, and the root owners and source path for the TASTy file. * It also provides all operations for manipulation of the symbol table, such as creating/updating symbols and @@ -155,11 +162,33 @@ trait ContextOps { self: TastyUniverse => } final def log(str: => String): Unit = { - if (u.settings.YdebugTasty) - u.reporter.echo( - pos = u.NoPosition, - msg = str.linesIterator.map(line => s"${showSymStable(classRoot)}: $line").mkString(System.lineSeparator) + if (u.settings.YdebugTasty) { + logImpl(str) + } + } + + private final def logImpl(str: => String): Unit = u.reporter.echo( + pos = u.NoPosition, + msg = str + .linesIterator + .map(line => s"${blue(s"${showSymStable(classRoot)}:")} $line") + .mkString(System.lineSeparator) + ) + + @inline final def trace[T](info: => TraceInfo[T])(op: => T): T = { + + def withTrace(info: => TraceInfo[T], op: => T)(traceId: String): T = { + val i = info + val modStr = ( + if (i.modifiers.isEmpty) "" + else " " + green(i.modifiers.mkString("[", ",", "]")) ) + logImpl(s"${yellow(s"$traceId")} ${cyan(s"<<< ${i.query}:")} ${magenta(i.qual)}$modStr") + op.tap(eval => logImpl(s"${yellow(s"$traceId")} ${cyan(s">>>")} ${magenta(i.res(eval))}$modStr")) + } + + if (u.settings.YdebugTasty) initialContext.addFrame(withTrace(info, op)) + else op } def owner: Symbol @@ -510,6 +539,35 @@ trait ContextOps { self: TastyUniverse => def mode: TastyMode = EmptyTastyMode def owner: Symbol = topLevelClass.owner + private class TraceFrame(val id: Int, val next: TraceFrame) { + + var nextChild: Int = 0 + + def show: String = { + val buf = mutable.ArrayDeque.empty[String] + var cur = this + while (cur.id != -1) { + buf.prepend(cur.id.toString) + cur = cur.next + } + buf.mkString("[", " ", ")") + } + + } + + private[this] var _trace: TraceFrame = new TraceFrame(id = -1, next = null) + + private[ContextOps] def addFrame[T](op: String => T): T = { + val oldFrame = _trace + val newFrame = new TraceFrame(id = oldFrame.nextChild, next = oldFrame) + _trace = newFrame + try op(newFrame.show) + finally { + _trace = oldFrame + _trace.nextChild += 1 + } + } + private[this] var mySymbolsToForceAnnots: mutable.LinkedHashSet[Symbol] = _ private[ContextOps] def stageSymbolToForceAnnots(sym: Symbol): Unit = { diff --git a/src/compiler/scala/tools/nsc/tasty/package.scala b/src/compiler/scala/tools/nsc/tasty/package.scala index 5122e0711a88..a490d74d6ede 100644 --- a/src/compiler/scala/tools/nsc/tasty/package.scala +++ b/src/compiler/scala/tools/nsc/tasty/package.scala @@ -22,4 +22,11 @@ package object tasty { @inline final def !==(u: T): Boolean = t != u } + def cyan(str: String): String = Console.CYAN + str 
+ Console.RESET + def yellow(str: String): String = Console.YELLOW + str + Console.RESET + def magenta(str: String): String = Console.MAGENTA + str + Console.RESET + def red(str: String): String = Console.RED + str + Console.RESET + def green(str: String): String = Console.GREEN + str + Console.RESET + def blue(str: String): String = Console.BLUE + str + Console.RESET + } From a486385ff107e3369244c6e845aafbe34b2579e6 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Tue, 13 Jul 2021 13:00:18 +0200 Subject: [PATCH 298/769] trace nontrivial operations --- .../scala/tools/nsc/tasty/ForceKinds.scala | 60 +++++ .../scala/tools/nsc/tasty/TreeUnpickler.scala | 224 ++++++++++++------ .../nsc/tasty/bridge/AnnotationOps.scala | 1 - .../tools/nsc/tasty/bridge/ContextOps.scala | 31 ++- .../tools/nsc/tasty/bridge/SymbolOps.scala | 123 ++++++---- .../tools/nsc/tasty/bridge/TreeOps.scala | 4 +- .../tools/nsc/tasty/bridge/TypeOps.scala | 47 ++-- 7 files changed, 347 insertions(+), 143 deletions(-) create mode 100644 src/compiler/scala/tools/nsc/tasty/ForceKinds.scala diff --git a/src/compiler/scala/tools/nsc/tasty/ForceKinds.scala b/src/compiler/scala/tools/nsc/tasty/ForceKinds.scala new file mode 100644 index 000000000000..b2b3c2bdf9ed --- /dev/null +++ b/src/compiler/scala/tools/nsc/tasty/ForceKinds.scala @@ -0,0 +1,60 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.nsc.tasty + +import scala.language.implicitConversions + +import ForceKinds._ + +object ForceKinds { + + /** When forcing the constructor of an annotation */ + final val AnnotCtor: ForceKinds.Single = of(1 << 0) + /** When forcing the companion of a module */ + final val DeepForce: ForceKinds.Single = of(1 << 1) + /** When forcing the owner of a symbol */ + final val CompleteOwner: ForceKinds.Single = of(1 << 2) + /** When forcing an overloaded signature */ + final val OverloadedSym: ForceKinds.Single = of(1 << 3) + /** When forcing a symbol that will be copied */ + final val CopySym: ForceKinds.Single = of(1 << 4) + /** When forcing the underlying symbol of some type space */ + final val SpaceForce: ForceKinds.Single = of(1 << 5) + + private def of(mask: Int): ForceKinds.Single = new ForceKinds.Single(mask) + + class Single(val toInt: Int) extends AnyVal { mode => + def |(single: ForceKinds.Single): ForceKinds = new ForceKinds(toInt | single.toInt) + } + + @inline implicit def single2ForceKinds(single: ForceKinds.Single): ForceKinds = new ForceKinds(single.toInt) + +} + +/**A static type representing a bitset of modes that are for debugging why a symbol may have been forced + */ +class ForceKinds(val toInt: Int) extends AnyVal { + def is(single: ForceKinds.Single): Boolean = (toInt & single.toInt) == single.toInt + def |(single: ForceKinds.Single): ForceKinds = new ForceKinds(toInt | single.toInt) + + def describe: List[String] = { + var xs = List.empty[String] + if (is(AnnotCtor)) xs ::= "reading annotation constructor" + if (is(DeepForce)) xs ::= "deep" + if (is(CompleteOwner)) xs ::= "class owner is required" + if (is(OverloadedSym)) xs ::= "overload resolution" + if (is(CopySym)) xs ::= "copying its info" + if (is(SpaceForce)) xs ::= "space" + xs + } +} diff --git a/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala 
b/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala index 1764cadaa334..714e65e5cea2 100644 --- a/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala +++ b/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala @@ -12,7 +12,9 @@ package scala.tools.nsc.tasty -import scala.tools.tasty.{TastyRefs, TastyReader, TastyName, TastyFormat, TastyFlags}, TastyRefs._, TastyFlags._, TastyFormat._ +import scala.tools.tasty.{TastyRefs, TastyReader, TastyName, TastyFormat, TastyFlags} +import TastyRefs._, TastyFlags._, TastyFormat._ +import ForceKinds._ import scala.annotation.switch import scala.collection.mutable @@ -84,16 +86,31 @@ class TreeUnpickler[Tasty <: TastyUniverse]( this.roots = Set(objectRoot, classRoot) val rdr = new TreeReader(reader).fork ownerTree = new OwnerTree(NoAddr, 0, rdr.fork, reader.endAddr) - def indexTopLevel(implicit ctx: Context): Unit = rdr.indexStats(reader.endAddr) - if (rdr.isTopLevel) - inIndexScopedStatsContext(indexTopLevel(_)) + def indexTopLevel()(implicit ctx: Context): Unit = rdr.indexStats(reader.endAddr) + if (rdr.isTopLevel) { + inIndexScopedStatsContext { ctx0 => + ctx0.trace(traceTopLevel(classRoot, objectRoot)) { + indexTopLevel()(ctx0) + } + } + } } + private def traceTopLevel(classRoot: Symbol, objectRoot: Symbol) = TraceInfo[Unit]( + query = s"reading top level roots", + qual = s"${showSym(classRoot)}, ${showSym(objectRoot)}", + res = _ => "entered top level roots" + ) + /** A completer that captures the current position and context, which then uses the position to discover the symbol * to compute the info for. */ - class Completer(isClass: Boolean, reader: TastyReader, tflags: TastyFlagSet)(implicit ctx: Context) - extends TastyCompleter(isClass, tflags) { self => + class Completer( + isClass: Boolean, + reader: TastyReader, + tflags: TastyFlagSet + )(implicit ctx: Context) + extends TastyCompleter(isClass, tflags) { private val symAddr = reader.currentAddr @@ -238,23 +255,35 @@ class TreeUnpickler[Tasty <: TastyUniverse]( case Some(sym) => sym case None => - ctx.log(s"<<< No symbol found at forward reference $addr, ensuring one exists:") - val ctxAtOwner = ctx.withOwner(ownerTree.findOwner(addr)) - val sym = forkAt(addr).createSymbol()(ctxAtOwner) - ctx.log(s">>> $addr forward reference to ${showSym(sym)}") - sym + ctx.trace(traceForwardReference(addr)) { + val ctxAtOwner = ctx.withOwner(ownerTree.findOwner(addr)) + forkAt(addr).createSymbol()(ctxAtOwner) + } } + private def traceForwardReference(addr: Addr) = TraceInfo[Symbol]( + query = s"creating forward reference", + qual = s"at $addr", + res = sym => s"$addr forward reference to ${showSym(sym)}" + ) + /** The symbol defined by current definition */ def symbolAtCurrent()(implicit ctx: Context): Symbol = symAtAddr.get(currentAddr) match { case Some(sym) => assert(ctx.owner === sym.owner, s"owner discrepancy for ${showSym(sym)}, expected: ${showSym(ctx.owner)}, found: ${showSym(sym.owner)}") sym case None => - ctx.log(s"$currentAddr No symbol found at current address, ensuring one exists:") - createSymbol() + ctx.trace(traceCurrentSymbol(currentAddr)) { + createSymbol() + } } + private def traceCurrentSymbol(addr: Addr) = TraceInfo[Symbol]( + query = "create symbol at current address", + qual = s"$addr", + res = sym => if (!isSymbol(sym)) s"evicted symbol at $addr" else s"created ${showSym(sym)} at $addr" + ) + def readConstant(tag: Int)(implicit ctx: Context): Constant = (tag: @switch) match { case UNITconst => tpd.Constant(()) @@ -288,7 +317,12 @@ class TreeUnpickler[Tasty <: TastyUniverse]( 
def readType()(implicit ctx: Context): Type = { val start = currentAddr val tag = readByte() - ctx.log(s"$start reading type ${astTagToString(tag)}:") + + def traceReadType = TraceInfo[Type]( + query = "reading type", + qual = s"${astTagToString(tag)} $start", + res = tpe => s"exit ${showType(tpe)} ${astTagToString(tag)} $start" + ) def registeringTypeWith[T](tp: Type, op: => T): T = { typeAtAddr(start) = tp @@ -393,7 +427,9 @@ class TreeUnpickler[Tasty <: TastyUniverse]( case _ => defn.ConstantType(readConstant(tag)) } } - if (tag < firstLengthTreeTag) readSimpleType() else readLengthType() + ctx.traceV(traceReadType) { + if (tag < firstLengthTreeTag) readSimpleType() else readLengthType() + } } private def readSymNameRef()(implicit ctx: Context): Type = { @@ -653,18 +689,25 @@ class TreeUnpickler[Tasty <: TastyUniverse]( private val readTypedAnnot: Context => DeferredAnnotation = { implicit ctx => val annotCtx = ctx.addMode(ReadAnnotation) val start = currentAddr - ctx.log(s"<<< $start reading annotation:") readByte() // tag val end = readEnd() val annotSym = readType()(annotCtx).typeSymbolDirect - val deferred = readLaterWithOwner(end, rdr => ctx => { - ctx.log(s"${rdr.reader.currentAddr} reading LazyAnnotationRef[${annotSym.fullName}]()") - rdr.readTerm()(ctx) - })(annotCtx.retractMode(IndexScopedStats)) - ctx.log(s">>> $start LazyAnnotationRef[${annotSym.fullName}]()") - DeferredAnnotation.fromTree(deferred) + val annotStart = currentAddr + ctx.log(s"$annotStart collected annotation ${showSym(annotSym)}, starting at $start, ending at $end") + val mkTree = readLaterWithOwner(end, rdr => ctx => + ctx.trace(traceAnnotation(annotStart, annotSym, ctx.owner)) { + rdr.readTerm()(ctx) + } + )(annotCtx.retractMode(IndexScopedStats)) + DeferredAnnotation.fromTree(mkTree) } + private def traceAnnotation(annotStart: Addr, annotSym: Symbol, annotee: Symbol) = TraceInfo[Tree]( + query = s"reading annotation tree", + qual = s"${showSym(annotSym)} at $annotStart", + res = atree => s"annotation of ${showSym(annotee)} = ${showTree(atree)}" + ) + /** Create symbols for the definitions in the statement sequence between * current address and `end`. 
*/ @@ -813,7 +856,7 @@ class TreeUnpickler[Tasty <: TastyUniverse]( val allowedClassFlags = allowedShared | Open | Transparent if (sym.isClass) { checkUnsupportedFlags(repr.unsupportedFlags &~ allowedClassFlags) - sym.owner.ensureCompleted() + sym.owner.ensureCompleted(CompleteOwner) readTemplate()(localCtx) } else { @@ -839,38 +882,74 @@ class TreeUnpickler[Tasty <: TastyUniverse]( else defn.ExprType(tpt.tpe)) } - def initialize()(implicit ctx: Context): Unit = { - val repr = sym.rawInfo match { - case repr: TastyRepr => repr - case _ => return () // nothing to do here (assume correctly initalised) - } - ctx.log(s"$symAddr completing ${showSym(sym)} in scope ${showSym(ctx.owner)}") - val localCtx = ctx.withOwner(sym) - tag match { - case DEFDEF => DefDef(repr, localCtx) - case VALDEF => ValDef(repr, localCtx) - case TYPEDEF | TYPEPARAM => TypeDef(repr, localCtx) - case PARAM => TermParam(repr, localCtx) + def initialize(localCtx: Context)(implicit ctx: Context): Unit = ctx.trace(traceCompletion(symAddr, sym)) { + sym.rawInfo match { + case repr: TastyRepr => + tag match { + case DEFDEF => DefDef(repr, localCtx) + case VALDEF => ValDef(repr, localCtx) + case TYPEDEF | TYPEPARAM => TypeDef(repr, localCtx) + case PARAM => TermParam(repr, localCtx) + } + case _ => // nothing to do here (assume correctly initalised) + ctx.log(s"${showSym(sym)} is already initialised, in owner ${showSym(sym.owner)}") } } try { - initialize() - ctx.log(s"$symAddr @@@ ${showSym(sym)}.tpe =:= ${showType(sym.info)}; owned by ${location(sym.owner)}") + val localCtx = ctx.withOwner(sym) + if (sym.isClass) { + inIndexScopedStatsContext(localCtx0 => initialize(localCtx0)(ctx))(localCtx) + } + else { + initialize(localCtx) + } NoCycle(at = symAddr) } catch ctx.onCompletionError(sym) finally goto(end) } + private def traceCompletion(addr: Addr, sym: Symbol)(implicit ctx: Context) = TraceInfo[Unit]( + query = "begin completion", + qual = s"${showSym(sym)} in context ${showSym(ctx.owner)} $addr", + res = _ => s"completed ${showSym(sym)}: ${showType(sym.info)}" + ) + private def readTemplate()(implicit ctx: Context): Unit = { val cls = ctx.enterClassCompletion() val localDummy = symbolAtCurrent() assert(readByte() === TEMPLATE) val end = readEnd() - def completeTypeParameters()(implicit ctx: Context): List[Symbol] = { - ctx.log(s"$currentAddr Template: reading parameters of $cls:") + def traceCompleteParams = TraceInfo[List[Symbol]]( + query = "force template parameters", + qual = s"${showSym(cls)} $currentAddr", + res = _ => "forced template parameters" + ) + + def traceIndexMembers = TraceInfo[Unit]( + query = "index template body", + qual = s"${showSym(cls)} $currentAddr", + res = _ => "indexed template body" + ) + + def traceCollectParents = TraceInfo[List[Type]]( + query = "collect template parents", + qual = s"${showSym(cls)} $currentAddr", + res = { parentTypes => + val addendum = parentTypes.map(lzyShow).mkString(s"`${cls.fullName} extends ", " with ", "`") + s"collected template parents $addendum" + } + ) + + def traceReadSelf = TraceInfo[Type]( + query = "reading template self-type", + qual = s"${showSym(cls)} $currentAddr", + res = tpe => s"template self-type is $tpe" + ) + + def completeParameters()(implicit ctx: Context): List[Symbol] = ctx.trace(traceCompleteParams) { val tparams = readIndexedParams[NoCycle](TYPEPARAM).map(symFromNoCycle) if (tparams.nonEmpty) { cls.info = defn.PolyType(tparams, cls.info) @@ -879,41 +958,35 @@ class TreeUnpickler[Tasty <: TastyUniverse]( tparams } - def indexMembers()(implicit 
ctx: Context): Unit = { - ctx.log(s"$currentAddr Template: indexing members of $cls:") + def indexMembers()(implicit ctx: Context): Unit = ctx.trace(traceIndexMembers) { val bodyIndexer = fork while (bodyIndexer.reader.nextByte != DEFDEF) bodyIndexer.skipTree() // skip until primary ctor bodyIndexer.indexStats(end) } - def traverseParents()(implicit ctx: Context): List[Type] = { - ctx.log(s"$currentAddr Template: adding parents of $cls:") + def collectParents()(implicit ctx: Context): List[Type] = ctx.trace(traceCollectParents) { val parentCtx = ctx.withOwner(localDummy).addMode(ReadParents) val parentWithOuter = parentCtx.addMode(OuterTerm) collectWhile(nextByte != SELFDEF && nextByte != DEFDEF) { - nextUnsharedTag match { - case APPLY | TYPEAPPLY | BLOCK => readTerm()(parentWithOuter).tpe - case _ => readTpt()(parentCtx).tpe - } + defn.adjustParent( + nextUnsharedTag match { + case APPLY | TYPEAPPLY | BLOCK => readTerm()(parentWithOuter).tpe + case _ => readTpt()(parentCtx).tpe + } + ) } } def addSelfDef()(implicit ctx: Context): Unit = { - ctx.log(s"$currentAddr Template: adding self-type of $cls:") - readByte() // read SELFDEF tag - readLongNat() // skip Name - val selfTpe = readTpt().tpe - ctx.log(s"$currentAddr Template: self-type is $selfTpe") + val selfTpe = ctx.trace(traceReadSelf) { + readByte() // read SELFDEF tag + readLongNat() // skip Name + readTpt().tpe + } cls.typeOfThis = selfTpe } def setInfoWithParents(tparams: List[Symbol], parentTypes: List[Type])(implicit ctx: Context): Unit = { - def debugMsg = { - val addendum = - if (parentTypes.isEmpty) "" - else parentTypes.map(lzyShow).mkString(" extends ", " with ", "") // don't force types - s"$currentAddr Template: Updated info of $cls$addendum" - } val info = { val classInfo = defn.ClassInfoType(parentTypes, cls) // TODO [tasty]: if support opaque types, refine the self type with any opaque members here @@ -921,21 +994,19 @@ class TreeUnpickler[Tasty <: TastyUniverse]( else defn.PolyType(tparams, classInfo) } ctx.setInfo(cls, info) - ctx.log(debugMsg) } def traverseTemplate()(implicit ctx: Context): Unit = { - val tparams = completeTypeParameters() + val tparams = completeParameters() indexMembers() - val parents = traverseParents() + val parents = collectParents() if (nextByte === SELFDEF) { addSelfDef() } - val parentTypes = ctx.adjustParents(cls, parents) - setInfoWithParents(tparams, parentTypes) + setInfoWithParents(tparams, ctx.processParents(cls, parents)) } - inIndexScopedStatsContext(traverseTemplate()(_)) + traverseTemplate() } @@ -982,7 +1053,12 @@ class TreeUnpickler[Tasty <: TastyUniverse]( def readTerm()(implicit ctx: Context): Tree = { val start = currentAddr val tag = readByte() - ctx.log(s"$start reading term ${astTagToString(tag)}:") + + def traceReadTerm = TraceInfo[Tree]( + query = "reading term", + qual = s"${astTagToString(tag)} $start", + res = tree => s"exit term `${showTree(tree)}` ${astTagToString(tag)} $start" + ) def inParentCtor = ctx.mode.is(ReadParents | OuterTerm) @@ -1117,7 +1193,9 @@ class TreeUnpickler[Tasty <: TastyUniverse]( result } - if (tag < firstLengthTreeTag) readSimpleTerm() else readLengthTerm() // dotty sets span of tree to start + ctx.traceV(traceReadTerm) { + if (tag < firstLengthTreeTag) readSimpleTerm() else readLengthTerm() // dotty sets span of tree to start + } } def readTpt()(implicit ctx: Context): Tree = { @@ -1155,21 +1233,27 @@ class TreeUnpickler[Tasty <: TastyUniverse]( } def readWith[T <: AnyRef]( - reader: TreeReader, + treader: TreeReader, owner: Symbol, mode: 
TastyMode, source: AbstractFile, op: TreeReader => Context => T)( implicit ctx: Context - ): T = + ): T = ctx.trace[T](traceReadWith(treader, mode, owner)) { ctx.withPhaseNoLater("pickler") { ctx0 => - ctx0.log(s"${reader.reader.currentAddr} starting to read with owner ${location(owner)}:") - op(reader)(ctx0 + op(treader)(ctx0 .withOwner(owner) .withMode(mode) .withSource(source) ) } + } + + private def traceReadWith[T](treader: TreeReader, mode: TastyMode, owner: Symbol) = TraceInfo[T]( + query = "read within owner", + qual = s"${showSym(owner)} with modes `${mode.debug}` at ${treader.reader.currentAddr}", + res = t => s"exiting sub reader" + ) /** A lazy datastructure that records how definitions are nested in TASTY data. * The structure is lazy because it needs to be computed only for forward references diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/AnnotationOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/AnnotationOps.scala index 901ccf7fcc0f..da033324bd42 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/AnnotationOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/AnnotationOps.scala @@ -47,7 +47,6 @@ trait AnnotationOps { self: TastyUniverse => class FromTree(tree: Symbol => Context => Tree) extends DeferredAnnotation { private[bridge] def eager(annotee: Symbol)(implicit ctx: Context): u.AnnotationInfo = { val atree = tree(annotee)(ctx) - ctx.log(s"annotation on $annotee: ${showTree(atree)}") val annot = mkAnnotation(atree) val annotSym = annot.tpe.typeSymbol if ((annotSym eq defn.TargetNameAnnotationClass) || (annotSym eq defn.StaticMethodAnnotationClass)) { diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala index 630691a73b74..aa9785adb14c 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala @@ -191,6 +191,15 @@ trait ContextOps { self: TastyUniverse => else op } + /** Trace only when `-Vdebug` is set + */ + @inline final def traceV[T](info: => TraceInfo[T])(op: => T): T = { + if (u.settings.debug.value) { + trace(info)(op) + } + else op + } + def owner: Symbol def source: AbstractFile def mode: TastyMode @@ -410,13 +419,8 @@ trait ContextOps { self: TastyUniverse => cls } - /** Normalises the parents and sets up value class machinery */ - final def adjustParents(cls: Symbol, parents: List[Type]): List[Type] = { - val parentTypes = parents.map { tp => - val tpe = tp.dealias - if (tpe.typeSymbolDirect === u.definitions.ObjectClass) u.definitions.AnyRefTpe - else tpe - } + /** sets up value class machinery */ + final def processParents(cls: Symbol, parentTypes: List[Type]): parentTypes.type = { if (parentTypes.head.typeSymbolDirect === u.definitions.AnyValClass) { // TODO [tasty]: please reconsider if there is some shared optimised logic that can be triggered instead. withPhaseNoLater("extmethods") { ctx0 => @@ -590,13 +594,20 @@ trait ContextOps { self: TastyUniverse => val toForce = mySymbolsToForceAnnots.toList mySymbolsToForceAnnots.clear() for (sym <- toForce) { - log(s"!!! 
forcing annotations on ${showSym(sym)}") - analyseAnnotations(sym) + trace(traceForceAnnotations(sym)) { + analyseAnnotations(sym) + } } assert(mySymbolsToForceAnnots.isEmpty, "more symbols added while forcing") } } + private def traceForceAnnotations(sym: Symbol) = TraceInfo[Unit]( + query = "forcing annotations of symbol", + qual = s"${showSym(sym)}", + res = _ => s"annotations were forced on ${showSym(sym)}" + ) + private[this] var myInlineDefs: mutable.Map[Symbol, mutable.ArrayBuffer[Symbol]] = null private[this] var myMacros: mutable.Map[Symbol, mutable.ArrayBuffer[Symbol]] = null private[this] var myTraitParamAccessors: mutable.Map[Symbol, mutable.ArrayBuffer[Symbol]] = null @@ -636,7 +647,7 @@ trait ContextOps { self: TastyUniverse => * Reports illegal definitions: * - trait constructors with parameters * - * @param cls should be a symbol associated with a non-empty scope + * @param cls should be a class symbol associated with a non-empty scope */ private[ContextOps] def enterLatentDefs(cls: Symbol): Unit = { diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/SymbolOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/SymbolOps.scala index 8acf83ec2bdf..4384cc14a193 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/SymbolOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/SymbolOps.scala @@ -13,10 +13,9 @@ package scala.tools.nsc.tasty.bridge import scala.annotation.tailrec -import scala.tools.nsc.tasty.{SafeEq, TastyUniverse, TastyModes}, TastyModes._ +import scala.tools.nsc.tasty.{SafeEq, TastyUniverse, ForceKinds, TastyModes}, TastyModes._, ForceKinds._ import scala.tools.tasty.{TastyName, Signature, TastyFlags}, TastyName.SignedName, Signature.MethodSignature, TastyFlags._ import scala.tools.tasty.ErasedTypeRef -import scala.util.chaining._ /**This layer deals with selecting a member symbol from a type using a `TastyName`, * also contains factories for making type references to symbols. 
@@ -34,13 +33,20 @@ trait SymbolOps { self: TastyUniverse => final def declaringSymbolOf(sym: Symbol): Symbol = if (sym.isModuleClass) sym.sourceModule else sym - private final def deepComplete(tpe: Type): Unit = { - val asTerm = tpe.termSymbol - if (asTerm ne u.NoSymbol) { - asTerm.ensureCompleted() - deepComplete(tpe.widen) - } else { - tpe.typeSymbol.ensureCompleted() + private final def deepComplete(tpe: Type)(implicit ctx: Context): Unit = { + symOfType(tpe) match { + case u.NoSymbol => + ctx.log(s"could not retrieve symbol from type ${showType(tpe)}") + case termSym if termSym.isTerm => + if (termSym.is(Object)) { + termSym.ensureCompleted(SpaceForce) + termSym.moduleClass.ensureCompleted(DeepForce | SpaceForce) + } + else { + ctx.log(s"deep complete on non-module term ${showSym(termSym)}, not taking action") + } + case typeSym => + typeSym.ensureCompleted(SpaceForce) } } @@ -83,15 +89,28 @@ trait SymbolOps { self: TastyUniverse => } } - def ensureCompleted(): Unit = { + def ensureCompleted(forceKinds: ForceKinds)(implicit ctx: Context): Unit = { val raw = sym.rawInfo if (raw.isInstanceOf[u.LazyType]) { - sym.info - sym.annotations.foreach(_.completeInfo()) + ctx.trace(traceForceInfo(sym, forceKinds)) { + sym.info + sym.annotations.foreach(_.completeInfo()) + } } else { assert(!raw.isInstanceOf[TastyRepr], s"${showSym(sym)} has incorrectly initialised info $raw") } } + + private def traceForceInfo( + sym: Symbol, + forceKinds: ForceKinds + )(implicit ctx: Context) = TraceInfo[Unit]( + query = "force symbol info", + qual = s"${showSym(sym)} in context ${showSym(ctx.owner)}", + res = _ => s"${showSym(sym)} was forced", + modifiers = forceKinds.describe + ) + def objectImplementation: Symbol = sym.moduleClass def sourceObject: Symbol = sym.sourceModule def ref(args: List[Type]): Type = u.appliedType(sym, args) @@ -170,49 +189,61 @@ trait SymbolOps { self: TastyUniverse => typeError(s"can't find $missing; perhaps it is missing from the classpath.") } - private def signedMemberOfSpace(space: Type, qual: TastyName, sig: MethodSignature[ErasedTypeRef], target: TastyName)(implicit ctx: Context): Symbol = { + private def signedMemberOfSpace( + space: Type, + qual: TastyName, + sig: MethodSignature[ErasedTypeRef], + target: TastyName + )(implicit ctx: Context): Symbol = { if (target ne qual) { unsupportedError(s"selection of method $qual with @targetName(" + '"' + target + '"' + ")") } else { - ctx.log(s"""<<< looking for overload in symbolOf[$space] @@ $qual: ${showSig(sig)}""") - val member = space.member(encodeTermName(qual)) - if (!(isSymbol(member) && hasType(member))) errorMissing(space, qual) - val (tyParamCount, paramRefs) = { - val (tyParamCounts, params) = sig.params.partitionMap(identity) - if (tyParamCounts.length > 1) { - unsupportedError(s"method with unmergeable type parameters: $qual") + ctx.trace(traceOverload(space, qual, sig)) { + val member = space.member(encodeTermName(qual)) + if (!(isSymbol(member) && hasType(member))) errorMissing(space, qual) + val (tyParamCount, paramRefs) = { + val (tyParamCounts, params) = sig.params.partitionMap(identity) + if (tyParamCounts.length > 1) { + unsupportedError(s"method with unmergeable type parameters: $qual") + } + (tyParamCounts.headOption.getOrElse(0), params) } - (tyParamCounts.headOption.getOrElse(0), params) - } - def compareSym(sym: Symbol): Boolean = sym match { - case sym: u.MethodSymbol => - val meth0 = u.unwrapWrapperTypes(sym.tpe.asSeenFrom(space, sym.owner)) - val paramSyms = meth0.paramss.flatten - val resTpe = 
meth0.finalResultType - val sameParamSize = paramSyms.length === paramRefs.length - def sameTyParamSize = tyParamCount === ({ - // the signature of a class/mixin constructor includes - // type parameters, in nsc these come from the parent. - val tyParamOwner = if (qual.isConstructorName) member.owner else sym - tyParamOwner.typeParams.length - }) - def sameParams = paramSyms.lazyZip(paramRefs).forall({ - case (paramSym, paramRef) => sameErasure(sym)(paramSym.tpe, paramRef) - }) - sameParamSize && sameTyParamSize && sameParams && sameErasure(sym)(resTpe, sig.result) - case _ => - ctx.log(s"""! member[$space]("$qual") ${showSym(sym)} is not a method""") - false + def compareSym(sym: Symbol): Boolean = sym match { + case sym: u.MethodSymbol => + sym.ensureCompleted(OverloadedSym) + // TODO [tasty]: we should cache signatures for symbols and compare against `sig` + val meth0 = u.unwrapWrapperTypes(sym.tpe.asSeenFrom(space, sym.owner)) + val paramSyms = meth0.paramss.flatten + val resTpe = meth0.finalResultType + val sameParamSize = paramSyms.length === paramRefs.length + def sameTyParamSize = tyParamCount === ({ + // the signature of a class/mixin constructor includes + // type parameters, in nsc these come from the parent. + val tyParamOwner = if (qual.isConstructorName) member.owner else sym + tyParamOwner.typeParams.length + }) + def sameParams = paramSyms.lazyZip(paramRefs).forall({ + case (paramSym, paramRef) => sameErasure(sym)(paramSym.tpe, paramRef) + }) + sameParamSize && sameTyParamSize && sameParams && sameErasure(sym)(resTpe, sig.result) + case _ => + ctx.log(s"""! member[$space]("$qual") ${showSym(sym)} is not a method""") + false + } + member.asTerm.alternatives.find(compareSym).getOrElse( + typeError(s"No matching overload of $space.$qual with signature ${showSig(sig)}") + ) } - member.asTerm.alternatives.find(compareSym).getOrElse( - typeError(s"No matching overload of $space.$qual with signature ${showSig(sig)}") - ).tap(overload => - ctx.log(s">>> selected ${showSym(overload)}: ${overload.tpe}") - ) } } + private def traceOverload(space: Type, tname: TastyName, sig: MethodSignature[ErasedTypeRef]) = TraceInfo[Symbol]( + query = s"looking for overload", + qual = s"symbolOf[$space] @@ $tname: ${showSig(sig)}", + res = overload => s"selected overload ${showSym(overload)}" + ) + def showSig(sig: MethodSignature[ErasedTypeRef]): String = sig.map(_.signature).show def showSym(sym: Symbol): String = s"`(#${sym.id}) ${sym.accurateKindString} ${sym.name}`" def showSymStable(sym: Symbol): String = s"#[${sym.id}, ${sym.name}]" diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/TreeOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/TreeOps.scala index f8cb55181005..57401cb81bce 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/TreeOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/TreeOps.scala @@ -12,7 +12,7 @@ package scala.tools.nsc.tasty.bridge -import scala.tools.nsc.tasty.{TastyUniverse, TastyModes}, TastyModes._ +import scala.tools.nsc.tasty.{TastyUniverse, TastyModes, ForceKinds}, TastyModes._, ForceKinds._ import scala.tools.tasty.TastyName import scala.reflect.internal.Flags @@ -72,7 +72,7 @@ trait TreeOps { self: TastyUniverse => if (ctx.mode.is(ReadAnnotation) && name.isSignedConstructor) { val cls = qual.tpe.typeSymbol - cls.ensureCompleted() // need to force flags + cls.ensureCompleted(AnnotCtor) if (cls.isJavaAnnotation) selectCtor(qual) else diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala 
b/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala index 5cc28daa8b25..cebaa0075d65 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala @@ -12,7 +12,7 @@ package scala.tools.nsc.tasty.bridge -import scala.tools.nsc.tasty.{TastyUniverse, SafeEq, TastyModes}, TastyModes._ +import scala.tools.nsc.tasty.{TastyUniverse, SafeEq, TastyModes, ForceKinds}, TastyModes._, ForceKinds._ import scala.tools.tasty.{TastyName, ErasedTypeRef, TastyFlags}, TastyFlags._ @@ -143,6 +143,12 @@ trait TypeOps { self: TastyUniverse => final val NoType: Type = u.NoType + def adjustParent(tp: Type): Type = { + val tpe = tp.dealias + if (tpe.typeSymbolDirect === u.definitions.ObjectClass) u.definitions.AnyRefTpe + else tpe + } + /** Represents a symbol that has been initialised by TastyUnpickler, but can not be in a state of completion * because its definition has not yet been seen. */ @@ -151,7 +157,7 @@ trait TypeOps { self: TastyUniverse => def tflags: TastyFlagSet = EmptyTastyFlags } - private[bridge] def CopyInfo(underlying: u.TermSymbol, tflags: TastyFlagSet): TastyRepr = + private[bridge] def CopyInfo(underlying: u.TermSymbol, tflags: TastyFlagSet)(implicit ctx: Context): TastyRepr = new CopyCompleter(underlying, tflags) def OpaqueTypeToBounds(tpe: Type): (Type, Type) = tpe match { @@ -447,10 +453,32 @@ trait TypeOps { self: TastyUniverse => final def unsupportedFlags: TastyFlagSet = tflags & FlagSets.TastyOnlyFlags } - abstract class TastyCompleter(isClass: Boolean, final val tflags: TastyFlagSet)(implicit - capturedCtx: Context) extends u.LazyType with TastyRepr with u.FlagAgnosticCompleter { - + abstract class TastyCompleter( + isClass: Boolean, + tflags: TastyFlagSet + )(implicit capturedCtx: Context) + extends BaseTastyCompleter(tflags) { override final val decls: u.Scope = if (isClass) u.newScope else u.EmptyScope + } + + private[TypeOps] class CopyCompleter( + underlying: u.TermSymbol, + tflags: TastyFlagSet + )(implicit ctx: Context) + extends BaseTastyCompleter(tflags) { + def computeInfo(sym: Symbol)(implicit ctx: Context): Unit = { + underlying.ensureCompleted(CopySym) + sym.info = underlying.tpe + underlying.attachments.all.foreach(sym.updateAttachment(_)) + } + } + + abstract class BaseTastyCompleter( + final val tflags: TastyFlagSet + )(implicit capturedCtx: Context) + extends u.LazyType + with TastyRepr + with u.FlagAgnosticCompleter { override final def load(sym: Symbol): Unit = complete(sym) @@ -465,15 +493,6 @@ trait TypeOps { self: TastyUniverse => def computeInfo(sym: Symbol)(implicit ctx: Context): Unit } - private[TypeOps] class CopyCompleter(underlying: u.TermSymbol, final val tflags: TastyFlagSet) - extends u.LazyType with TastyRepr with u.FlagAgnosticCompleter { - override final def complete(sym: Symbol): Unit = { - underlying.ensureCompleted() - sym.info = underlying.tpe - underlying.attachments.all.foreach(sym.updateAttachment(_)) - } - } - def prefixedRef(prefix: Type, sym: Symbol): Type = { if (sym.isType) { prefix match { From 5b7643e8709d3416d921247411dd77b8fb4ec065 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Tue, 13 Jul 2021 13:32:16 +0200 Subject: [PATCH 299/769] fix scala/bug#12369: do not force sealed child --- .../scala/tools/nsc/tasty/ForceKinds.scala | 3 +++ .../tools/nsc/tasty/bridge/ContextOps.scala | 26 ++++++++++++++++--- .../tools/nsc/tasty/bridge/TypeOps.scala | 20 ++++++++++++++ .../run/src-2/tastytest/TestImports.scala | 5 ++++ test/tasty/run/src-3/tastytest/Imports.scala | 
9 +++++++ 5 files changed, 59 insertions(+), 4 deletions(-) create mode 100644 test/tasty/run/src-2/tastytest/TestImports.scala create mode 100644 test/tasty/run/src-3/tastytest/Imports.scala diff --git a/src/compiler/scala/tools/nsc/tasty/ForceKinds.scala b/src/compiler/scala/tools/nsc/tasty/ForceKinds.scala index b2b3c2bdf9ed..a0577f9eb5f9 100644 --- a/src/compiler/scala/tools/nsc/tasty/ForceKinds.scala +++ b/src/compiler/scala/tools/nsc/tasty/ForceKinds.scala @@ -30,6 +30,8 @@ object ForceKinds { final val CopySym: ForceKinds.Single = of(1 << 4) /** When forcing the underlying symbol of some type space */ final val SpaceForce: ForceKinds.Single = of(1 << 5) + /** When forcing the enum singleton from its "fake" module class */ + final val EnumProxy: ForceKinds.Single = of(1 << 6) private def of(mask: Int): ForceKinds.Single = new ForceKinds.Single(mask) @@ -55,6 +57,7 @@ class ForceKinds(val toInt: Int) extends AnyVal { if (is(OverloadedSym)) xs ::= "overload resolution" if (is(CopySym)) xs ::= "copying its info" if (is(SpaceForce)) xs ::= "space" + if (is(EnumProxy)) xs ::= "forcing enum value from fake object" xs } } diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala index aa9785adb14c..00673e15cb12 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala @@ -116,12 +116,25 @@ trait ContextOps { self: TastyUniverse => * sealed child. */ private def analyseAnnotations(sym: Symbol)(implicit ctx: Context): Unit = { + + def lookupChild(childTpe: Type): Symbol = { + val child = symOfType(childTpe) + assert(isSymbol(child), s"did not find symbol of sealed child ${showType(childTpe)}") + if (child.isClass) { + child + } + else { + assert(child.isModule, s"sealed child was not class or object ${showSym(child)}") + child.moduleClass + } + } + for (annot <- sym.annotations) { annot.completeInfo() if (annot.tpe.typeSymbolDirect === defn.ChildAnnot) { - val child = annot.tpe.typeArgs.head.typeSymbolDirect - sym.addChild(child) + val child = lookupChild(annot.tpe.typeArgs.head) ctx.log(s"adding sealed child ${showSym(child)} to ${showSym(sym)}") + sym.addChild(child) } } } @@ -378,9 +391,14 @@ trait ContextOps { self: TastyUniverse => } } else if (flags.is(FlagSets.Creation.ObjectDef)) { - log(s"!!! visited module value $name first") + val isEnum = flags.is(FlagSets.SingletonEnum) + if (!isEnum) { + log(s"!!! 
visited module value $name first") + } val module = owner.newModule(encodeTermName(name), u.NoPosition, newSymbolFlagSet(flags)) - module.moduleClass.info = defn.DefaultInfo + module.moduleClass.info = + if (isEnum) defn.SingletonEnumClassInfo(module, flags) + else defn.DefaultInfo module } else if (name.isTypeName) { diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala index cebaa0075d65..5d99290cb74e 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala @@ -160,6 +160,12 @@ trait TypeOps { self: TastyUniverse => private[bridge] def CopyInfo(underlying: u.TermSymbol, tflags: TastyFlagSet)(implicit ctx: Context): TastyRepr = new CopyCompleter(underlying, tflags) + private[bridge] def SingletonEnumClassInfo( + enumValue: u.TermSymbol, + originalFlagSet: TastyFlagSet + )(implicit ctx: Context): TastyRepr = + new SingletonEnumModuleClassCompleter(enumValue, originalFlagSet) + def OpaqueTypeToBounds(tpe: Type): (Type, Type) = tpe match { case u.PolyType(tparams, tpe) => val (bounds, alias) = OpaqueTypeToBounds(tpe) @@ -473,6 +479,20 @@ trait TypeOps { self: TastyUniverse => } } + /** This completer ensures that if the "fake" singleton enum module class + * is completed first, that it completes the module symbol which + * then completes the module class. + */ + private[TypeOps] class SingletonEnumModuleClassCompleter( + enumValue: u.TermSymbol, + tflags: TastyFlagSet + )(implicit ctx: Context) + extends BaseTastyCompleter(tflags) { + def computeInfo(sym: Symbol)(implicit ctx: Context): Unit = { + enumValue.ensureCompleted(EnumProxy) + } + } + abstract class BaseTastyCompleter( final val tflags: TastyFlagSet )(implicit capturedCtx: Context) diff --git a/test/tasty/run/src-2/tastytest/TestImports.scala b/test/tasty/run/src-2/tastytest/TestImports.scala new file mode 100644 index 000000000000..f9da4fac2e12 --- /dev/null +++ b/test/tasty/run/src-2/tastytest/TestImports.scala @@ -0,0 +1,5 @@ +package tastytest + +import test.Imports + +object TestImports extends App diff --git a/test/tasty/run/src-3/tastytest/Imports.scala b/test/tasty/run/src-3/tastytest/Imports.scala new file mode 100644 index 000000000000..f153b584a254 --- /dev/null +++ b/test/tasty/run/src-3/tastytest/Imports.scala @@ -0,0 +1,9 @@ +package tastytest.test + +sealed trait Imports + +object Imports { + sealed trait Mixin + case object First extends Imports with Mixin + case object Second extends Imports with Mixin +} From 1ac3e9c4cac87d202de3978bcd90815e113751da Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Tue, 13 Jul 2021 14:33:28 +0200 Subject: [PATCH 300/769] be more lazy in TastyReader --- .../scala/tools/nsc/tasty/TreeUnpickler.scala | 1 + .../tools/nsc/tasty/bridge/ContextOps.scala | 19 +++++++++++++------ .../tools/nsc/tasty/bridge/TypeOps.scala | 1 + 3 files changed, 15 insertions(+), 6 deletions(-) diff --git a/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala b/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala index 714e65e5cea2..0de4fdaa1bd0 100644 --- a/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala +++ b/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala @@ -861,6 +861,7 @@ class TreeUnpickler[Tasty <: TastyUniverse]( } else { checkUnsupportedFlags(repr.unsupportedFlags &~ allowedTypeFlags) + sym.info = defn.InitialTypeInfo // needed to avoid cyclic references when unpickling rhs, see dotty_i3816.scala val rhs = readTpt()(if 
(repr.tflags.is(Opaque)) localCtx.addMode(OpaqueTypeDef) else localCtx) val info = if (repr.tflags.is(Opaque)) { diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala index 00673e15cb12..9ac2b0aaf1b8 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala @@ -337,8 +337,10 @@ trait ContextOps { self: TastyUniverse => } def evict(sym: Symbol): Unit = { - sym.owner.rawInfo.decls.unlink(sym) - sym.info = u.NoType + if (isSymbol(sym)) { + sym.owner.rawInfo.decls.unlink(sym) + sym.info = u.NoType + } } final def enterIfUnseen(sym: Symbol): Unit = { @@ -430,10 +432,15 @@ trait ContextOps { self: TastyUniverse => final def enterClassCompletion(): Symbol = { val cls = globallyVisibleOwner.asClass - val assumedSelfType = - if (cls.is(Object) && cls.owner.isClass) defn.SingleType(cls.owner.thisType, cls.sourceModule) - else u.NoType - cls.info = u.ClassInfoType(cls.repr.parents, cls.repr.decls, assumedSelfType.typeSymbolDirect) + val assumedSelfSym = { + if (cls.is(Object) && cls.owner.isClass) { + cls.sourceModule + } + else { + u.NoSymbol + } + } + cls.info = u.ClassInfoType(cls.repr.parents, cls.repr.decls, assumedSelfSym) cls } diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala index 5d99290cb74e..b1109efb23b1 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala @@ -180,6 +180,7 @@ trait TypeOps { self: TastyUniverse => } def ByNameType(arg: Type): Type = u.definitions.byNameType(arg) def TypeBounds(lo: Type, hi: Type): Type = u.TypeBounds.apply(lo, hi) + def InitialTypeInfo: Type = u.TypeBounds.empty def SingleType(pre: Type, sym: Symbol): Type = u.singleType(pre, sym) def ExprType(res: Type): Type = u.NullaryMethodType(res) def InlineExprType(res: Type): Type = res match { From 473a2cbceaeae6bdad68bd6179e9cee8c3da22f9 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Tue, 13 Jul 2021 15:24:59 +0200 Subject: [PATCH 301/769] support local sealed children --- .../tools/nsc/tasty/bridge/ContextOps.scala | 28 ++++++++++++++- .../tools/nsc/tasty/bridge/TypeOps.scala | 13 +++++++ test/tasty/neg/src-2/TestFooMatch.check | 7 ++++ test/tasty/neg/src-2/TestFooMatch_fail.scala | 8 +++++ test/tasty/neg/src-3/dottyi3149/foo.scala | 19 ++++++++++ test/tasty/pos/pre/tastytest/package.scala | 35 +++++++++++++++---- .../src-2/dottyi3149/TestFooChildren.scala | 13 +++++++ test/tasty/pos/src-3/dottyi3149/foo.scala | 19 ++++++++++ 8 files changed, 134 insertions(+), 8 deletions(-) create mode 100644 test/tasty/neg/src-2/TestFooMatch.check create mode 100644 test/tasty/neg/src-2/TestFooMatch_fail.scala create mode 100644 test/tasty/neg/src-3/dottyi3149/foo.scala create mode 100644 test/tasty/pos/src-2/dottyi3149/TestFooChildren.scala create mode 100644 test/tasty/pos/src-3/dottyi3149/foo.scala diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala index 9ac2b0aaf1b8..9abd7099169d 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala @@ -132,7 +132,22 @@ trait ContextOps { self: TastyUniverse => for (annot <- sym.annotations) { annot.completeInfo() if (annot.tpe.typeSymbolDirect === defn.ChildAnnot) { - val child = 
lookupChild(annot.tpe.typeArgs.head) + val child = { + val child0 = lookupChild(annot.tpe.typeArgs.head) + if (child0 eq sym) { + // dotty represents a local sealed child of `C` with a child annotation + // that directly references `C`, this causes an infinite loop in + // `sealedDescendants`. See the tests: + // - test/tasty/neg/src-3/dottyi3149/dotty_i3149.scala + // - test/tasty/neg/src-2/Testdotty_i3149_fail.scala + // TODO [tasty] - fix assumption in compiler that sealed children cannot + // contain the parent class + ctx.newLocalSealedChildProxy(sym) + } + else { + child0 + } + } ctx.log(s"adding sealed child ${showSym(child)} to ${showSym(sym)}") sym.addChild(child) } @@ -256,6 +271,17 @@ trait ContextOps { self: TastyUniverse => info = info ) + final def newLocalSealedChildProxy(cls: Symbol): Symbol = { + val tflags = Private | Local + unsafeNewClassSymbol( + owner = cls, + typeName = TastyName.SimpleName(cls.fullName('$') + "$$localSealedChildProxy").toTypeName, + flags = tflags, + info = defn.LocalSealedChildProxyInfo(cls, tflags), + privateWithin = u.NoSymbol + ) + } + final def findRootSymbol(roots: Set[Symbol], name: TastyName): Option[Symbol] = { import TastyName.TypeName diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala index b1109efb23b1..dcddcbdc0d0f 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala @@ -166,6 +166,9 @@ trait TypeOps { self: TastyUniverse => )(implicit ctx: Context): TastyRepr = new SingletonEnumModuleClassCompleter(enumValue, originalFlagSet) + private[bridge] def LocalSealedChildProxyInfo(parent: Symbol, tflags: TastyFlagSet)(implicit ctx: Context): Type = + new LocalSealedChildProxyCompleter(parent, tflags) + def OpaqueTypeToBounds(tpe: Type): (Type, Type) = tpe match { case u.PolyType(tparams, tpe) => val (bounds, alias) = OpaqueTypeToBounds(tpe) @@ -494,6 +497,16 @@ trait TypeOps { self: TastyUniverse => } } + private[TypeOps] class LocalSealedChildProxyCompleter( + parent: Symbol, + tflags: TastyFlagSet + )(implicit ctx: Context) + extends BaseTastyCompleter(tflags) { + def computeInfo(sym: Symbol)(implicit ctx: Context): Unit = { + sym.info = defn.ClassInfoType(parent.tpe_* :: Nil, sym) // TODO [tasty]: check if tpe_* forces + } + } + abstract class BaseTastyCompleter( final val tflags: TastyFlagSet )(implicit capturedCtx: Context) diff --git a/test/tasty/neg/src-2/TestFooMatch.check b/test/tasty/neg/src-2/TestFooMatch.check new file mode 100644 index 000000000000..5bcfabce3eca --- /dev/null +++ b/test/tasty/neg/src-2/TestFooMatch.check @@ -0,0 +1,7 @@ +TestFooMatch_fail.scala:5: warning: match may not be exhaustive. +It would fail on the following input: Foo() + def foo(f: Foo): Unit = f match { + ^ +error: No warnings can be incurred under -Werror. 
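The comment in the hunk above explains that registering a class as its own sealed child sends `sealedDescendants` into an infinite loop, which is why a fresh proxy class is registered instead. The following toy model (my own sketch, not the compiler's data structures) shows the shape of that failure and of the workaround:

```scala
object SealedChildLoopDemo {
  // `Node` stands in for a class symbol, `children` for its registered sealed children.
  final class Node(val name: String) { var children: List[Node] = Nil }

  // A naive descendant traversal with no cycle detection, like the one the comment refers to.
  def descendants(n: Node): List[Node] =
    n.children.flatMap(c => c :: descendants(c)) // diverges if a node is its own child

  def main(args: Array[String]): Unit = {
    val foo = new Node("Foo")
    foo.children = foo :: Nil   // what the raw child annotation would amount to
    // descendants(foo)         // would recurse forever (stack overflow)

    // The fix: register a fresh proxy that merely extends Foo, keeping the traversal
    // well-founded while exhaustivity checking still sees a child it cannot enumerate
    // (hence the warning recorded in TestFooMatch.check).
    val proxy = new Node("Foo$$localSealedChildProxy")
    foo.children = proxy :: Nil
    assert(descendants(foo).map(_.name) == List("Foo$$localSealedChildProxy"))
  }
}
```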
+1 warning +1 error diff --git a/test/tasty/neg/src-2/TestFooMatch_fail.scala b/test/tasty/neg/src-2/TestFooMatch_fail.scala new file mode 100644 index 000000000000..d6c459deefca --- /dev/null +++ b/test/tasty/neg/src-2/TestFooMatch_fail.scala @@ -0,0 +1,8 @@ +// here we test unpickling a sealed child in another tasty file +package dottyi3149 + +object TestFooMatch { + def foo(f: Foo): Unit = f match { + case f: Foo.Bar => () + } +} diff --git a/test/tasty/neg/src-3/dottyi3149/foo.scala b/test/tasty/neg/src-3/dottyi3149/foo.scala new file mode 100644 index 000000000000..e7a2797ab0bd --- /dev/null +++ b/test/tasty/neg/src-3/dottyi3149/foo.scala @@ -0,0 +1,19 @@ +// here we test unpickling a sealed child in another tasty file +package dottyi3149 + +sealed class Foo +object Foo { + final class Bar extends Foo +} + +class Test { + def f = { + class Bar extends Foo + } + class C { + class Bar extends Foo + } + object O { + class Bar extends Foo + } +} diff --git a/test/tasty/pos/pre/tastytest/package.scala b/test/tasty/pos/pre/tastytest/package.scala index cc823305a7cc..179fc8aefa9d 100644 --- a/test/tasty/pos/pre/tastytest/package.scala +++ b/test/tasty/pos/pre/tastytest/package.scala @@ -5,31 +5,52 @@ package object tastytest { import scala.util.Random import scala.reflect.macros.blackbox.Context + import scala.collection.mutable + implicit final class SafeEq[T](private val t: T) extends AnyVal { final def ===[U](u: U)(implicit ev: T =:= U): Boolean = ??? } def compiletimeHasChild[T](child: String): Unit = macro Macros.hasChildImpl[T] - def compiletimeHasNestedChildren[T](children: String*): Unit = macro Macros.hasChildrenImpl[T] + def compiletimeHasNestedChildren[T](expected: String*): Unit = macro Macros.hasChildrenImpl[T] object Macros { - def hasChildrenImpl[T](c: Context)(children: c.Expr[String]*)(implicit T: c.WeakTypeTag[T]): c.Expr[Unit] = { + def hasChildrenImpl[T](c: Context)(expected: c.Expr[String]*)(implicit T: c.WeakTypeTag[T]): c.Expr[Unit] = { import c.universe._ - def findChildren(sym: Symbol): Set[Symbol] = - sym.asClass.knownDirectSubclasses.flatMap(s => findChildren(s) + s) + def findChildren(sym: Symbol): Set[Symbol] = { + def findLvlN(explore: mutable.ArrayDeque[Symbol], seen: Set[Symbol]): Set[Symbol] = { + if (explore.nonEmpty) { + val (s, rest) = (explore.head, explore.dropInPlace(1)) + val lvlN = s.asClass.knownDirectSubclasses + val unseen = lvlN -- seen + if (unseen.nonEmpty) { + findLvlN(rest ++= unseen, seen ++ unseen) + } else { + findLvlN(rest, seen) + } + } + else { + seen + } + } + + val lvl1 = sym.asClass.knownDirectSubclasses + if (lvl1.isEmpty) lvl1 + else findLvlN(mutable.ArrayDeque.from(lvl1 - sym), lvl1) + } val sym = T.tpe.typeSymbol + lazy val children = findChildren(sym) if (!sym.isClass) { c.error(c.enclosingPosition, s"${T.tpe} is not a class type; cannot inspect sealed children") } else { - children.foreach { child => + expected.foreach { child => child.tree match { case Literal(Constant(nmeString: String)) => - val children = findChildren(sym) - val contains = children.toList.map(_.fullName).exists(_ == nmeString) + val contains = children.exists(_.fullName == nmeString) if (!contains) { c.error(child.tree.pos, s"$sym does not have a child symbol $nmeString") } diff --git a/test/tasty/pos/src-2/dottyi3149/TestFooChildren.scala b/test/tasty/pos/src-2/dottyi3149/TestFooChildren.scala new file mode 100644 index 000000000000..d15b84dadfd2 --- /dev/null +++ b/test/tasty/pos/src-2/dottyi3149/TestFooChildren.scala @@ -0,0 +1,13 @@ +// here we test 
unpickling a sealed child in another tasty file +package dottyi3149 + +import tastytest._ + +object TestFooChildren { + compiletimeHasNestedChildren[Foo]( + "dottyi3149.Foo.Bar", + // "dottyi3149.Foo.dottyi3149$Foo$$localSealedChildProxy", // workaround to represent "dottyi3149.Test.Bar$1", + "dottyi3149.Test.O.Bar", + "dottyi3149.Test.C.Bar" + ) +} diff --git a/test/tasty/pos/src-3/dottyi3149/foo.scala b/test/tasty/pos/src-3/dottyi3149/foo.scala new file mode 100644 index 000000000000..e7a2797ab0bd --- /dev/null +++ b/test/tasty/pos/src-3/dottyi3149/foo.scala @@ -0,0 +1,19 @@ +// here we test unpickling a sealed child in another tasty file +package dottyi3149 + +sealed class Foo +object Foo { + final class Bar extends Foo +} + +class Test { + def f = { + class Bar extends Foo + } + class C { + class Bar extends Foo + } + object O { + class Bar extends Foo + } +} From bc4fc11bb0afb0b870c7b7c9490717498a7c91df Mon Sep 17 00:00:00 2001 From: Georgi Krastev Date: Tue, 13 Jul 2021 03:13:33 +0200 Subject: [PATCH 302/769] Specialize trait val setters (forward to overload) They are not recognized as setters due to the expanded name: * `setterIn(clazz)` relies on `setterNameInBase` * but the base is different (implementing class, not trait) Note: we can't avoid boxing, because the setter is called in the trait init method where it's generic. --- .../tools/nsc/transform/SpecializeTypes.scala | 8 ++++++-- test/files/run/t12221.check | 1 + test/files/run/t12221.scala | 16 ++++++++++++++++ 3 files changed, 23 insertions(+), 2 deletions(-) create mode 100644 test/files/run/t12221.check create mode 100644 test/files/run/t12221.scala diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index c9d9f2c9162d..a5031e894095 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -14,7 +14,7 @@ package scala package tools.nsc package transform -import scala.annotation.nowarn +import scala.annotation.{nowarn, tailrec} import scala.collection.mutable import scala.tools.nsc.symtab.Flags import scala.tools.nsc.Reporting.WarningCategory @@ -744,6 +744,10 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { enterMember(om) } + @tailrec def isTraitValSetter(sym: Symbol): Boolean = + sym.isSetter && sym.getterIn(sym.owner).isStable && + (sym.hasFlag(SYNTHESIZE_IMPL_IN_SUBCLASS) || isTraitValSetter(sym.nextOverriddenSymbol)) + for (m <- normMembers if needsSpecialization(fullEnv, m) && satisfiable(fullEnv)) { if (!m.isDeferred) addConcreteSpecMethod(m) @@ -791,7 +795,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { specMember.asInstanceOf[TermSymbol].referenced = m.alias info(specMember) = SpecialSuperAccessor(specMember) - } else if (m.isMethod && !m.hasFlag(DEFERRED) && (!m.hasFlag(ACCESSOR) || m.hasFlag(LAZY))) { // other concrete methods + } else if (m.isMethod && !m.isDeferred && (!m.isAccessor || m.isLazy || isTraitValSetter(m))) { // other concrete methods forwardToOverload(m) } else if (m.isValue && !m.isMethod) { // concrete value definition def mkAccessor(field: Symbol, name: Name) = { diff --git a/test/files/run/t12221.check b/test/files/run/t12221.check new file mode 100644 index 000000000000..d3827e75a5ca --- /dev/null +++ b/test/files/run/t12221.check @@ -0,0 +1 @@ +1.0 diff --git a/test/files/run/t12221.scala b/test/files/run/t12221.scala new file mode 100644 index 
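For background on the `isTraitValSetter` check added in this `SpecializeTypes` hunk: since the trait-fields encoding, a concrete trait `val` is compiled to a generic getter plus a synthetic setter that the implementing class must provide and that the trait's init method calls. A hand-written approximation of that encoding for the shape in the t12221 test (names are illustrative only; the real synthetic members are compiler-generated):

```scala
// Roughly what `trait GenericBase[T] { def init: T; val value: T = init }` becomes.
trait GenericBaseEncoded[T] {
  def init: T
  def value: T                            // generic getter
  protected def setValue(x: T): Unit      // synthetic setter, implemented by the class
  def traitInit(): Unit = setValue(init)  // stands in for $init$: the call site stays generic,
                                          // which is why the commit notes boxing cannot be avoided
}

class SpecializedEncoded(x: Double) extends GenericBaseEncoded[Double] {
  def init: Double = x
  private[this] var _value: Double = _
  def value: Double = _value
  protected def setValue(v: Double): Unit = _value = v
  traitInit() // the real encoding invokes the trait initializer from the class constructor
}

object TraitValEncodingDemo {
  def main(args: Array[String]): Unit =
    assert(new SpecializedEncoded(1.0).value == 1.0)
}
```

The fix makes specialization treat such a setter like any other concrete method and forward it to a specialized overload, which is exactly what `isTraitValSetter` detects.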
000000000000..8a52989b84fc --- /dev/null +++ b/test/files/run/t12221.scala @@ -0,0 +1,16 @@ +object Test { + trait GenericBase[T] { + def init: T + val value: T = init + def get: T = value + } + + class SpecializedClass[@specialized(Double)T](x: T) extends GenericBase[T] { + override def init: T = x + } + + def main(args: Array[String]): Unit = { + val x = new SpecializedClass(1.0) + println(x.get) + } +} \ No newline at end of file From ce50ba5e115b15f309d3affcb28d996233db075a Mon Sep 17 00:00:00 2001 From: Georgi Krastev Date: Tue, 13 Jul 2021 22:58:39 +0200 Subject: [PATCH 303/769] Fix scala/bug#10094 - add regression test Didn't investigate in which version it progressed. --- test/files/run/t10094.check | 1 + test/files/run/t10094.scala | 11 +++++++++++ 2 files changed, 12 insertions(+) create mode 100644 test/files/run/t10094.check create mode 100644 test/files/run/t10094.scala diff --git a/test/files/run/t10094.check b/test/files/run/t10094.check new file mode 100644 index 000000000000..45b983be36b7 --- /dev/null +++ b/test/files/run/t10094.check @@ -0,0 +1 @@ +hi diff --git a/test/files/run/t10094.scala b/test/files/run/t10094.scala new file mode 100644 index 000000000000..74f507e447d8 --- /dev/null +++ b/test/files/run/t10094.scala @@ -0,0 +1,11 @@ +trait T[@specialized(Int) S] { + def initialValue: S + var value: S = initialValue +} + +final class C[@specialized(Int) S](val initialValue: S) extends T[S] + +object Test { + def main(args: Array[String]): Unit = + println(new C("hi").initialValue) +} From 6857c8c59ec62d8c59f445b6939cd3b2c9e1190f Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Tue, 13 Jul 2021 18:17:06 -0400 Subject: [PATCH 304/769] upgrade to ASM 9.2, for JDK 18 support in optimizer --- src/intellij/scala.ipr.SAMPLE | 26 +++++++++++++------------- versions.properties | 2 +- 2 files changed, 14 insertions(+), 14 deletions(-) diff --git a/src/intellij/scala.ipr.SAMPLE b/src/intellij/scala.ipr.SAMPLE index 5bfb74e6f218..673c7eec2349 100644 --- a/src/intellij/scala.ipr.SAMPLE +++ b/src/intellij/scala.ipr.SAMPLE @@ -231,7 +231,7 @@ - + @@ -250,7 +250,7 @@ - + @@ -262,7 +262,7 @@ - + @@ -280,7 +280,7 @@ - + @@ -290,7 +290,7 @@ - + @@ -317,7 +317,7 @@ - + @@ -331,7 +331,7 @@ - + @@ -340,7 +340,7 @@ - + @@ -350,7 +350,7 @@ - + @@ -511,7 +511,7 @@ - + @@ -524,7 +524,7 @@ - + @@ -535,7 +535,7 @@ - + @@ -560,7 +560,7 @@ - + diff --git a/versions.properties b/versions.properties index 7621a21f96d6..a267143cb781 100644 --- a/versions.properties +++ b/versions.properties @@ -21,5 +21,5 @@ scala.binary.version=2.12 scala-xml.version.number=1.0.6 scala-parser-combinators.version.number=1.0.7 scala-swing.version.number=2.0.3 -scala-asm.version=9.1.0-scala-1 +scala-asm.version=9.2.0-scala-1 jline.version=2.14.6 From d985911c4e8783a8ef2958600d360e1efee2ee97 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Tue, 13 Jul 2021 18:22:18 -0400 Subject: [PATCH 305/769] upgrade to ASM 9.2, for JDK 18 support --- src/intellij/scala.ipr.SAMPLE | 28 ++++++++++++++-------------- versions.properties | 2 +- 2 files changed, 15 insertions(+), 15 deletions(-) diff --git a/src/intellij/scala.ipr.SAMPLE b/src/intellij/scala.ipr.SAMPLE index eabf6729ecde..80484746d667 100644 --- a/src/intellij/scala.ipr.SAMPLE +++ b/src/intellij/scala.ipr.SAMPLE @@ -232,7 +232,7 @@ - + @@ -243,7 +243,7 @@ - + @@ -252,7 +252,7 @@ - + @@ -266,7 +266,7 @@ - + @@ -287,7 +287,7 @@ - + @@ -296,14 +296,14 @@ - + - + @@ -312,7 +312,7 @@ - + @@ -456,7 +456,7 @@ - + @@ -465,7 +465,7 @@ - + @@ -475,7 +475,7 @@ - + @@ -506,7 +506,7 @@ 
- + @@ -522,7 +522,7 @@ - + @@ -533,7 +533,7 @@ - + diff --git a/versions.properties b/versions.properties index 971b4a002731..33e144c53a49 100644 --- a/versions.properties +++ b/versions.properties @@ -6,7 +6,7 @@ starr.version=2.13.6 # - scala-compiler: jline (% "optional") # Other usages: # - scala-asm: jar content included in scala-compiler -scala-asm.version=9.1.0-scala-1 +scala-asm.version=9.2.0-scala-1 # jna.version must be updated together with jline-terminal-jna jline.version=3.19.0 From 5ac7cc5c7609a27c2895a0065c703393e6334b6f Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Tue, 13 Jul 2021 18:22:34 -0400 Subject: [PATCH 306/769] make -target support JDK 18 --- project/ScalaOptionParser.scala | 2 +- .../scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala | 1 + .../scala/tools/nsc/settings/StandardScalaSettings.scala | 2 +- test/junit/scala/tools/nsc/settings/TargetTest.scala | 5 ++++- 4 files changed, 7 insertions(+), 3 deletions(-) diff --git a/project/ScalaOptionParser.scala b/project/ScalaOptionParser.scala index e3149a39c048..64d9db857982 100644 --- a/project/ScalaOptionParser.scala +++ b/project/ScalaOptionParser.scala @@ -140,5 +140,5 @@ object ScalaOptionParser { private def scaladocPathSettingNames = List("-doc-root-content", "-diagrams-dot-path") private def scaladocMultiStringSettingNames = List("-doc-external-doc") - private val targetSettingNames = (8 to 17).map(_.toString).flatMap(v => v :: s"jvm-1.$v" :: s"jvm-$v" :: s"1.$v" :: Nil).toList + private val targetSettingNames = (8 to 18).map(_.toString).flatMap(v => v :: s"jvm-1.$v" :: s"jvm-$v" :: s"1.$v" :: Nil).toList } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala index 0e6939a97fd3..b86d33a16ce2 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala @@ -81,6 +81,7 @@ abstract class BackendUtils extends PerRunInit { case "15" => asm.Opcodes.V15 case "16" => asm.Opcodes.V16 case "17" => asm.Opcodes.V17 + case "18" => asm.Opcodes.V18 // to be continued... }) diff --git a/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala index 02e6da5afe0e..7da06bb6c7bd 100644 --- a/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala @@ -73,7 +73,7 @@ trait StandardScalaSettings { _: MutableSettings => object StandardScalaSettings { // not final in case some separately compiled client code wanted to depend on updated values val MinTargetVersion = 8 - val MaxTargetVersion = 17 + val MaxTargetVersion = 18 private val AllTargetVersions = (MinTargetVersion to MaxTargetVersion).map(_.toString).to(List) } diff --git a/test/junit/scala/tools/nsc/settings/TargetTest.scala b/test/junit/scala/tools/nsc/settings/TargetTest.scala index 065aa4d5a98f..4925ed6a56fe 100644 --- a/test/junit/scala/tools/nsc/settings/TargetTest.scala +++ b/test/junit/scala/tools/nsc/settings/TargetTest.scala @@ -65,8 +65,11 @@ class TargetTest { check("-target:jvm-17", "17") check("-target:17", "17") + check("-target:jvm-18", "18") + check("-target:18", "18") + checkFail("-target:jvm-6") // no longer - checkFail("-target:jvm-18") // not yet... + checkFail("-target:jvm-19") // not yet... checkFail("-target:jvm-3000") // not in our lifetime checkFail("-target:msil") // really? 
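As a sanity check for the `BackendUtils` table extended above: class-file major versions increase by one per JDK release, with JDK 8 at 52, so `asm.Opcodes.V18` corresponds to 62. A compact way to state the same relationship (not part of the patch, which deliberately keeps the explicit per-version cases):

```scala
object ClassfileVersions {
  // Major class-file version for a given -target level (valid for JDK 8 and later).
  def major(target: Int): Int = {
    require(target >= 8, s"unsupported -target:$target")
    target + 44
  }

  def main(args: Array[String]): Unit = {
    assert(major(8)  == 52)
    assert(major(17) == 61)
    assert(major(18) == 62)
  }
}
```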
From 3eeed4d49a11b49859a3e03be55f670fe88af290 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Thu, 15 Jul 2021 16:14:45 -0400 Subject: [PATCH 307/769] sbt 1.5.5 (was 1.5.4) --- project/build.properties | 2 +- scripts/common | 2 +- src/intellij/scala.ipr.SAMPLE | 36 +++++++++++++------------- test/jcstress/project/build.properties | 2 +- 4 files changed, 21 insertions(+), 21 deletions(-) diff --git a/project/build.properties b/project/build.properties index 9edb75b77c28..10fd9eee04ac 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.5.4 +sbt.version=1.5.5 diff --git a/scripts/common b/scripts/common index 447ac660b6bd..5118e9ec4b17 100644 --- a/scripts/common +++ b/scripts/common @@ -11,7 +11,7 @@ else fi SBT_CMD=${SBT_CMD-sbt} -SBT_CMD="$SBT_CMD -sbt-version 1.5.4" +SBT_CMD="$SBT_CMD -sbt-version 1.5.5" # repo to publish builds integrationRepoUrl=${integrationRepoUrl-"https://scala-ci.typesafe.com/artifactory/scala-integration/"} diff --git a/src/intellij/scala.ipr.SAMPLE b/src/intellij/scala.ipr.SAMPLE index eabf6729ecde..e1df0896e37b 100644 --- a/src/intellij/scala.ipr.SAMPLE +++ b/src/intellij/scala.ipr.SAMPLE @@ -325,13 +325,13 @@ - + - + - - + + @@ -355,7 +355,7 @@ - + @@ -369,8 +369,8 @@ - - + + @@ -382,13 +382,13 @@ - + - + @@ -402,16 +402,16 @@ - + - + - + - + @@ -435,18 +435,18 @@ - - + + - + - + - + diff --git a/test/jcstress/project/build.properties b/test/jcstress/project/build.properties index 9edb75b77c28..10fd9eee04ac 100644 --- a/test/jcstress/project/build.properties +++ b/test/jcstress/project/build.properties @@ -1 +1 @@ -sbt.version=1.5.4 +sbt.version=1.5.5 From b19d3e80d96f2b38babfba60e8179bf08b379335 Mon Sep 17 00:00:00 2001 From: Georgi Krastev Date: Wed, 14 Jul 2021 00:42:24 +0200 Subject: [PATCH 308/769] Don't expand the name of accessors I couldn't find a single use case where this is needed. The callee is usually either not an accessor at all or all declarations in scope are looped over and expanded. On the other hand this fixes a bug in specialization. After all the accessor(s) could be implementing an abstract method. --- src/reflect/scala/reflect/internal/Symbols.scala | 14 ++++---------- test/files/run/t12222.check | 1 + test/files/run/t12222/Buffer_1.scala | 10 ++++++++++ test/files/run/t12222/Test_2.scala | 7 +++++++ 4 files changed, 22 insertions(+), 10 deletions(-) create mode 100644 test/files/run/t12222.check create mode 100644 test/files/run/t12222/Buffer_1.scala create mode 100644 test/files/run/t12222/Test_2.scala diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 4f4ad17caf1f..6ff5b453b12f 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -838,10 +838,10 @@ trait Symbols extends api.Symbols { self: SymbolTable => isAbstractType && !isExistential && !isTypeParameterOrSkolem && isLocalToBlock /** change name by appending $$ - * Do the same for any accessed symbols or setters/getters. + * Do the same for any accessed symbols to preserve serialization compatibility. * Implementation in TermSymbol. 
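Context for the `expandName` doc-comment change above: expansion rewrites a private member's name into an owner-qualified form so that, once the member becomes visible more widely, it cannot clash with members of other owners; expanding the accessed field consistently is what keeps serialized forms stable. A toy rendering of the naming scheme (simplified; the real implementation works on `Name`s and uses the owner's full name with `$` separators):

```scala
object ExpandedNames {
  // nme.expandedName(name, base) amounts to prefixing the member name with the
  // owning class and the "$$" separator.
  def expandedName(name: String, base: String): String = base + "$$" + name

  def main(args: Array[String]): Unit =
    assert(expandedName("value", "Buffer") == "Buffer$$value")
}
```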
*/ - def expandName(base: Symbol): Unit = { } + def expandName(base: Symbol): Unit = () // In java.lang, Predef, or scala package/package object def isInDefaultNamespace = UnqualifiedOwners(effectiveOwner) @@ -2988,18 +2988,12 @@ trait Symbols extends api.Symbols { self: SymbolTable => } /** change name by appending $$ - * Do the same for any accessed symbols or setters/getters + * Do the same for any accessed symbols to preserve serialization compatibility. */ override def expandName(base: Symbol): Unit = { if (!hasFlag(EXPANDEDNAME)) { setFlag(EXPANDEDNAME) - if (hasAccessorFlag && !isDeferred) { - accessed.expandName(base) - } - else if (hasGetter) { - getterIn(owner).expandName(base) - setterIn(owner).expandName(base) - } + if (hasAccessorFlag && !isDeferred) accessed.expandName(base) name = nme.expandedName(name.toTermName, base) } } diff --git a/test/files/run/t12222.check b/test/files/run/t12222.check new file mode 100644 index 000000000000..573541ac9702 --- /dev/null +++ b/test/files/run/t12222.check @@ -0,0 +1 @@ +0 diff --git a/test/files/run/t12222/Buffer_1.scala b/test/files/run/t12222/Buffer_1.scala new file mode 100644 index 000000000000..353ecdd4af27 --- /dev/null +++ b/test/files/run/t12222/Buffer_1.scala @@ -0,0 +1,10 @@ +trait ABuffer[@specialized(Float)T] { + def count: Int +} + +class Buffer[@specialized(Float) T](array_par: Array[T]) extends ABuffer[T] { + var array: Array[T] = array_par + var count: Int = 0 +} + +class Float32Buffer(array_par: Array[Float]) extends Buffer[Float](array_par) \ No newline at end of file diff --git a/test/files/run/t12222/Test_2.scala b/test/files/run/t12222/Test_2.scala new file mode 100644 index 000000000000..a5c975cd349e --- /dev/null +++ b/test/files/run/t12222/Test_2.scala @@ -0,0 +1,7 @@ +object Test { + def main(args: Array[String]): Unit = { + val vertices = Array[Float]() + val attribute = new Float32Buffer(vertices) + println(attribute.count) + } +} \ No newline at end of file From c4459f8134ea1211c6199d153fdd2635dfc607f6 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Thu, 24 Jun 2021 14:03:43 -0700 Subject: [PATCH 309/769] fix StringLikeTest on JDK 17 --- .../scala/collection/immutable/StringLikeTest.scala | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/test/junit/scala/collection/immutable/StringLikeTest.scala b/test/junit/scala/collection/immutable/StringLikeTest.scala index 4de7763efcd3..fd0f228162f3 100644 --- a/test/junit/scala/collection/immutable/StringLikeTest.scala +++ b/test/junit/scala/collection/immutable/StringLikeTest.scala @@ -64,10 +64,14 @@ class StringLikeTest { assertEquals("no trim toDouble", 2.0d, sOk.toDouble, 0.1d) assertEquals("no trim toFloat", 2.0f, sOk.toFloat, 0.1f) - assertThrows[java.lang.NumberFormatException](sNull.toInt, {s => s == "null"}) - assertThrows[java.lang.NumberFormatException](sNull.toLong, {s => s == "null"}) - assertThrows[java.lang.NumberFormatException](sNull.toShort, {s => s == "null"}) - assertThrows[java.lang.NumberFormatException](sNull.toByte, {s => s == "null"}) + // JDK 17 gives the nicer message + def isNullStringMessage(s: String) = + s == "null" || s == "Cannot parse null string" + + assertThrows[java.lang.NumberFormatException](sNull.toInt, isNullStringMessage) + assertThrows[java.lang.NumberFormatException](sNull.toLong, isNullStringMessage) + assertThrows[java.lang.NumberFormatException](sNull.toShort, isNullStringMessage) + assertThrows[java.lang.NumberFormatException](sNull.toByte, isNullStringMessage) 
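The `isNullStringMessage` predicate above exists because the JDK changed the exception text for null inputs, so the test must accept either wording. A standalone illustration (messages as listed in the test itself, not independently re-verified here):

```scala
object NullParseMessages {
  def main(args: Array[String]): Unit = {
    val sNull: String = null
    val msg =
      try { Integer.parseInt(sNull); "no exception" }
      catch { case e: NumberFormatException => e.getMessage }

    // JDK 8 reports "null"; JDK 17 reports the friendlier "Cannot parse null string".
    assert(msg == "null" || msg == "Cannot parse null string")
  }
}
```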
assertThrows[java.lang.NullPointerException](sNull.toDouble) assertThrows[java.lang.NullPointerException](sNull.toFloat) From 1617f38c70f14b595d681f1836abd6a441745e74 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Fri, 16 Jul 2021 17:23:11 -0400 Subject: [PATCH 310/769] make MultiReleaseJarTest pass on Windows --- .../nsc/classpath/MultiReleaseJarTest.scala | 29 +++++++------------ 1 file changed, 11 insertions(+), 18 deletions(-) diff --git a/test/junit/scala/tools/nsc/classpath/MultiReleaseJarTest.scala b/test/junit/scala/tools/nsc/classpath/MultiReleaseJarTest.scala index 97e551bbf8ef..542408f6b1cd 100644 --- a/test/junit/scala/tools/nsc/classpath/MultiReleaseJarTest.scala +++ b/test/junit/scala/tools/nsc/classpath/MultiReleaseJarTest.scala @@ -7,28 +7,24 @@ import java.util.jar.Attributes.Name import org.junit.{Assert, Test} import scala.tools.nsc.{CloseableRegistry, Global, Settings} -import scala.tools.testkit.BytecodeTesting -import scala.util.Properties +import scala.tools.testkit.{BytecodeTesting, ForDeletion} +import scala.util.{Properties, Using} class MultiReleaseJarTest extends BytecodeTesting { import compiler._ @Test def mrJar(): Unit = { - if (!Properties.isJavaAtLeast("9")) { println("skipping mrJar() on old JDK"); return} // TODO test that the compiler warns that --release is unsupported. + if (!Properties.isJavaAtLeast("9")) return // TODO test that the compiler warns that --release is unsupported. - val temp1 = Files.createTempFile("mr-jar-test-", ".jar") + // TODO test fails if both Global runs look at the same JAR on disk. Caching problem in our classpath implementation? So use two JARs. + def makeTemp() = Files.createTempFile("mr-jar-test-", ".jar") + Using.resources(ForDeletion(makeTemp()), ForDeletion(makeTemp())) { (temp1, temp2) => - // TODO test fails if both Global runs look at the same JAR on disk. Caching problem in our classpath implementation? - // val temp2 = temp1 - val temp2 = Files.createTempFile("mr-jar-test-", ".jar") - val cleanup = new CloseableRegistry - - try { def code(newApi: String) = s"package p1; abstract class Versioned { def oldApi: Int; $newApi }" val oldC = compileToBytes(code("")).head._2 val newC = compileToBytes(code("def newApi: Int")).head._2 - List(temp1, temp2).foreach(temp => createZip(temp, List( + List(temp1.path, temp2.path).foreach(temp => createZip(temp, List( "/p1/Versioned.class" -> oldC, "/META-INF/versions/9/p1/Versioned.class" -> newC, "/META-INF/MANIFEST.MF" -> createManifest) @@ -39,24 +35,21 @@ class MultiReleaseJarTest extends BytecodeTesting { settings.usejavacp.value = true settings.classpath.value = jarPath.toAbsolutePath.toString val g = new Global(settings) - cleanup.registerCloseable(g) settings.release.value = release new g.Run val decls = g.rootMirror.staticClass("p1.Versioned").info.decls.filterNot(_.isConstructor).map(_.name.toString).toList.sorted + g.close() decls } - Assert.assertEquals(List("newApi", "oldApi"), declsOfC(temp1, "9")) - Assert.assertEquals(List("oldApi"), declsOfC(temp2, "8")) - } finally { - cleanup.close() - List(temp1, temp2).foreach(Files.deleteIfExists) + Assert.assertEquals(List("newApi", "oldApi"), declsOfC(temp1.path, "9")) + Assert.assertEquals(List("oldApi"), declsOfC(temp2.path, "8")) } } @Test def ctSymTest(): Unit = { - if (!Properties.isJavaAtLeast("9")) { println("skipping mrJar() on old JDK"); return} // TODO test that the compiler warns that --release is unsupported. 
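The rewritten `mrJar` test above relies on `scala.util.Using.resources` so that both temporary JARs are deleted even if the body throws. A self-contained sketch of the pattern, with a stand-in for the testkit `ForDeletion` helper (whose definition is not shown in this patch):

```scala
import java.nio.file.{Files, Path}
import scala.util.Using

object TempFileCleanup {
  // Stand-in for scala.tools.testkit.ForDeletion: releasing the resource deletes the file.
  final case class Deletable(path: Path) extends AutoCloseable {
    def close(): Unit = Files.deleteIfExists(path)
  }

  def main(args: Array[String]): Unit = {
    Using.resources(
      Deletable(Files.createTempFile("mr-jar-a-", ".jar")),
      Deletable(Files.createTempFile("mr-jar-b-", ".jar"))
    ) { (a, b) =>
      // work with a.path and b.path; both files are removed afterwards, even on exception
      println(s"created ${a.path} and ${b.path}")
    }
  }
}
```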
+ if (!Properties.isJavaAtLeast("9")) return // TODO test that the compiler warns that --release is unsupported. val cleanup = new CloseableRegistry def lookup(className: String, release: String): Boolean = { From f34696c34e12283ff5ca24f730f205cd909f208e Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Wed, 23 Jun 2021 10:56:42 -0700 Subject: [PATCH 311/769] Windows CI: add JDK 17 (alongside 8) --- .github/workflows/ci.yml | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 70647980f2e2..65f8d9429d88 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -14,6 +14,10 @@ jobs: runs-on: windows-latest strategy: fail-fast: false + matrix: + include: + - java: 8 + - java: 17-ea steps: - run: git config --global core.autocrlf false - name: Checkout @@ -26,7 +30,7 @@ jobs: uses: actions/setup-java@v2 with: distribution: adopt - java-version: 8 + java-version: ${{matrix.java}} - name: Cache uses: actions/cache@v2 From 24571e1d3ef0de266fe57fad109bcd6805ddd8cb Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Tue, 20 Jul 2021 12:07:45 +0200 Subject: [PATCH 312/769] Silence unused-nowarn warnings --- build.sbt | 4 +++- src/compiler/scala/tools/nsc/javac/JavaParsers.scala | 5 ----- 2 files changed, 3 insertions(+), 6 deletions(-) diff --git a/build.sbt b/build.sbt index 82208895c7ad..02aa6619bd21 100644 --- a/build.sbt +++ b/build.sbt @@ -158,6 +158,8 @@ lazy val commonSettings = instanceSettings ++ clearSourceAndResourceDirectories // we don't want optimizer warnings to interfere with `-Werror`. we have hundreds of such warnings // when the optimizer is enabled (as it is in CI and release builds, though not in local development) Compile / scalacOptions += "-Wconf:cat=optimizer:is", + // We use @nowarn for some methods that are deprecated in Java > 8 + Compile / scalacOptions += "-Wconf:cat=unused-nowarn:s", Compile / scalacOptions ++= Seq("-deprecation", "-feature"), Compile / doc / scalacOptions ++= Seq( "-doc-footer", "epfl", @@ -227,7 +229,7 @@ lazy val commonSettings = instanceSettings ++ clearSourceAndResourceDirectories lazy val fatalWarningsSettings = Seq( Compile / scalacOptions ++= { - if (fatalWarnings.value) Seq("-Werror", "-Wconf:cat=unused-nowarn:is") + if (fatalWarnings.value) Seq("-Werror") else Nil }, Compile / doc / scalacOptions -= "-Werror", // there are too many doc errors to enable this right now diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala index 8bccad6b8250..fdd81da701da 100644 --- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala +++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala @@ -752,11 +752,6 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { makeTemplate(List(), statics)) } - def importCompanionObject(cdef: ClassDef): Tree = - atPos(cdef.pos) { - Import(Ident(cdef.name.toTermName), ImportSelector.wildList) - } - def addCompanionObject(statics: List[Tree], cdef: ClassDef): List[Tree] = List(makeCompanionObject(cdef, statics), cdef) From b124a5434f19b90cdbc0afe3cbf4505b4019f701 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Sun, 4 Jul 2021 02:39:02 -0700 Subject: [PATCH 313/769] Accept supplementary characters in identifiers Also accept supplementary characters in simple identifiers in string interpolation, and as leading characters of varids. Reject a supplementary character as a char literal. Clarify Unicode support in the spec. 
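The build.sbt part of this patch reads more easily with the `-Wconf` grammar in mind: each clause pairs filters with an action, where `:s` silences and `:is` downgrades to an info summary, and `-Werror` (added only when fatal warnings are enabled) then fails the build on anything still reported as a warning. A condensed sbt-style sketch of the combination:

```scala
// Condensed sketch; the real build applies -Werror conditionally via fatalWarningsSettings.
Compile / scalacOptions ++= Seq(
  "-Wconf:cat=optimizer:is",    // optimizer warnings become an info summary
  "-Wconf:cat=unused-nowarn:s", // silence the lint about @nowarn annotations that suppress nothing
  "-Werror"                     // remaining warnings fail the compilation
)
```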
Clarify letter and special operator symbols in precedence table. --- build.sbt | 1 + spec/01-lexical-syntax.md | 14 +- spec/06-expressions.md | 4 +- .../scala/tools/nsc/ast/parser/Parsers.scala | 2 +- .../scala/tools/nsc/ast/parser/Scanners.scala | 199 ++++++++++++------ .../symtab/classfile/AbstractFileReader.scala | 10 +- .../scala/tools/partest/DirectTest.scala | 1 + src/partest/scala/tools/partest/package.scala | 14 -- .../scala/reflect/internal/StdNames.scala | 11 +- .../scala/tools/testkit/AssertUtil.scala | 19 ++ test/files/neg/surrogates.check | 4 + test/files/neg/surrogates.scala | 4 + test/files/pos/surrogates.scala | 28 +++ test/files/run/t12276.scala | 3 +- test/files/run/t1406.scala | 32 +++ test/files/run/t1406b.check | 6 + test/files/run/t1406b.scala | 22 ++ test/files/run/t9915/Test_2.scala | 14 +- .../scala/tools/testkit/AssertUtilTest.scala | 6 + 19 files changed, 287 insertions(+), 107 deletions(-) create mode 100644 test/files/neg/surrogates.check create mode 100644 test/files/neg/surrogates.scala create mode 100644 test/files/pos/surrogates.scala create mode 100644 test/files/run/t1406.scala create mode 100644 test/files/run/t1406b.check create mode 100644 test/files/run/t1406b.scala diff --git a/build.sbt b/build.sbt index 02aa6619bd21..058e68edc911 100644 --- a/build.sbt +++ b/build.sbt @@ -719,6 +719,7 @@ lazy val junit = project.in(file("test") / "junit") "-feature", "-Xlint:-valpattern,_", "-Wconf:msg=match may not be exhaustive:s", // if we missed a case, all that happens is the test fails + "-Wconf:cat=lint-nullary-unit&site=.*Test:s", // normal unit test style "-Ypatmat-exhaust-depth", "40", // despite not caring about patmat exhaustiveness, we still get warnings for this ), Compile / javacOptions ++= Seq("-Xlint"), diff --git a/spec/01-lexical-syntax.md b/spec/01-lexical-syntax.md index c703b49c0ef3..3dbed39d6806 100644 --- a/spec/01-lexical-syntax.md +++ b/spec/01-lexical-syntax.md @@ -6,13 +6,11 @@ chapter: 1 # Lexical Syntax -Scala programs are written using the Unicode Basic Multilingual Plane -(_BMP_) character set; Unicode supplementary characters are not -presently supported. This chapter defines the two modes of Scala's -lexical syntax, the Scala mode, and the _XML mode_. If not -otherwise mentioned, the following descriptions of Scala tokens refer -to _Scala mode_, and literal characters ‘c’ refer to the ASCII fragment -`\u0000` – `\u007F`. +Scala source code consists of Unicode text. + +The program text is tokenized as described in this chapter. +See the last section for special support for XML literals, +which are parsed in _XML mode_. To construct tokens, characters are distinguished according to the following classes (Unicode general category given in parentheses): @@ -74,7 +72,7 @@ or `_`, and _constant identifiers_, which do not. For this purpose, lower case letters include not only a-z, but also all characters in Unicode category Ll (lowercase letter), as well as all letters that have contributory property -Other_Lowercase, except characters in category Nl (letter numerals) +Other_Lowercase, except characters in category Nl (letter numerals), which are never taken as lower case. The following are examples of variable identifiers: diff --git a/spec/06-expressions.md b/spec/06-expressions.md index da88cbaa5ae5..0387ce17e7f8 100644 --- a/spec/06-expressions.md +++ b/spec/06-expressions.md @@ -659,7 +659,7 @@ character. Characters are listed below in increasing order of precedence, with characters on the same line having the same precedence. 
```scala -(all letters) +(all letters, as defined in [chapter 1](01-lexical-syntax.html), including `_` and `$`) | ^ & @@ -668,7 +668,7 @@ precedence, with characters on the same line having the same precedence. : + - * / % -(all other special characters) +(other operator characters, as defined in [chapter 1](01-lexical-syntax.html), including Unicode categories `Sm` and `So`) ``` That is, operators starting with a letter have lowest precedence, diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index ff9b8747f17c..adc577f54c86 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -264,7 +264,7 @@ self => if (syntaxErrors.isEmpty) firstTry else in.healBraces() match { case Nil => showSyntaxErrors() ; firstTry - case patches => (this withPatches patches).parse() + case patches => withPatches(patches).parse() } } } diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala index 17b46da9191c..b40ad37f6bf2 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala @@ -172,7 +172,45 @@ trait Scanners extends ScannersCommon { /** A switch whether operators at the start of lines can be infix operators. */ private var allowLeadingInfixOperators = true - private def isDigit(c: Char) = java.lang.Character isDigit c + private def isDigit(c: Char) = Character.isDigit(c) + + import Character.{isHighSurrogate, isLowSurrogate, isUnicodeIdentifierPart, isUnicodeIdentifierStart, isValidCodePoint, toCodePoint} + + // given char (ch) is high surrogate followed by low, codepoint passes predicate. + // true means supplementary chars were put to buffer. + // strict to require low surrogate (if not in string literal). 
+ private def isSupplementary(high: Char, test: Int => Boolean, strict: Boolean = true): Boolean = + isHighSurrogate(high) && { + var res = false + nextChar() + val low = ch + if (isLowSurrogate(low)) { + nextChar() + val codepoint = toCodePoint(high, low) + if (isValidCodePoint(codepoint) && test(codepoint)) { + putChar(high) + putChar(low) + res = true + } else + syntaxError(f"illegal character '\\u$high%04x\\u$low%04x'") + } else if (!strict) { + putChar(high) + res = true + } else + syntaxError(f"illegal character '\\u$high%04x' missing low surrogate") + res + } + private def atSupplementary(ch: Char, f: Int => Boolean): Boolean = + isHighSurrogate(ch) && { + val hi = ch + val r = lookaheadReader + r.nextRawChar() + val lo = r.ch + isLowSurrogate(lo) && { + val codepoint = toCodePoint(hi, lo) + isValidCodePoint(codepoint) && f(codepoint) + } + } private var openComments = 0 final protected def putCommentChar(): Unit = { processCommentChar(); nextChar() } @@ -705,14 +743,18 @@ trait Scanners extends ScannersCommon { syntaxError("empty character literal (use '\\'' for single quote)") else { nextChar() - token = CHARLIT - setStrVal() + if (cbuf.length != 1) + syntaxError("illegal codepoint in Char constant: " + cbuf.toString.map(c => f"\\u$c%04x").mkString("'", "", "'")) + else { + token = CHARLIT + setStrVal() + } } - } else if (isEmptyCharLit) { + } + else if (isEmptyCharLit) syntaxError("empty character literal") - } else { + else unclosedCharLit() - } } else unclosedCharLit() } @@ -755,7 +797,7 @@ trait Scanners extends ScannersCommon { } else if (ch == '\u2190') { deprecationWarning("The unicode arrow `←` is deprecated, use `<-` instead. If you still wish to display it as one character, consider using a font with programming ligatures such as Fira Code.", "2.13.0") nextChar(); token = LARROW - } else if (Character.isUnicodeIdentifierStart(ch)) { + } else if (isUnicodeIdentifierStart(ch)) { putChar(ch) nextChar() getIdentRest() @@ -763,8 +805,10 @@ trait Scanners extends ScannersCommon { putChar(ch) nextChar() getOperatorRest() + } else if (isSupplementary(ch, isUnicodeIdentifierStart)) { + getIdentRest() } else { - syntaxError("illegal character '" + ("" + '\\' + 'u' + "%04x".format(ch.toInt)) + "'") + syntaxError(f"illegal character '\\u$ch%04x'") nextChar() } } @@ -831,13 +875,15 @@ trait Scanners extends ScannersCommon { case SU => // strangely enough, Character.isUnicodeIdentifierPart(SU) returns true! 
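The `isSupplementary`/`atSupplementary` helpers introduced above are easier to follow with a concrete character in hand: the Deseret letter 𐐀 (U+10400) used in the new tests arrives as the surrogate pair `\uD801\uDC00`, and only the combined code point passes the identifier predicates. A quick standalone check of the underlying `java.lang.Character` behaviour:

```scala
object SupplementaryCheck {
  def main(args: Array[String]): Unit = {
    val high = '\uD801'
    val low  = '\uDC00'
    assert(Character.isHighSurrogate(high) && Character.isLowSurrogate(low))

    val cp = Character.toCodePoint(high, low)
    assert(cp == 0x10400)                              // 𐐀, DESERET CAPITAL LETTER LONG I
    assert(Character.isValidCodePoint(cp))
    assert(Character.isUnicodeIdentifierStart(cp))     // the code point is a letter
    assert(!Character.isUnicodeIdentifierStart(high))  // a lone surrogate is not
  }
}
```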
finishNamed() case _ => - if (Character.isUnicodeIdentifierPart(ch)) { + if (isUnicodeIdentifierPart(ch)) { putChar(ch) nextChar() getIdentRest() - } else { - finishNamed() } + else if (isSupplementary(ch, isUnicodeIdentifierPart)) + getIdentRest() + else + finishNamed() } @tailrec @@ -955,6 +1001,25 @@ trait Scanners extends ScannersCommon { } getStringPart(multiLine, seenEscapedQuote || q) } else if (ch == '$') { + @tailrec def getInterpolatedIdentRest(): Unit = + if (ch != SU && isUnicodeIdentifierPart(ch)) { + putChar(ch) + nextRawChar() + getInterpolatedIdentRest() + } else if (atSupplementary(ch, isUnicodeIdentifierPart)) { + putChar(ch) + nextRawChar() + putChar(ch) + nextRawChar() + getInterpolatedIdentRest() + } else { + next.token = IDENTIFIER + next.name = newTermName(cbuf.toCharArray) + cbuf.clear() + val idx = next.name.start - kwOffset + if (idx >= 0 && idx < kwArray.length) + next.token = kwArray(idx) + } nextRawChar() if (ch == '$' || ch == '"') { putChar(ch) @@ -968,32 +1033,29 @@ trait Scanners extends ScannersCommon { finishStringPart() nextRawChar() next.token = USCORE - } else if (Character.isUnicodeIdentifierStart(ch)) { + } else if (isUnicodeIdentifierStart(ch)) { finishStringPart() - do { - putChar(ch) - nextRawChar() - } while (ch != SU && Character.isUnicodeIdentifierPart(ch)) - next.token = IDENTIFIER - next.name = newTermName(cbuf.toString) - cbuf.clear() - val idx = next.name.start - kwOffset - if (idx >= 0 && idx < kwArray.length) { - next.token = kwArray(idx) - } + putChar(ch) + nextRawChar() + getInterpolatedIdentRest() + } else if (atSupplementary(ch, isUnicodeIdentifierStart)) { + finishStringPart() + putChar(ch) + nextRawChar() + putChar(ch) + nextRawChar() + getInterpolatedIdentRest() } else { val expectations = "$$, $\", $identifier or ${expression}" syntaxError(s"invalid string interpolation $$$ch, expected: $expectations") } } else { val isUnclosedLiteral = (ch == SU || (!multiLine && (ch == CR || ch == LF))) - if (isUnclosedLiteral) { + if (isUnclosedLiteral) if (multiLine) incompleteInputError("unclosed multi-line string literal") - else { + else unclosedStringLit(seenEscapedQuote) - } - } else { putChar(ch) nextRawChar() @@ -1027,53 +1089,38 @@ trait Scanners extends ScannersCommon { false } - /** copy current character into cbuf, interpreting any escape sequences, - * and advance to next character. + /** Copy current character into cbuf, interpreting any escape sequences, + * and advance to next character. Surrogate pairs are consumed (see check + * at fetchSingleQuote), but orphan surrogate is allowed. 
*/ protected def getLitChar(): Unit = if (ch == '\\') { nextChar() - if ('0' <= ch && ch <= '7') { - val start = charOffset - 2 - val leadch: Char = ch - var oct: Int = digit2int(ch, 8) - nextChar() - if ('0' <= ch && ch <= '7') { - oct = oct * 8 + digit2int(ch, 8) - nextChar() - if (leadch <= '3' && '0' <= ch && ch <= '7') { - oct = oct * 8 + digit2int(ch, 8) - nextChar() - } - } - val alt = if (oct == LF) "\\n" else "\\u%04x" format oct - syntaxError(start, s"octal escape literals are unsupported: use $alt instead") - putChar(oct.toChar) - } else { - if (ch == 'u') { - if (getUEscape()) nextChar() - } - else { - ch match { - case 'b' => putChar('\b') - case 't' => putChar('\t') - case 'n' => putChar('\n') - case 'f' => putChar('\f') - case 'r' => putChar('\r') - case '\"' => putChar('\"') - case '\'' => putChar('\'') - case '\\' => putChar('\\') - case _ => invalidEscape() - } - nextChar() - } - } - } else { + charEscape() + } else if (!isSupplementary(ch, _ => true, strict = false)) { putChar(ch) nextChar() } - private def getUEscape(): Boolean = { + private def charEscape(): Unit = { + var bump = true + ch match { + case 'b' => putChar('\b') + case 't' => putChar('\t') + case 'n' => putChar('\n') + case 'f' => putChar('\f') + case 'r' => putChar('\r') + case '\"' => putChar('\"') + case '\'' => putChar('\'') + case '\\' => putChar('\\') + case 'u' => bump = uEscape() + case x if '0' <= x && x <= '7' => bump = octalEscape() + case _ => invalidEscape() + } + if (bump) nextChar() + } + + private def uEscape(): Boolean = { while (ch == 'u') nextChar() var codepoint = 0 var digitsRead = 0 @@ -1094,7 +1141,25 @@ trait Scanners extends ScannersCommon { putChar(found) true } - + + private def octalEscape(): Boolean = { + val start = charOffset - 2 + val leadch: Char = ch + var oct: Int = digit2int(ch, 8) + nextChar() + if ('0' <= ch && ch <= '7') { + oct = oct * 8 + digit2int(ch, 8) + nextChar() + if (leadch <= '3' && '0' <= ch && ch <= '7') { + oct = oct * 8 + digit2int(ch, 8) + nextChar() + } + } + val alt = if (oct == LF) "\\n" else f"\\u$oct%04x" + syntaxError(start, s"octal escape literals are unsupported: use $alt instead") + putChar(oct.toChar) + false + } protected def invalidEscape(): Unit = { syntaxError(charOffset - 1, "invalid escape character") diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala index ca1378e6c87e..faf69d5769e3 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala @@ -27,9 +27,7 @@ import scala.tools.nsc.io.AbstractFile */ final class AbstractFileReader(val buf: Array[Byte]) extends DataReader { @deprecated("Use other constructor", "2.13.0") - def this(file: AbstractFile) = { - this(file.toByteArray) - } + def this(file: AbstractFile) = this(file.toByteArray) /** the current input pointer */ @@ -67,9 +65,8 @@ final class AbstractFileReader(val buf: Array[Byte]) extends DataReader { def getByte(mybp: Int): Byte = buf(mybp) - def getBytes(mybp: Int, bytes: Array[Byte]): Unit = { + def getBytes(mybp: Int, bytes: Array[Byte]): Unit = System.arraycopy(buf, mybp, bytes, 0, bytes.length) - } /** extract a character at position bp from buf */ @@ -95,9 +92,8 @@ final class AbstractFileReader(val buf: Array[Byte]) extends DataReader { */ def getDouble(mybp: Int): Double = longBitsToDouble(getLong(mybp)) - def getUTF(mybp: Int, len: Int): String = { + def 
getUTF(mybp: Int, len: Int): String = new DataInputStream(new ByteArrayInputStream(buf, mybp, len)).readUTF - } /** skip next 'n' bytes */ diff --git a/src/partest/scala/tools/partest/DirectTest.scala b/src/partest/scala/tools/partest/DirectTest.scala index 17de444bb7c4..d923829b8c14 100644 --- a/src/partest/scala/tools/partest/DirectTest.scala +++ b/src/partest/scala/tools/partest/DirectTest.scala @@ -45,6 +45,7 @@ abstract class DirectTest { protected def pathOf(locations: String*) = locations.mkString(sys.props("path.separator")) // override to add additional settings besides -d testOutput.path + // default is -usejavacp def extraSettings: String = "-usejavacp" // a default Settings object using only extraSettings def settings: Settings = newSettings(CommandLineParser.tokenize(extraSettings)) diff --git a/src/partest/scala/tools/partest/package.scala b/src/partest/scala/tools/partest/package.scala index d3e5f070eed9..5484b5dc8b94 100644 --- a/src/partest/scala/tools/partest/package.scala +++ b/src/partest/scala/tools/partest/package.scala @@ -19,7 +19,6 @@ import scala.concurrent.duration.Duration import scala.io.Codec import scala.jdk.CollectionConverters._ import scala.tools.nsc.util.Exceptional -import scala.util.chaining._ package object partest { type File = java.io.File @@ -180,17 +179,4 @@ package object partest { def isDebug = sys.props.contains("partest.debug") || sys.env.contains("PARTEST_DEBUG") def debugSettings = sys.props.getOrElse("partest.debug.settings", "") def log(msg: => Any): Unit = if (isDebug) Console.err.println(msg) - - private val printable = raw"\p{Print}".r - - def hexdump(s: String): Iterator[String] = { - var offset = 0 - def hex(bytes: Array[Byte]) = bytes.map(b => f"$b%02x").mkString(" ") - def charFor(byte: Byte): Char = byte.toChar match { case c @ printable() => c ; case _ => '.' } - def ascii(bytes: Array[Byte]) = bytes.map(charFor).mkString - def format(bytes: Array[Byte]): String = - f"$offset%08x ${hex(bytes.slice(0, 8))}%-24s ${hex(bytes.slice(8, 16))}%-24s |${ascii(bytes)}|" - .tap(_ => offset += bytes.length) - s.getBytes(codec.charSet).grouped(16).map(format) - } } diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala index fb4444800098..926fca90e649 100644 --- a/src/reflect/scala/reflect/internal/StdNames.scala +++ b/src/reflect/scala/reflect/internal/StdNames.scala @@ -424,8 +424,17 @@ trait StdNames { /** Is name a variable name? 
*/ def isVariableName(name: Name): Boolean = { + import Character.{isHighSurrogate, isLowSurrogate, isLetter, isLowerCase, isValidCodePoint, toCodePoint} val first = name.startChar - ( ((first.isLower && first.isLetter) || first == '_') + def isLowerLetterSupplementary: Boolean = + first == '$' && { + val decoded = name.decoded + isHighSurrogate(decoded.charAt(0)) && decoded.length > 1 && isLowSurrogate(decoded.charAt(1)) && { + val codepoint = toCodePoint(decoded.charAt(0), decoded.charAt(1)) + isValidCodePoint(codepoint) && isLetter(codepoint) && isLowerCase(codepoint) + } + } + ( ((first.isLower && first.isLetter) || first == '_' || isLowerLetterSupplementary) && (name != nme.false_) && (name != nme.true_) && (name != nme.null_) diff --git a/src/testkit/scala/tools/testkit/AssertUtil.scala b/src/testkit/scala/tools/testkit/AssertUtil.scala index e969376a71d9..47d41aa29d31 100644 --- a/src/testkit/scala/tools/testkit/AssertUtil.scala +++ b/src/testkit/scala/tools/testkit/AssertUtil.scala @@ -51,6 +51,25 @@ object AssertUtil { // junit fail is Unit def fail(message: String): Nothing = throw new AssertionError(message) + private val printable = raw"\p{Print}".r + + def hexdump(s: String): Iterator[String] = { + import scala.io.Codec + val codec: Codec = Codec.UTF8 + var offset = 0 + def hex(bytes: Array[Byte]) = bytes.map(b => f"$b%02x").mkString(" ") + def charFor(byte: Byte): Char = byte.toChar match { case c @ printable() => c ; case _ => '.' } + def ascii(bytes: Array[Byte]) = bytes.map(charFor).mkString + def format(bytes: Array[Byte]): String = + f"$offset%08x ${hex(bytes.slice(0, 8))}%-24s ${hex(bytes.slice(8, 16))}%-24s |${ascii(bytes)}|" + .tap(_ => offset += bytes.length) + s.getBytes(codec.charSet).grouped(16).map(format) + } + + private def dump(s: String) = hexdump(s).mkString("\n") + def assertEqualStrings(expected: String)(actual: String) = + assert(expected == actual, s"Expected:\n${dump(expected)}\nActual:\n${dump(actual)}") + private final val timeout = 60 * 1000L // wait a minute private implicit class `ref helper`[A](val r: Reference[A]) extends AnyVal { diff --git a/test/files/neg/surrogates.check b/test/files/neg/surrogates.check new file mode 100644 index 000000000000..3521b9b72817 --- /dev/null +++ b/test/files/neg/surrogates.check @@ -0,0 +1,4 @@ +surrogates.scala:3: error: illegal codepoint in Char constant: '\ud801\udc00' + def `too wide for Char` = '𐐀' + ^ +1 error diff --git a/test/files/neg/surrogates.scala b/test/files/neg/surrogates.scala new file mode 100644 index 000000000000..d8e2ef545a18 --- /dev/null +++ b/test/files/neg/surrogates.scala @@ -0,0 +1,4 @@ + +class C { + def `too wide for Char` = '𐐀' +} diff --git a/test/files/pos/surrogates.scala b/test/files/pos/surrogates.scala new file mode 100644 index 000000000000..1b710ad901ae --- /dev/null +++ b/test/files/pos/surrogates.scala @@ -0,0 +1,28 @@ + +// allow supplementary chars in identifiers + +class 𐐀 { + def 𐐀 = 42 + + // regression check: anything goes in strings + def x = "𐐀" + def y = s"$𐐀" + def w = s" 𐐀" +} + +case class 𐐀𐐀(n: Int) { + def 𐐀𐐀 = n + def `𐐀𐐀1` = n + n +} + +// uncontroversially, orphan surrogates may be introduced +// via unicode escape. 
+class Construction { + def hi = '\ud801' + def lo = '\udc00' + def endhi = "abc\ud801" + def startlo = "\udc00xyz" + def reversed = "xyz\udc00\ud801abc" +} + +// was: error: illegal character '\ud801', '\udc00' diff --git a/test/files/run/t12276.scala b/test/files/run/t12276.scala index 50ef6b0edc5e..36fbbbc6c558 100644 --- a/test/files/run/t12276.scala +++ b/test/files/run/t12276.scala @@ -1,6 +1,7 @@ import scala.tools.nsc.Settings import scala.tools.nsc.interpreter.shell.{ILoop, ShellConfig} -import scala.tools.partest.{hexdump, ReplTest} +import scala.tools.partest.ReplTest +import scala.tools.testkit.AssertUtil.hexdump object Test extends ReplTest { def code = s""" diff --git a/test/files/run/t1406.scala b/test/files/run/t1406.scala new file mode 100644 index 000000000000..c027771716a8 --- /dev/null +++ b/test/files/run/t1406.scala @@ -0,0 +1,32 @@ + +import scala.tools.partest.DirectTest + +object Test extends DirectTest { + // for reference, UTF-8 of U0 + //val data = Array(0xed, 0xa0, 0x81).map(_.asInstanceOf[Byte]) + def U0 = "\ud801" + def U1 = "\udc00" + // \u10428 isLetter and isLowerCase + def U2 = "\ud801" + def U3 = "\udc28" + def code = + s"""class C { + | def x = "$U0" + | def y = "$U1" + | def `$U0` = x + | def `$U1` = y + | + | def f(x: Any): Boolean = x match { + | case ${U2}${U3}XYZ: String => true + | case $U2$U3 => true + | } + | def g(x: Any) = x match { + | case $U2$U3 @ _ => $U2$U3 + | } + |}""".stripMargin + + def show(): Unit = { + assert(U0.length == 1) + assert(compile()) + } +} diff --git a/test/files/run/t1406b.check b/test/files/run/t1406b.check new file mode 100644 index 000000000000..407e44adf89d --- /dev/null +++ b/test/files/run/t1406b.check @@ -0,0 +1,6 @@ +newSource1.scala:4: error: illegal character '\ud801' missing low surrogate + def ? = x + ^ +newSource1.scala:5: error: illegal character '\udc00' + def ? 
= y + ^ diff --git a/test/files/run/t1406b.scala b/test/files/run/t1406b.scala new file mode 100644 index 000000000000..bd1868a642fb --- /dev/null +++ b/test/files/run/t1406b.scala @@ -0,0 +1,22 @@ + +import scala.tools.partest.DirectTest + +object Test extends DirectTest { + // for reference, UTF-8 of U0 + //val data = Array(0xed, 0xa0, 0x81).map(_.asInstanceOf[Byte]) + def U0 = "\ud801" + def U1 = "\udc00" + def code = + s"""class C { + | def x = "$U0" + | def y = "$U1" + | def $U0 = x + | def $U1 = y + |}""".stripMargin + + def show(): Unit = { + assert(U0.length == 1) + assert(!compile()) + } +} + diff --git a/test/files/run/t9915/Test_2.scala b/test/files/run/t9915/Test_2.scala index afed667cc6e5..f26f1c1a3d91 100644 --- a/test/files/run/t9915/Test_2.scala +++ b/test/files/run/t9915/Test_2.scala @@ -1,12 +1,14 @@ +import scala.tools.testkit.AssertUtil.assertEqualStrings + object Test extends App { val c = new C_1 - assert(c.nulled == "X\u0000ABC") // "X\000ABC" - assert(c.supped == "𐒈𐒝𐒑𐒛𐒐𐒘𐒕𐒖") + assert(C_1.NULLED.length == "XYABC".length) + assert(C_1.SUPPED.codePointCount(0, C_1.SUPPED.length) == 8) - assert(C_1.NULLED == "X\u0000ABC") // "X\000ABC" - assert(C_1.SUPPED == "𐒈𐒝𐒑𐒛𐒐𐒘𐒕𐒖") + assertEqualStrings(c.nulled)("X\u0000ABC") // "X\000ABC" in java source + assertEqualStrings(c.supped)("𐒈𐒝𐒑𐒛𐒐𐒘𐒕𐒖") - assert(C_1.NULLED.size == "XYABC".size) - assert(C_1.SUPPED.codePointCount(0, C_1.SUPPED.length) == 8) + assertEqualStrings(C_1.NULLED)("X\u0000ABC") // "X\000ABC" in java source + assertEqualStrings(C_1.SUPPED)("𐒈𐒝𐒑𐒛𐒐𐒘𐒕𐒖") } diff --git a/test/junit/scala/tools/testkit/AssertUtilTest.scala b/test/junit/scala/tools/testkit/AssertUtilTest.scala index 98e2c0308553..90e98e1598e3 100644 --- a/test/junit/scala/tools/testkit/AssertUtilTest.scala +++ b/test/junit/scala/tools/testkit/AssertUtilTest.scala @@ -110,4 +110,10 @@ class AssertUtilTest { assertEquals(1, sut.errors.size) assertEquals(0, sut.errors.head._2.getSuppressed.length) } + + /** TODO + @Test def `hexdump is supplementary-aware`: Unit = { + assertEquals("00000000 f0 90 90 80 |𐐀.|", hexdump("\ud801\udc00").next()) + } + */ } From 657c12a3b2cfe2d93e21e96d638be1674e0ba076 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Thu, 22 Jul 2021 14:08:29 -0700 Subject: [PATCH 314/769] sbt 1.5.5 (was 1.5.4) --- project/build.properties | 2 +- scripts/common | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/project/build.properties b/project/build.properties index 9edb75b77c28..10fd9eee04ac 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.5.4 +sbt.version=1.5.5 diff --git a/scripts/common b/scripts/common index 106d96cc2296..8cfac63b2f47 100644 --- a/scripts/common +++ b/scripts/common @@ -15,7 +15,7 @@ rm -rf "$WORKSPACE/resolutionScratch_" mkdir -p "$WORKSPACE/resolutionScratch_" SBT_CMD=${SBT_CMD-sbt} -SBT_CMD="$SBT_CMD -sbt-version 1.5.4" +SBT_CMD="$SBT_CMD -sbt-version 1.5.5" # repo to publish builds integrationRepoUrl=${integrationRepoUrl-"https://scala-ci.typesafe.com/artifactory/scala-integration/"} From 055b3215059d485c32ffb5022b90ebbff8748803 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Fri, 16 Jul 2021 17:23:11 -0400 Subject: [PATCH 315/769] make MultiReleaseJarTest pass on Windows --- .../nsc/classpath/MultiReleaseJarTest.scala | 27 +- test/junit/scala/tools/testing/TempDir.scala | 13 + test/junit/scala/tools/testing/Using.scala | 396 ++++++++++++++++++ 3 files changed, 419 insertions(+), 17 deletions(-) create mode 100644 test/junit/scala/tools/testing/Using.scala 
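The fix below adds a copy of `Using` under `scala.tools.testing` and wraps each temp JAR in `ForDeletion`, whose `Releasable` skips the delete on Windows to avoid "file is in use" failures. As a usage sketch of that pattern (the `TempJarSketch` and `withTwoTempJars` names and the "example-" prefix are invented for illustration, not taken from the diff):

    import java.nio.file.{Files, Path}
    import scala.tools.testing.{ForDeletion, Using}

    object TempJarSketch {
      // Acquire two temp files; Using releases them in reverse order even if the body throws.
      // ForDeletion's Releasable deletes each file on release, except on Windows.
      def withTwoTempJars[A](body: (Path, Path) => A): A =
        Using.resources(ForDeletion(Files.createTempFile("example-", ".jar")),
                        ForDeletion(Files.createTempFile("example-", ".jar"))) { (t1, t2) =>
          body(t1.path, t2.path)
        }
    }
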
diff --git a/test/junit/scala/tools/nsc/classpath/MultiReleaseJarTest.scala b/test/junit/scala/tools/nsc/classpath/MultiReleaseJarTest.scala index 96d118847ec7..3e11d281bb24 100644 --- a/test/junit/scala/tools/nsc/classpath/MultiReleaseJarTest.scala +++ b/test/junit/scala/tools/nsc/classpath/MultiReleaseJarTest.scala @@ -7,28 +7,24 @@ import java.util.jar.Attributes.Name import org.junit.{Assert, Test} import scala.tools.nsc.{CloseableRegistry, Global, Settings} -import scala.tools.testing.BytecodeTesting +import scala.tools.testing.{BytecodeTesting, ForDeletion, Using} import scala.util.Properties class MultiReleaseJarTest extends BytecodeTesting { import compiler._ @Test def mrJar(): Unit = { - if (!Properties.isJavaAtLeast("9")) { println("skipping mrJar() on old JdK"); return} // TODO test that the compiler warns that --release is unsupported. + if (!Properties.isJavaAtLeast("9")) return // TODO test that the compiler warns that --release is unsupported. - val temp1 = Files.createTempFile("mr-jar-test-", ".jar") + // TODO test fails if both Global runs look at the same JAR on disk. Caching problem in our classpath implementation? So use two JARs. + def makeTemp() = Files.createTempFile("mr-jar-test-", ".jar") + Using.resources(ForDeletion(makeTemp()), ForDeletion(makeTemp())) { (temp1, temp2) => - // TODO test fails if both Global runs look at the same JAR on disk. Caching problem in our classpath implementation? - // val temp2 = temp1 - val temp2 = Files.createTempFile("mr-jar-test-", ".jar") - val cleanup = new CloseableRegistry - - try { def code(newApi: String) = s"package p1; abstract class Versioned { def oldApi: Int; $newApi }" val oldC = compileToBytes(code("")).head._2 val newC = compileToBytes(code("def newApi: Int")).head._2 - List(temp1, temp2).foreach(temp => createZip(temp, List( + List(temp1.path, temp2.path).foreach(temp => createZip(temp, List( "/p1/Versioned.class" -> oldC, "/META-INF/versions/9/p1/Versioned.class" -> newC, "/META-INF/MANIFEST.MF" -> createManifest) @@ -39,24 +35,21 @@ class MultiReleaseJarTest extends BytecodeTesting { settings.usejavacp.value = true settings.classpath.value = jarPath.toAbsolutePath.toString val g = new Global(settings) - cleanup.registerClosable(g) settings.release.value = release new g.Run val decls = g.rootMirror.staticClass("p1.Versioned").info.decls.filterNot(_.isConstructor).map(_.name.toString).toList.sorted + g.close() decls } - Assert.assertEquals(List("newApi", "oldApi"), declsOfC(temp1, "9")) - Assert.assertEquals(List("oldApi"), declsOfC(temp2, "8")) - } finally { - cleanup.close() - List(temp1, temp2).foreach(Files.deleteIfExists) + Assert.assertEquals(List("newApi", "oldApi"), declsOfC(temp1.path, "9")) + Assert.assertEquals(List("oldApi"), declsOfC(temp2.path, "8")) } } @Test def ctSymTest(): Unit = { - if (!Properties.isJavaAtLeast("9")) { println("skipping mrJar() on old JDK"); return} // TODO test that the compiler warns that --release is unsupported. + if (!Properties.isJavaAtLeast("9")) return // TODO test that the compiler warns that --release is unsupported. 
val cleanup = new CloseableRegistry def lookup(className: String, release: String): Boolean = { diff --git a/test/junit/scala/tools/testing/TempDir.scala b/test/junit/scala/tools/testing/TempDir.scala index 475de8c4a2d4..c1e4f6376699 100644 --- a/test/junit/scala/tools/testing/TempDir.scala +++ b/test/junit/scala/tools/testing/TempDir.scala @@ -1,6 +1,9 @@ package scala.tools.testing import java.io.{IOException, File} +import java.nio.file.{Path, Files} +import scala.util.{Properties, Try} +import Using.Releasable object TempDir { final val TEMP_DIR_ATTEMPTS = 10000 @@ -16,3 +19,13 @@ object TempDir { throw new IOException(s"Failed to create directory") } } + +/* Turn a path into a temp file for purposes of Using it as a resource. + * On Windows, avoid "file is in use" errors by not attempting to delete it. + */ +case class ForDeletion(path: Path) +object ForDeletion { + implicit val deleteOnRelease: Releasable[ForDeletion] = new Releasable[ForDeletion] { + override def release(releasee: ForDeletion) = if (!Properties.isWin) Files.delete(releasee.path) + } +} diff --git a/test/junit/scala/tools/testing/Using.scala b/test/junit/scala/tools/testing/Using.scala new file mode 100644 index 000000000000..e2413b8c0085 --- /dev/null +++ b/test/junit/scala/tools/testing/Using.scala @@ -0,0 +1,396 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.testing + +import scala.util.Try +import scala.util.control.{ControlThrowable, NonFatal} + +/** A utility for performing automatic resource management. It can be used to perform an + * operation using resources, after which it releases the resources in reverse order + * of their creation. + * + * ==Usage== + * + * There are multiple ways to automatically manage resources with `Using`. If you only need + * to manage a single resource, the [[Using.apply `apply`]] method is easiest; it wraps the + * resource opening, operation, and resource releasing in a `Try`. + * + * Example: + * {{{ + * import java.io.{BufferedReader, FileReader} + * import scala.util.{Try, Using} + * + * val lines: Try[Seq[String]] = + * Using(new BufferedReader(new FileReader("file.txt"))) { reader => + * Iterator.continually(reader.readLine()).takeWhile(_ != null).toSeq + * } + * }}} + * + * If you need to manage multiple resources, [[Using.Manager$.apply `Using.Manager`]] should + * be used. It allows the managing of arbitrarily many resources, whose creation, use, and + * release are all wrapped in a `Try`. 
+ * + * Example: + * {{{ + * import java.io.{BufferedReader, FileReader} + * import scala.util.{Try, Using} + * + * val lines: Try[Seq[String]] = Using.Manager { use => + * val r1 = use(new BufferedReader(new FileReader("file1.txt"))) + * val r2 = use(new BufferedReader(new FileReader("file2.txt"))) + * val r3 = use(new BufferedReader(new FileReader("file3.txt"))) + * val r4 = use(new BufferedReader(new FileReader("file4.txt"))) + * + * // use your resources here + * def lines(reader: BufferedReader): Iterator[String] = + * Iterator.continually(reader.readLine()).takeWhile(_ != null) + * + * (lines(r1) ++ lines(r2) ++ lines(r3) ++ lines(r4)).toList + * } + * }}} + * + * If you wish to avoid wrapping management and operations in a `Try`, you can use + * [[Using.resource `Using.resource`]], which throws any exceptions that occur. + * + * Example: + * {{{ + * import java.io.{BufferedReader, FileReader} + * import scala.util.Using + * + * val lines: Seq[String] = + * Using.resource(new BufferedReader(new FileReader("file.txt"))) { reader => + * Iterator.continually(reader.readLine()).takeWhile(_ != null).toSeq + * } + * }}} + * + * ==Suppression Behavior== + * + * If two exceptions are thrown (e.g., by an operation and closing a resource), + * one of them is re-thrown, and the other is + * [[java.lang.Throwable#addSuppressed added to it as a suppressed exception]]. + * If the two exceptions are of different 'severities' (see below), the one of a higher + * severity is re-thrown, and the one of a lower severity is added to it as a suppressed + * exception. If the two exceptions are of the same severity, the one thrown first is + * re-thrown, and the one thrown second is added to it as a suppressed exception. + * If an exception is a [[scala.util.control.ControlThrowable `ControlThrowable`]], or + * if it does not support suppression (see + * [[java.lang.Throwable `Throwable`'s constructor with an `enableSuppression` parameter]]), + * an exception that would have been suppressed is instead discarded. + * + * Exceptions are ranked from highest to lowest severity as follows: + * - `java.lang.VirtualMachineError` + * - `java.lang.LinkageError` + * - `java.lang.InterruptedException` and `java.lang.ThreadDeath` + * - [[scala.util.control.NonFatal fatal exceptions]], excluding `scala.util.control.ControlThrowable` + * - `scala.util.control.ControlThrowable` + * - all other exceptions + * + * When more than two exceptions are thrown, the first two are combined and + * re-thrown as described above, and each successive exception thrown is combined + * as it is thrown. + * + * @define suppressionBehavior See the main doc for [[Using `Using`]] for full details of + * suppression behavior. + */ +object Using { + /** Performs an operation using a resource, and then releases the resource, + * even if the operation throws an exception. + * + * $suppressionBehavior + * + * @return a [[Try]] containing an exception if one or more were thrown, + * or the result of the operation if no exceptions were thrown + */ + def apply[R: Releasable, A](resource: => R)(f: R => A): Try[A] = Try { Using.resource(resource)(f) } + + /** A resource manager. + * + * Resources can be registered with the manager by calling [[acquire `acquire`]]; + * such resources will be released in reverse order of their acquisition + * when the manager is closed, regardless of any exceptions thrown + * during use. 
+ * + * $suppressionBehavior + * + * @note It is recommended for API designers to require an implicit `Manager` + * for the creation of custom resources, and to call `acquire` during those + * resources' construction. Doing so guarantees that the resource ''must'' be + * automatically managed, and makes it impossible to forget to do so. + * + * + * Example: + * {{{ + * class SafeFileReader(file: File)(implicit manager: Using.Manager) + * extends BufferedReader(new FileReader(file)) { + * + * def this(fileName: String)(implicit manager: Using.Manager) = this(new File(fileName)) + * + * manager.acquire(this) + * } + * }}} + */ + final class Manager private { + import Manager._ + + private var closed = false + private[this] var resources: List[Resource[_]] = Nil + + /** Registers the specified resource with this manager, so that + * the resource is released when the manager is closed, and then + * returns the (unmodified) resource. + */ + def apply[R: Releasable](resource: R): R = { + acquire(resource) + resource + } + + /** Registers the specified resource with this manager, so that + * the resource is released when the manager is closed. + */ + def acquire[R: Releasable](resource: R): Unit = { + if (resource == null) throw new NullPointerException("null resource") + if (closed) throw new IllegalStateException("Manager has already been closed") + resources = new Resource(resource) :: resources + } + + private def manage[A](op: Manager => A): A = { + var toThrow: Throwable = null + try { + op(this) + } catch { + case t: Throwable => + toThrow = t + null.asInstanceOf[A] // compiler doesn't know `finally` will throw + } finally { + closed = true + var rs = resources + resources = null // allow GC, in case something is holding a reference to `this` + while (rs.nonEmpty) { + val resource = rs.head + rs = rs.tail + try resource.release() + catch { + case t: Throwable => + if (toThrow == null) toThrow = t + else toThrow = preferentiallySuppress(toThrow, t) + } + } + if (toThrow != null) throw toThrow + } + } + } + + object Manager { + /** Performs an operation using a `Manager`, then closes the `Manager`, + * releasing its resources (in reverse order of acquisition). + * + * Example: + * {{{ + * val lines = Using.Manager { use => + * use(new BufferedReader(new FileReader("file.txt"))).lines() + * } + * }}} + * + * If using resources which require an implicit `Manager` as a parameter, + * this method should be invoked with an `implicit` modifier before the function + * parameter: + * + * Example: + * {{{ + * val lines = Using.Manager { implicit use => + * new SafeFileReader("file.txt").lines() + * } + * }}} + * + * See the main doc for [[Using `Using`]] for full details of suppression behavior. 
+ * + * @param op the operation to perform using the manager + * @tparam A the return type of the operation + * @return a [[Try]] containing an exception if one or more were thrown, + * or the result of the operation if no exceptions were thrown + */ + def apply[A](op: Manager => A): Try[A] = Try { (new Manager).manage(op) } + + private final class Resource[R](resource: R)(implicit releasable: Releasable[R]) { + def release(): Unit = releasable.release(resource) + } + } + + private def preferentiallySuppress(primary: Throwable, secondary: Throwable): Throwable = { + def score(t: Throwable): Int = t match { + case _: VirtualMachineError => 4 + case _: LinkageError => 3 + case _: InterruptedException | _: ThreadDeath => 2 + case _: ControlThrowable => 0 + case e if !NonFatal(e) => 1 // in case this method gets out of sync with NonFatal + case _ => -1 + } + @inline def suppress(t: Throwable, suppressed: Throwable): Throwable = { t.addSuppressed(suppressed); t } + + if (score(secondary) > score(primary)) suppress(secondary, primary) + else suppress(primary, secondary) + } + + /** Performs an operation using a resource, and then releases the resource, + * even if the operation throws an exception. This method behaves similarly + * to Java's try-with-resources. + * + * $suppressionBehavior + * + * @param resource the resource + * @param body the operation to perform with the resource + * @tparam R the type of the resource + * @tparam A the return type of the operation + * @return the result of the operation, if neither the operation nor + * releasing the resource throws + */ + def resource[R, A](resource: R)(body: R => A)(implicit releasable: Releasable[R]): A = { + if (resource == null) throw new NullPointerException("null resource") + + var toThrow: Throwable = null + try { + body(resource) + } catch { + case t: Throwable => + toThrow = t + null.asInstanceOf[A] // compiler doesn't know `finally` will throw + } finally { + if (toThrow eq null) releasable.release(resource) + else { + try releasable.release(resource) + catch { case other: Throwable => toThrow = preferentiallySuppress(toThrow, other) } + finally throw toThrow + } + } + } + + /** Performs an operation using two resources, and then releases the resources + * in reverse order, even if the operation throws an exception. This method + * behaves similarly to Java's try-with-resources. + * + * $suppressionBehavior + * + * @param resource1 the first resource + * @param resource2 the second resource + * @param body the operation to perform using the resources + * @tparam R1 the type of the first resource + * @tparam R2 the type of the second resource + * @tparam A the return type of the operation + * @return the result of the operation, if neither the operation nor + * releasing the resources throws + */ + def resources[R1: Releasable, R2: Releasable, A]( + resource1: R1, + resource2: => R2 + )(body: (R1, R2) => A + ): A = + resource(resource1) { r1 => + resource(resource2) { r2 => + body(r1, r2) + } + } + + /** Performs an operation using three resources, and then releases the resources + * in reverse order, even if the operation throws an exception. This method + * behaves similarly to Java's try-with-resources. 
+ * + * $suppressionBehavior + * + * @param resource1 the first resource + * @param resource2 the second resource + * @param resource3 the third resource + * @param body the operation to perform using the resources + * @tparam R1 the type of the first resource + * @tparam R2 the type of the second resource + * @tparam R3 the type of the third resource + * @tparam A the return type of the operation + * @return the result of the operation, if neither the operation nor + * releasing the resources throws + */ + def resources[R1: Releasable, R2: Releasable, R3: Releasable, A]( + resource1: R1, + resource2: => R2, + resource3: => R3 + )(body: (R1, R2, R3) => A + ): A = + resource(resource1) { r1 => + resource(resource2) { r2 => + resource(resource3) { r3 => + body(r1, r2, r3) + } + } + } + + /** Performs an operation using four resources, and then releases the resources + * in reverse order, even if the operation throws an exception. This method + * behaves similarly to Java's try-with-resources. + * + * $suppressionBehavior + * + * @param resource1 the first resource + * @param resource2 the second resource + * @param resource3 the third resource + * @param resource4 the fourth resource + * @param body the operation to perform using the resources + * @tparam R1 the type of the first resource + * @tparam R2 the type of the second resource + * @tparam R3 the type of the third resource + * @tparam R4 the type of the fourth resource + * @tparam A the return type of the operation + * @return the result of the operation, if neither the operation nor + * releasing the resources throws + */ + def resources[R1: Releasable, R2: Releasable, R3: Releasable, R4: Releasable, A]( + resource1: R1, + resource2: => R2, + resource3: => R3, + resource4: => R4 + )(body: (R1, R2, R3, R4) => A + ): A = + resource(resource1) { r1 => + resource(resource2) { r2 => + resource(resource3) { r3 => + resource(resource4) { r4 => + body(r1, r2, r3, r4) + } + } + } + } + + /** A type class describing how to release a particular type of resource. + * + * A resource is anything which needs to be released, closed, or otherwise cleaned up + * in some way after it is finished being used, and for which waiting for the object's + * garbage collection to be cleaned up would be unacceptable. For example, an instance of + * [[java.io.OutputStream]] would be considered a resource, because it is important to close + * the stream after it is finished being used. + * + * An instance of `Releasable` is needed in order to automatically manage a resource + * with [[Using `Using`]]. An implicit instance is provided for all types extending + * [[java.lang.AutoCloseable]]. + * + * @tparam R the type of the resource + */ + trait Releasable[-R] { + /** Releases the specified resource. */ + def release(resource: R): Unit + } + + object Releasable { + /** An implicit `Releasable` for [[java.lang.AutoCloseable `AutoCloseable`s]]. 
*/ + implicit object AutoCloseableIsReleasable extends Releasable[AutoCloseable] { + def release(resource: AutoCloseable): Unit = resource.close() + } + } + +} From aaacf4dc01f712ea78df64571a0b556e8778cefd Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Thu, 24 Jun 2021 14:03:43 -0700 Subject: [PATCH 316/769] JDK 17: fix StringLikeTest --- .../scala/collection/immutable/StringLikeTest.scala | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/test/junit/scala/collection/immutable/StringLikeTest.scala b/test/junit/scala/collection/immutable/StringLikeTest.scala index d0f47bd6bb10..a1d4c00e23cc 100644 --- a/test/junit/scala/collection/immutable/StringLikeTest.scala +++ b/test/junit/scala/collection/immutable/StringLikeTest.scala @@ -63,10 +63,14 @@ class StringLikeTest { assertTrue("no trim toDouble", sOk.toDouble == 2.0d) assertTrue("no trim toFloat", sOk.toFloat == 2.0f) - AssertUtil.assertThrows[java.lang.NumberFormatException](sNull.toInt, {s => s == "null"}) - AssertUtil.assertThrows[java.lang.NumberFormatException](sNull.toLong, {s => s == "null"}) - AssertUtil.assertThrows[java.lang.NumberFormatException](sNull.toShort, {s => s == "null"}) - AssertUtil.assertThrows[java.lang.NumberFormatException](sNull.toByte, {s => s == "null"}) + // JDK 17 gives the nicer message + def isNullStringMessage(s: String) = + s == "null" || s == "Cannot parse null string" + + AssertUtil.assertThrows[java.lang.NumberFormatException](sNull.toInt, isNullStringMessage) + AssertUtil.assertThrows[java.lang.NumberFormatException](sNull.toLong, isNullStringMessage) + AssertUtil.assertThrows[java.lang.NumberFormatException](sNull.toShort, isNullStringMessage) + AssertUtil.assertThrows[java.lang.NumberFormatException](sNull.toByte, isNullStringMessage) AssertUtil.assertThrows[java.lang.NullPointerException](sNull.toDouble) AssertUtil.assertThrows[java.lang.NullPointerException](sNull.toFloat) From 80d9930da441b291a6429bcba5e293d4c00846ba Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Thu, 22 Jul 2021 15:21:13 -0700 Subject: [PATCH 317/769] JDK 17: fix testUncaughtExceptionReporting --- test/files/jvm/scala-concurrent-tck.scala | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/test/files/jvm/scala-concurrent-tck.scala b/test/files/jvm/scala-concurrent-tck.scala index e18273972ac9..b296f1c04d14 100644 --- a/test/files/jvm/scala-concurrent-tck.scala +++ b/test/files/jvm/scala-concurrent-tck.scala @@ -874,11 +874,16 @@ trait CustomExecutionContext extends TestBase { latch.countDown() }) + // scala/bug#12423, scala/scala#9680 + val threadDeathWaitingPeriod = + if (scala.util.Properties.isJavaAtLeast("17")) 1000L + else 10L + @tailrec def waitForThreadDeath(turns: Int): Boolean = if (turns <= 0) false else if ((thread ne null) && thread.isAlive == false) true else { - Thread.sleep(10) + Thread.sleep(threadDeathWaitingPeriod) waitForThreadDeath(turns - 1) } From 8e6f42637364277be8e4da941a221886ccd03732 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Wed, 23 Jun 2021 10:56:42 -0700 Subject: [PATCH 318/769] Windows CI: add JDK 17 (alongside 8) --- .github/workflows/ci.yml | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 70647980f2e2..65f8d9429d88 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -14,6 +14,10 @@ jobs: runs-on: windows-latest strategy: fail-fast: false + matrix: + include: + - java: 8 + - java: 17-ea steps: - run: git config --global core.autocrlf false - 
name: Checkout @@ -26,7 +30,7 @@ jobs: uses: actions/setup-java@v2 with: distribution: adopt - java-version: 8 + java-version: ${{matrix.java}} - name: Cache uses: actions/cache@v2 From 6985b9ac031580ae543618a8151d98cd64f9352c Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Thu, 15 Jul 2021 15:43:13 -0400 Subject: [PATCH 319/769] JDK 17: suppress SecurityManager deprecation warnings --- .../scala/tools/partest/SecurityTest.scala | 26 ------------------- .../nest/DelegatingSecurityManager.scala | 1 + .../scala/tools/partest/nest/Runner.scala | 6 ++++- .../scala/tools/partest/nest/TrapExit.scala | 1 + 4 files changed, 7 insertions(+), 27 deletions(-) delete mode 100644 src/partest/scala/tools/partest/SecurityTest.scala diff --git a/src/partest/scala/tools/partest/SecurityTest.scala b/src/partest/scala/tools/partest/SecurityTest.scala deleted file mode 100644 index ce76d29e67fa..000000000000 --- a/src/partest/scala/tools/partest/SecurityTest.scala +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package scala.tools.partest - -import java.security._ -import java.util._ - -abstract class SecurityTest extends App { - def throwIt(x: Any) = throw new AccessControlException("" + x) - def propertyCheck(p: PropertyPermission): Unit = throwIt(p) - - def check(perm: Permission): Unit = perm match { - case p: PropertyPermission => propertyCheck(p) - case _ => () - } -} diff --git a/src/partest/scala/tools/partest/nest/DelegatingSecurityManager.scala b/src/partest/scala/tools/partest/nest/DelegatingSecurityManager.scala index 3ba255ad4dc0..66dff5d273c0 100644 --- a/src/partest/scala/tools/partest/nest/DelegatingSecurityManager.scala +++ b/src/partest/scala/tools/partest/nest/DelegatingSecurityManager.scala @@ -16,6 +16,7 @@ import java.io.FileDescriptor import java.net.InetAddress import java.security.Permission +@deprecated("JDK 17 deprecates SecurityManager", since="2.13.7") class DelegatingSecurityManager(delegate: SecurityManager) extends SecurityManager { override def checkExit(status: Int): Unit = if (delegate ne null) delegate.checkExit(status) override def checkPermission(perm: Permission): Unit = if (delegate ne null) delegate.checkPermission(perm) diff --git a/src/partest/scala/tools/partest/nest/Runner.scala b/src/partest/scala/tools/partest/nest/Runner.scala index 906b021771b1..3004010789ea 100644 --- a/src/partest/scala/tools/partest/nest/Runner.scala +++ b/src/partest/scala/tools/partest/nest/Runner.scala @@ -18,6 +18,7 @@ import java.lang.reflect.InvocationTargetException import java.nio.charset.Charset import java.nio.file.{Files, StandardOpenOption} +import scala.annotation.nowarn import scala.collection.mutable.ListBuffer import scala.concurrent.duration.Duration import scala.reflect.internal.FatalError @@ -258,7 +259,10 @@ class Runner(val testInfo: TestInfo, val suiteRunner: AbstractRunner) { pushTranscript(s" > ${logFile.getName}") - TrapExit(() => run()) match { + @nowarn("cat=deprecation") // JDK 17 deprecates SecurityManager, so TrapExit is deprecated too + val trapExit = TrapExit + + trapExit(() => run()) match { case Left((status, throwable)) if status != 0 => genFail("non-zero exit code") case _ => diff --git a/src/partest/scala/tools/partest/nest/TrapExit.scala 
b/src/partest/scala/tools/partest/nest/TrapExit.scala index 8e4e1d7cb50b..f5f00dc21859 100644 --- a/src/partest/scala/tools/partest/nest/TrapExit.scala +++ b/src/partest/scala/tools/partest/nest/TrapExit.scala @@ -12,6 +12,7 @@ package scala.tools.partest.nest +@deprecated("JDK 17 deprecates SecurityManager", since="2.13.7") object TrapExit { private class TrapExitThrowable(val status: Int) extends Throwable { From 5adc73d24460495f864990775d5cefab967489c5 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Thu, 15 Jul 2021 16:26:27 -0400 Subject: [PATCH 320/769] JDK 17: suppress deprecation warning --- src/library/scala/runtime/ModuleSerializationProxy.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/library/scala/runtime/ModuleSerializationProxy.java b/src/library/scala/runtime/ModuleSerializationProxy.java index 0a587ade415b..d023faa1389c 100644 --- a/src/library/scala/runtime/ModuleSerializationProxy.java +++ b/src/library/scala/runtime/ModuleSerializationProxy.java @@ -13,7 +13,6 @@ package scala.runtime; import java.io.Serializable; -import java.security.AccessController; import java.security.PrivilegedActionException; import java.security.PrivilegedExceptionAction; import java.util.HashSet; @@ -25,9 +24,10 @@ public final class ModuleSerializationProxy implements Serializable { private final Class moduleClass; private static final ClassValue instances = new ClassValue() { @Override + @SuppressWarnings("removal") // JDK 17 deprecates AccessController protected Object computeValue(Class type) { try { - return AccessController.doPrivileged((PrivilegedExceptionAction) () -> type.getField("MODULE$").get(null)); + return java.security.AccessController.doPrivileged((PrivilegedExceptionAction) () -> type.getField("MODULE$").get(null)); } catch (PrivilegedActionException e) { return rethrowRuntime(e.getCause()); } From ecaa7db5da014b04a8775e07206b20e3a61969dc Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Thu, 22 Jul 2021 16:02:04 -0700 Subject: [PATCH 321/769] JDK 17: get t2318 passing --- test/files/run/t2318.scala | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/test/files/run/t2318.scala b/test/files/run/t2318.scala index f00297b5c9e1..03501b755f2e 100644 --- a/test/files/run/t2318.scala +++ b/test/files/run/t2318.scala @@ -5,9 +5,12 @@ import java.security._ import scala.language.reflectiveCalls +// SecurityManager is deprecated on JDK 17, so we sprinkle `@deprecated` around + object Test { trait Bar { def bar: Unit } + @deprecated object Mgr extends SecurityManager { def allowedProperty(name: String) = name == "sun.net.inetaddr.ttl" || @@ -29,6 +32,7 @@ object Test { def doDestroy( obj : Destroyable ) : Unit = obj.destroy(); doDestroy( p ); } + @deprecated def t2() = { System.setSecurityManager(Mgr) @@ -44,6 +48,6 @@ object Test { try t1() catch { case _: java.io.IOException => () } - t2() + t2(): @annotation.nowarn("cat=deprecation") } } From e8352e05df03f1a0db88ac66ad9af882fb28b576 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Thu, 15 Jul 2021 16:26:40 -0400 Subject: [PATCH 322/769] build: extend fatalWarnings setting to cover Java code too --- build.sbt | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/build.sbt b/build.sbt index 02aa6619bd21..34c8de004b58 100644 --- a/build.sbt +++ b/build.sbt @@ -232,6 +232,10 @@ lazy val fatalWarningsSettings = Seq( if (fatalWarnings.value) Seq("-Werror") else Nil }, + Compile / javacOptions ++= { + if (fatalWarnings.value) Seq("-Werror") + else Nil + }, Compile / doc / scalacOptions -= 
"-Werror", // there are too many doc errors to enable this right now ) From 5d4b43ea83816412a51ccdbc7d7bd985c2bcb9b9 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Wed, 23 Jun 2021 10:49:46 -0700 Subject: [PATCH 323/769] Travis-CI: add JDK 17 (replacing JDK 16) --- .travis.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.travis.yml b/.travis.yml index cfb5e32e83d9..1d7c481eae30 100644 --- a/.travis.yml +++ b/.travis.yml @@ -12,9 +12,9 @@ templates: # this has no effect on travis, it's just a place to put our template pr-jdk8: &pr-jdk8 if: type = pull_request OR repo != scala/scala - cron-jdk16: &cron-jdk16 + cron-jdk17: &cron-jdk17 if: type = cron AND repo = scala/scala - env: ADOPTOPENJDK=16 + env: ADOPTOPENJDK=17 build-for-testing: &build-for-testing # pull request validation (w/ bootstrap) @@ -97,13 +97,13 @@ jobs: <<: *pr-jdk8 - <<: *build-for-testing - <<: *cron-jdk16 + <<: *cron-jdk17 - <<: *test1 - <<: *cron-jdk16 + <<: *cron-jdk17 - <<: *test2 - <<: *cron-jdk16 + <<: *cron-jdk17 - stage: test name: build library with Scala 3 From abc4f1c0ed8565301c0d6fd9064e849e871354d5 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Fri, 23 Jul 2021 21:30:12 -0700 Subject: [PATCH 324/769] Inline Lifted.apply in condOpt --- src/library/scala/PartialFunction.scala | 21 +++++++-------- test/junit/scala/PartialFunctionTest.scala | 30 ++++++++++++++++++++++ 2 files changed, 41 insertions(+), 10 deletions(-) create mode 100644 test/junit/scala/PartialFunctionTest.scala diff --git a/src/library/scala/PartialFunction.scala b/src/library/scala/PartialFunction.scala index c9c67ca5e7ef..d6092990446a 100644 --- a/src/library/scala/PartialFunction.scala +++ b/src/library/scala/PartialFunction.scala @@ -378,25 +378,26 @@ object PartialFunction { */ def empty[A, B] : PartialFunction[A, B] = empty_pf - /** Creates a Boolean test based on a value and a partial function. - * It behaves like a 'match' statement with an implied 'case _ => false' - * following the supplied cases. + /** A Boolean test that is the result of the given function where defined, + * and false otherwise. + * + * It behaves like a `case _ => false` were added to the partial function. * * @param x the value to test * @param pf the partial function * @return true, iff `x` is in the domain of `pf` and `pf(x) == true`. */ - def cond[T](x: T)(pf: PartialFunction[T, Boolean]): Boolean = pf.applyOrElse(x, constFalse) + def cond[A](x: A)(pf: PartialFunction[A, Boolean]): Boolean = pf.applyOrElse(x, constFalse) - /** Transforms a PartialFunction[T, U] `pf` into Function1[T, Option[U]] `f` - * whose result is `Some(x)` if the argument is in `pf`'s domain and `None` - * otherwise, and applies it to the value `x`. In effect, it is a - * `'''match'''` statement which wraps all case results in `Some(_)` and - * adds `'''case''' _ => None` to the end. + /** Apply the function to the given value if defined, and return the result + * in a `Some`; otherwise, return `None`. * * @param x the value to test * @param pf the PartialFunction[T, U] * @return `Some(pf(x))` if `pf isDefinedAt x`, `None` otherwise. 
*/ - def condOpt[T,U](x: T)(pf: PartialFunction[T, U]): Option[U] = pf.lift(x) + def condOpt[A, B](x: A)(pf: PartialFunction[A, B]): Option[B] = { + val z = pf.applyOrElse(x, checkFallback[B]) + if (!fallbackOccurred(z)) Some(z) else None + } } diff --git a/test/junit/scala/PartialFunctionTest.scala b/test/junit/scala/PartialFunctionTest.scala new file mode 100644 index 000000000000..a4cfa5693935 --- /dev/null +++ b/test/junit/scala/PartialFunctionTest.scala @@ -0,0 +1,30 @@ +package scala + +import org.junit.Assert._ +import org.junit.Test + +class PartialFunctionTest { + + import PartialFunction.{cond, condOpt} + + @Test + def `cond evaluates pf`(): Unit = { + assertTrue(cond("x") { case "x" => true }) + } + + @Test + def `cond evaluates default`(): Unit = { + assertFalse(cond("z") { case "x" => true }) + } + + @Test + def `condOpt evaluates pf`(): Unit = { + assertEquals(Some("y"), condOpt("x") { case "x" => "y" }) + assertEquals(Some(null), condOpt("x") { case "x" => null case "z" => "y" }) + } + + @Test + def `condOpt evaluates default`(): Unit = { + assertEquals(None, condOpt("z") { case "x" => "y" }) + } +} From 36c85c1167829d2e3347ff8b79ab8d3527dbe081 Mon Sep 17 00:00:00 2001 From: Doug Roper Date: Mon, 26 Jul 2021 18:35:36 -0400 Subject: [PATCH 325/769] Fix Stream.iterator memory leak --- src/library/scala/collection/LinearSeq.scala | 7 ++++++- test/files/run/stream-gc.check | 2 +- test/files/run/stream-gc.scala | 1 + 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/src/library/scala/collection/LinearSeq.scala b/src/library/scala/collection/LinearSeq.scala index 9934f3279a3c..fdee005723b5 100644 --- a/src/library/scala/collection/LinearSeq.scala +++ b/src/library/scala/collection/LinearSeq.scala @@ -276,7 +276,12 @@ private[collection] final class LinearSeqIterator[A](coll: LinearSeqOps[A, Linea // A call-by-need cell private[this] final class LazyCell(st: => LinearSeqOps[A, LinearSeq, LinearSeq[A]]) { lazy val v = st } - private[this] var these: LazyCell = new LazyCell(coll) + private[this] var these: LazyCell = { + // Reassign reference to avoid creating a private class field and holding a reference to the head. + // LazyCell would otherwise close over `coll`. 
+ val initialHead = coll + new LazyCell(initialHead) + } def hasNext: Boolean = these.v.nonEmpty diff --git a/test/files/run/stream-gc.check b/test/files/run/stream-gc.check index 1f954e63c641..202f49c8ebab 100644 --- a/test/files/run/stream-gc.check +++ b/test/files/run/stream-gc.check @@ -1 +1 @@ -warning: 5 deprecations (since 2.13.0); re-run with -deprecation for details +warning: 6 deprecations (since 2.13.0); re-run with -deprecation for details diff --git a/test/files/run/stream-gc.scala b/test/files/run/stream-gc.scala index 18d8b972c00c..182ba3244b70 100644 --- a/test/files/run/stream-gc.scala +++ b/test/files/run/stream-gc.scala @@ -8,4 +8,5 @@ object Test extends App { Stream.tabulate(100)(_ => new Array[AnyRef](10000)).collectFirst { case x if false => x } Stream.tabulate(100)(_ => new Array[AnyRef](10000)).collectFirst { case x if false => x } Stream.tabulate(100)(_ => new Array[AnyRef](10000)).collectFirst { case x if false => x } + Stream.tabulate(100)(_ => new Array[AnyRef](10000)).iterator.foreach(_ => ()) } From 9379c6357599c21f3613493ef9bae4258d819ee1 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Tue, 27 Jul 2021 11:27:48 +0100 Subject: [PATCH 326/769] Format typo in ClassfileAnnotation docs --- src/library/scala/annotation/ClassfileAnnotation.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/library/scala/annotation/ClassfileAnnotation.scala b/src/library/scala/annotation/ClassfileAnnotation.scala index 29acbc52689a..1f6317427b5e 100644 --- a/src/library/scala/annotation/ClassfileAnnotation.scala +++ b/src/library/scala/annotation/ClassfileAnnotation.scala @@ -13,7 +13,7 @@ package scala.annotation /** A base class for classfile annotations. These are stored as - * [[https://docs.oracle.com/javase/8/docs/technotes/guides/language/annotations.html Java annotations]]] + * [[https://docs.oracle.com/javase/8/docs/technotes/guides/language/annotations.html Java annotations]] * in classfiles. */ @deprecated("Annotation classes need to be written in Java in order to be stored in classfiles in a Java-compatible manner", "2.13.0") From ea9ca653fdd4ac9abca6fbf471f5f38bbc9e9962 Mon Sep 17 00:00:00 2001 From: Alec Theriault Date: Thu, 29 Jul 2021 19:30:46 -0700 Subject: [PATCH 327/769] Teach backend to emit `iinc` instructions The backend is now able to turn `x += 42` into an `iinc 42` instruction. The optimization only applies to `+=` and `-=`, provided the the net increment fits inside a signed 16-bit value (the ASM library handles choosing `iinc` or `wide iinc` as is appropriate). 
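As an illustrative sketch of the code shapes affected (the class name and the constants below are invented for the example and are not taken from the changed sources):

    class IincSketch {
      def demo(start: Int): Int = {
        var i = start
        i += 42     // constant increment within the signed 16-bit range: now emitted as `iinc 42`
        i -= 7      // negated constant also fits: emitted as `iinc -7`
        i += 40000  // exceeds the signed 16-bit range: still compiled as load/add/store
        i
      }
    }

The new test under test/files/jvm/iinc exercises the same three cases (plain `iinc`, `wide iinc`, and increments too large for either).
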
Fixes scala/bug#7452 --- .../nsc/backend/jvm/BCodeBodyBuilder.scala | 20 ++++++++-- .../nsc/backend/jvm/BCodeIdiomatic.scala | 1 + test/files/jvm/iinc.check | 18 +++++++++ test/files/jvm/iinc/Increment_1.scala | 37 +++++++++++++++++++ test/files/jvm/iinc/test.scala | 17 +++++++++ .../nsc/backend/jvm/opt/InlinerTest.scala | 12 +++--- 6 files changed, 96 insertions(+), 9 deletions(-) create mode 100644 test/files/jvm/iinc.check create mode 100644 test/files/jvm/iinc/Increment_1.scala create mode 100644 test/files/jvm/iinc/test.scala diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala index 1581038046a3..b9ea86288ad9 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala @@ -79,9 +79,23 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { case Assign(lhs, rhs) => val s = lhs.symbol val Local(tk, _, idx, _) = locals.getOrMakeLocal(s) - genLoad(rhs, tk) - lineNumber(tree) - bc.store(idx, tk) + + rhs match { + case Apply(Select(larg: Ident, nme.ADD), Literal(x) :: Nil) + if larg.symbol == s && tk.isIntSizedType && x.isShortRange => + lineNumber(tree) + bc.iinc(idx, x.intValue) + + case Apply(Select(larg: Ident, nme.SUB), Literal(x) :: Nil) + if larg.symbol == s && tk.isIntSizedType && Constant(-x.intValue).isShortRange => + lineNumber(tree) + bc.iinc(idx, -x.intValue) + + case _ => + genLoad(rhs, tk) + lineNumber(tree) + bc.store(idx, tk) + } case _ => genLoad(tree, UNIT) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala index 92de2aca3b9a..a2b2a21b365c 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala @@ -392,6 +392,7 @@ abstract class BCodeIdiomatic { final def load( idx: Int, tk: BType): Unit = { emitVarInsn(Opcodes.ILOAD, idx, tk) } // can-multi-thread final def store(idx: Int, tk: BType): Unit = { emitVarInsn(Opcodes.ISTORE, idx, tk) } // can-multi-thread + final def iinc( idx: Int, increment: Int): Unit = jmethod.visitIincInsn(idx, increment) // can-multi-thread final def aload( tk: BType): Unit = { emitTypeBased(JCodeMethodN.aloadOpcodes, tk) } // can-multi-thread final def astore(tk: BType): Unit = { emitTypeBased(JCodeMethodN.astoreOpcodes, tk) } // can-multi-thread diff --git a/test/files/jvm/iinc.check b/test/files/jvm/iinc.check new file mode 100644 index 000000000000..3538a07f8587 --- /dev/null +++ b/test/files/jvm/iinc.check @@ -0,0 +1,18 @@ +def increment + iinc 1 + iinc 54 + iinc 127 + iinc -1 + iinc -54 + iinc -128 +end increment +def wideIncrement + iinc 128 + iinc 8765 + iinc 32767 + iinc -129 + iinc -8765 + iinc -32768 +end wideIncrement +def tooBigForIinc +end tooBigForIinc diff --git a/test/files/jvm/iinc/Increment_1.scala b/test/files/jvm/iinc/Increment_1.scala new file mode 100644 index 000000000000..03251016bfb3 --- /dev/null +++ b/test/files/jvm/iinc/Increment_1.scala @@ -0,0 +1,37 @@ +class Increment { + + // `iinc` + def increment(x: Int): Int = { + var i = x + i += 1 + i += 54 + i += 127 + i -= 1 + i -= 54 + i -= 128 + i + } + + // `wide iinc` + def wideIncrement(x: Int): Int = { + var i = x + i += 128 + i += 8765 + i += 32767 + i -= 129 + i -= 8765 + i -= 32768 + i + } + + def tooBigForIinc(x: Int): Int = { + var i = x + i += 32768 + i += 56789 + i += 2147483647 + i -= 32769 + i -= 56789 + i -= 
2147483647 + i + } +} diff --git a/test/files/jvm/iinc/test.scala b/test/files/jvm/iinc/test.scala new file mode 100644 index 000000000000..4743fb1000af --- /dev/null +++ b/test/files/jvm/iinc/test.scala @@ -0,0 +1,17 @@ +import scala.tools.partest.BytecodeTest + +import scala.tools.asm.tree.IincInsnNode + +object Test extends BytecodeTest { + def show: Unit = { + val classNode = loadClassNode("Increment") + for (name <- List("increment", "wideIncrement", "tooBigForIinc")) { + println(s"def $name") + getMethod(classNode, name).instructions.toArray().collect { + case insn: IincInsnNode => println(s" iinc ${insn.incr}") + } + println(s"end $name") + } + } +} + diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala index d927107df8b7..388660a1bdd7 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala @@ -1865,14 +1865,14 @@ class InlinerTest extends BytecodeTesting { ALOAD, ARRAYLENGTH, ISTORE, ICONST_0, ISTORE, // get length, init loop counter -1 /*8*/, ILOAD, ILOAD, IF_ICMPGE /*25*/, // check loop condition ALOAD, ILOAD, IALOAD, ISTORE, ALOAD, ILOAD, "consume", // load element, store into local, call body - ILOAD, ICONST_1, IADD, ISTORE, GOTO /*8*/, // increase loop counter, jump - -1 /*25*/, RETURN)) + IINC, GOTO /*7*/, // increase loop counter, jump + -1 /*26*/, RETURN)) assertSameSummary(getMethod(c, "t2"), List( ALOAD, ARRAYLENGTH, ISTORE, ICONST_0, ISTORE, -1 /*8*/, ILOAD, ILOAD, IF_ICMPGE /*24*/, ALOAD, ILOAD, AALOAD, "trim", POP, - ILOAD, ICONST_1, IADD, ISTORE, GOTO /*8*/, + IINC, GOTO /*8*/, -1 /*24*/, RETURN) ) } @@ -1891,14 +1891,14 @@ class InlinerTest extends BytecodeTesting { -1 /*14*/, ILOAD, ILOAD, IF_ICMPGE /*39*/, // loop condition ALOAD, ILOAD, IALOAD, ICONST_1, IADD, ISTORE, // compute element ALOAD, ILOAD, ILOAD, IASTORE, // store element - ILOAD, ICONST_1, IADD, ISTORE, GOTO /*14*/, // increase counter, jump - -1 /*39*/, ALOAD, ARETURN) + IINC, GOTO /*22*/, // increase counter, jump + -1 /*44*/, ALOAD, ARETURN) ) assertSameSummary(getMethod(c, "t2"), List( ALOAD, ARRAYLENGTH, ISTORE, ILOAD, ANEWARRAY, ASTORE, ILOAD, ICONST_0, IF_ICMPLE /*38*/, ICONST_0, ISTORE, // init new array, loop counter -1 /*15*/, ILOAD, ILOAD, IF_ICMPGE /*38*/, // loop condition ALOAD, ILOAD, AALOAD, "trim", ASTORE, ALOAD, ACONST_NULL, ASTORE, ASTORE, ALOAD, ILOAD, ALOAD, AASTORE, ACONST_NULL, ASTORE, // compute and store element - ILOAD, ICONST_1, IADD, ISTORE, GOTO /*15*/, // increase counter, jump + IINC, GOTO /*15*/, // increase counter, jump -1 /*38*/, ALOAD, ARETURN) ) } From 8f623f7746539435d2a77f791de121c742f3ac1e Mon Sep 17 00:00:00 2001 From: Martijn Hoekstra Date: Sun, 1 Aug 2021 12:20:40 +0200 Subject: [PATCH 328/769] Advice 1:1 replacement Yes, Option.get is bad and you should feel bad. But the deprecation warning should not give you such lessons, but just point to the replacement at hand. --- src/library/scala/util/Either.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/library/scala/util/Either.scala b/src/library/scala/util/Either.scala index df3a36f7dcf2..34bdc1cc572c 100644 --- a/src/library/scala/util/Either.scala +++ b/src/library/scala/util/Either.scala @@ -690,7 +690,7 @@ object Either { * * @throws java.util.NoSuchElementException if the projection is `Left`. 
*/ - @deprecated("Use `Either.getOrElse` instead", "2.13.0") + @deprecated("Use `Either.toOption.get` instead", "2.13.0") def get: B = e match { case Right(b) => b case _ => throw new NoSuchElementException("Either.right.get on Left") From cf4978249055c2429056882a8d23649b17b68e7b Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Sun, 1 Aug 2021 13:12:28 -0700 Subject: [PATCH 329/769] Error on bad unapplySeq type In future, unapplySeq returning a Seq directly will be OK. For now, avoid confusion. --- .../transform/patmat/PatternExpansion.scala | 24 +++++++++++-------- test/files/neg/t8127a.check | 2 +- test/files/neg/t8127a.scala | 7 ++++-- test/files/neg/t9538.check | 13 ++++++++++ test/files/neg/t9538.scala | 13 ++++++++++ 5 files changed, 46 insertions(+), 13 deletions(-) create mode 100644 test/files/neg/t9538.check create mode 100644 test/files/neg/t9538.scala diff --git a/src/compiler/scala/tools/nsc/transform/patmat/PatternExpansion.scala b/src/compiler/scala/tools/nsc/transform/patmat/PatternExpansion.scala index 58e35abbfd1d..de10983e95a4 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/PatternExpansion.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/PatternExpansion.scala @@ -16,9 +16,10 @@ package nsc package transform package patmat -import scala.tools.nsc.typechecker.Contexts import scala.reflect.internal.util import scala.tools.nsc.Reporting.WarningCategory +import scala.tools.nsc.typechecker.Contexts +import scala.util.chaining._ /** An 'extractor' can be a case class or an unapply or unapplySeq method. * @@ -205,15 +206,17 @@ trait PatternExpansion { if (isUnapply || equivConstrParamTypes.isEmpty) notRepeated else { val lastParamTp = equivConstrParamTypes.last - if (isUnapplySeq) { - val elementTp = elementTypeFromApply(lastParamTp) - (elementTp, scalaRepeatedType(elementTp)) - } else { + if (isUnapplySeq) + elementTypeFromApply(lastParamTp) match { + case NoType => notRepeated.tap(_ => + err(s"${unapplyResultType()} is not a valid result type of an unapplySeq method of an extractor.")) + case elementTp => (elementTp, scalaRepeatedType(elementTp)) + } + else definitions.elementType(RepeatedParamClass, lastParamTp) match { - case NoType => notRepeated + case NoType => notRepeated case elementTp => (elementTp, lastParamTp) } - } } // errors & warnings @@ -248,10 +251,11 @@ trait PatternExpansion { // emit error/warning on mismatch if (isStar && !isSeq) err("Star pattern must correspond with varargs or unapplySeq") - else if (equivConstrParamTypes == List(NoType) && unapplyResultType().isNothing) - err(s"${fun.symbol.owner} can't be used as an extractor: The result type of an ${fun.symbol.name} method may not be Nothing") else if (equivConstrParamTypes == List(NoType)) - err(s"The result type of an ${fun.symbol.name} method must contain a member `get` to be used as an extractor pattern, no such member exists in ${unapplyResultType()}") + if (unapplyResultType().isNothing) + err(s"${fun.symbol.owner} can't be used as an extractor: The result type of an ${fun.symbol.name} method may not be Nothing") + else + err(s"The result type of an ${fun.symbol.name} method must contain a member `get` to be used as an extractor pattern, no such member exists in ${unapplyResultType()}") else if (elementArity < 0) arityError("not enough") else if (elementArity > 0 && !isSeq) arityError("too many") else if (settings.warnStarsAlign && isSeq && productArity > 0 && elementArity > 0) warn( diff --git a/test/files/neg/t8127a.check b/test/files/neg/t8127a.check index 
4518affe0ae3..764ab5310ff8 100644 --- a/test/files/neg/t8127a.check +++ b/test/files/neg/t8127a.check @@ -1,4 +1,4 @@ -t8127a.scala:7: error: The result type of an unapplySeq method must contain a member `get` to be used as an extractor pattern, no such member exists in Seq[Any] +t8127a.scala:7: error: Seq[Any] is not a valid result type of an unapplySeq method of an extractor. case H(v) => ^ 1 error diff --git a/test/files/neg/t8127a.scala b/test/files/neg/t8127a.scala index c05facdac1c4..e1bd1559667a 100644 --- a/test/files/neg/t8127a.scala +++ b/test/files/neg/t8127a.scala @@ -7,6 +7,9 @@ object Test { case H(v) => case _ => } - // now: too many patterns for object H offering Boolean: expected 0, found 1 - // was: result type Seq[_$2] of unapplySeq defined in method unapplySeq in object H does not conform to Option[_] } + // later: OK + // then: Seq[Any] is not a valid result type of an unapplySeq method of an extractor. + // and: The result type of an unapplySeq method must contain a member `get` to be used as an extractor pattern, no such member exists in Seq[Any] + // now: too many patterns for object H offering Boolean: expected 0, found 1 + // was: result type Seq[_$2] of unapplySeq defined in method unapplySeq in object H does not conform to Option[_] diff --git a/test/files/neg/t9538.check b/test/files/neg/t9538.check new file mode 100644 index 000000000000..17458daf5d32 --- /dev/null +++ b/test/files/neg/t9538.check @@ -0,0 +1,13 @@ +t9538.scala:9: error: Option[String] is not a valid result type of an unapplySeq method of an extractor. + def f(x: Any) = x match { case X(y, z) => } + ^ +t9538.scala:10: error: Option[(Int, Int, Int)] is not a valid result type of an unapplySeq method of an extractor. + def g0(x: Any) = x match { case Y() => } + ^ +t9538.scala:11: error: Option[(Int, Int, Int)] is not a valid result type of an unapplySeq method of an extractor. + def g1(x: Any) = x match { case Y(y) => } + ^ +t9538.scala:12: error: Option[(Int, Int, Int)] is not a valid result type of an unapplySeq method of an extractor. 
+ def g2(x: Any) = x match { case Y(y,z) => } + ^ +4 errors diff --git a/test/files/neg/t9538.scala b/test/files/neg/t9538.scala new file mode 100644 index 000000000000..f64ef9552dd8 --- /dev/null +++ b/test/files/neg/t9538.scala @@ -0,0 +1,13 @@ + + + +object X { def unapplySeq(x: Any): Option[String] = { Some(x.toString.toUpperCase) }} + +object Y { def unapplySeq(v: Any) = Option((1, 2, 3)) } + +object Test extends App { + def f(x: Any) = x match { case X(y, z) => } + def g0(x: Any) = x match { case Y() => } + def g1(x: Any) = x match { case Y(y) => } + def g2(x: Any) = x match { case Y(y,z) => } +} From f1eca7b188a462a793ae84b5e9545cc236740687 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Wed, 28 Jul 2021 17:03:10 +0200 Subject: [PATCH 330/769] fix scala/bug#12420: complete LambdaType param symbols lazily --- .../scala/tools/nsc/tasty/TreeUnpickler.scala | 62 ++-- .../tools/nsc/tasty/bridge/ContextOps.scala | 53 ++- .../tools/nsc/tasty/bridge/FlagOps.scala | 11 +- .../tools/nsc/tasty/bridge/TastyCore.scala | 1 - .../tools/nsc/tasty/bridge/TypeOps.scala | 346 +++++++++++------- .../files/run/tasty-lambdatype-strawman.check | 3 + .../files/run/tasty-lambdatype-strawman.scala | 168 +++++++++ .../src-2/dottyi3149/TestFooChildren.scala | 2 +- .../run/src-2/tastytest/TestIssue12420.scala | 19 + .../tastytest/issue12420/ShareLambda.scala | 14 + .../src-3/tastytest/issue12420/absurd.scala | 10 + .../src-3/tastytest/issue12420/hasId.scala | 15 + 12 files changed, 523 insertions(+), 181 deletions(-) create mode 100644 test/files/run/tasty-lambdatype-strawman.check create mode 100644 test/files/run/tasty-lambdatype-strawman.scala create mode 100644 test/tasty/run/src-2/tastytest/TestIssue12420.scala create mode 100644 test/tasty/run/src-3/tastytest/issue12420/ShareLambda.scala create mode 100644 test/tasty/run/src-3/tastytest/issue12420/absurd.scala create mode 100644 test/tasty/run/src-3/tastytest/issue12420/hasId.scala diff --git a/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala b/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala index 0de4fdaa1bd0..bab7e789ddfa 100644 --- a/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala +++ b/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala @@ -21,6 +21,7 @@ import scala.collection.mutable import scala.reflect.io.AbstractFile import scala.reflect.internal.Variance import scala.util.chaining._ +import scala.collection.immutable.ArraySeq /**`TreeUnpickler` is responsible for traversing all trees in the "ASTs" section of a TASTy file, which represent the * definitions inside the classfile associated with the root class/module. `TreeUnpickler` will enter the public api @@ -220,7 +221,7 @@ class TreeUnpickler[Tasty <: TastyUniverse]( /** Read names in an interleaved sequence of types/bounds and (parameter) names, * possibly followed by a sequence of modifiers. 
*/ - def readParamNamesAndMods(end: Addr): (List[TastyName], TastyFlagSet) = { + def readParamNamesAndMods(end: Addr): (ArraySeq[TastyName], TastyFlagSet) = { val names = collectWhile(currentAddr != end && !isModifierTag(nextByte)) { skipTree() @@ -234,17 +235,23 @@ class TreeUnpickler[Tasty <: TastyUniverse]( case GIVEN => mods |= Given } } - (names, mods) + (names.to(ArraySeq), mods) } /** Read `n` parameter types or bounds which are interleaved with names */ - def readParamTypes[T <: Type](n: Int)(implicit ctx: Context): List[T] = { - if (n == 0) Nil - else { - val t = readType().asInstanceOf[T] - readNat() // skip name - t :: readParamTypes(n - 1) + def readParamTypes(ps: ArraySeq[Symbol])(implicit ctx: Context): ArraySeq[Type] = { + def inner(ps1: Iterator[Symbol], buf: mutable.ArrayBuffer[Type]): ArraySeq[Type] = { + if (ps1.isEmpty) buf.to(ArraySeq) + else { + val p = ps1.next() + val rest = ps1 + val localCtx = ctx.withOwner(p) + val t = readType()(localCtx) + readNat() // skip name + inner(rest, buf += t) + } } + inner(ps.iterator, new mutable.ArrayBuffer) } /** Read reference to definition and return symbol created at that definition */ @@ -332,18 +339,27 @@ class TreeUnpickler[Tasty <: TastyUniverse]( def readLengthType(): Type = { val end = readEnd() - def readMethodic[N <: TastyName] - (companionOp: TastyFlagSet => LambdaTypeCompanion[N], nameMap: TastyName => N)(implicit ctx: Context): Type = { + def readMethodic[N <: TastyName]( + factory: LambdaFactory[N], + parseFlags: FlagSets.FlagParser, + nameMap: TastyName => N + )(implicit ctx: Context): Type = { val result = typeAtAddr.getOrElse(start, { + // TODO [tasty]: can we share LambdaTypes/RecType/RefinedType safely + // under a new context owner? (aka when referenced by a `SHAREDtype`). + // So far this has been safe to do, but perhaps with macros comparing the + // owners of the symbols of PolyTypes maybe not? 
+ // one concrete example where TypeLambdaType is shared between two unrelated classes: + // - test/tasty/run/src-3/tastytest/issue12420/ShareLambda.scala val nameReader = fork nameReader.skipTree() // skip result val paramReader = nameReader.fork val (paramNames, mods) = nameReader.readParamNamesAndMods(end) - companionOp(mods)(paramNames.map(nameMap))( - pt => typeAtAddr(start) = pt, - () => paramReader.readParamTypes(paramNames.length), - () => readType() - ).tap(typeAtAddr(start) = _) + LambdaFactory.parse(factory, paramNames.map(nameMap), parseFlags(mods)(ctx))( + ps => paramReader.readParamTypes(ps), + () => readType(), + pt => typeAtAddr(start) = pt, // register the lambda so that we can access its parameters + ) }) goto(end) result @@ -382,18 +398,10 @@ class TreeUnpickler[Tasty <: TastyUniverse]( case ORtype => unionIsUnsupported case SUPERtype => defn.SuperType(readType(), readType()) case MATCHtype | MATCHCASEtype => matchTypeIsUnsupported - case POLYtype => readMethodic(Function.const(PolyType), _.toTypeName) - case METHODtype => - def companion(mods0: TastyFlagSet) = { - var mods = EmptyTastyFlags - if (mods0.is(Erased)) erasedRefinementIsUnsupported[Unit] - if (mods0.isOneOf(Given | Implicit)) mods |= Implicit - methodTypeCompanion(mods) - } - readMethodic(companion, id) - case TYPELAMBDAtype => readMethodic(Function.const(HKTypeLambda), _.toTypeName) - case PARAMtype => // reference to a type parameter within a LambdaType - readTypeRef().typeParams(readNat()).ref + case POLYtype => readMethodic(PolyTypeLambda, FlagSets.addDeferred, _.toTypeName) + case METHODtype => readMethodic(MethodTermLambda, FlagSets.parseMethod, id) + case TYPELAMBDAtype => readMethodic(HKTypeLambda, FlagSets.addDeferred, _.toTypeName) + case PARAMtype => defn.ParamRef(readTypeRef(), readNat()) // reference to a parameter within a LambdaType } assert(currentAddr === end, s"$start $currentAddr $end ${astTagToString(tag)}") result diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala index 9abd7099169d..a9a263cee470 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala @@ -156,6 +156,11 @@ trait ContextOps { self: TastyUniverse => final case class TraceInfo[-T](query: String, qual: String, res: T => String, modifiers: List[String] = Nil) + trait TraceFrame { + def parent: TraceFrame + def id: String + } + /**Maintains state through traversal of a TASTy file, such as the outer scope of the defintion being traversed, the * traversal mode, and the root owners and source path for the TASTy file. 
* It also provides all operations for manipulation of the symbol table, such as creating/updating symbols and @@ -205,17 +210,17 @@ trait ContextOps { self: TastyUniverse => @inline final def trace[T](info: => TraceInfo[T])(op: => T): T = { - def withTrace(info: => TraceInfo[T], op: => T)(traceId: String): T = { - val i = info + def addInfo(i: TraceInfo[T], op: => T)(frame: TraceFrame): T = { + val id0 = frame.id val modStr = ( if (i.modifiers.isEmpty) "" else " " + green(i.modifiers.mkString("[", ",", "]")) ) - logImpl(s"${yellow(s"$traceId")} ${cyan(s"<<< ${i.query}:")} ${magenta(i.qual)}$modStr") - op.tap(eval => logImpl(s"${yellow(s"$traceId")} ${cyan(s">>>")} ${magenta(i.res(eval))}$modStr")) + logImpl(s"${yellow(id0)} ${cyan(s"<<< ${i.query}:")} ${magenta(i.qual)}$modStr") + op.tap(eval => logImpl(s"${yellow(id0)} ${cyan(s">>>")} ${magenta(i.res(eval))}$modStr")) } - if (u.settings.YdebugTasty) initialContext.addFrame(withTrace(info, op)) + if (u.settings.YdebugTasty) initialContext.subTrace(addInfo(info, op)) else op } @@ -282,6 +287,16 @@ trait ContextOps { self: TastyUniverse => ) } + final def newLambdaParameter(tname: TastyName, flags: TastyFlagSet, idx: Int, infoDb: Int => Type): Symbol = { + val flags1 = flags | Param + unsafeNewSymbol( + owner = owner, + name = tname, + flags = flags1, + info = defn.LambdaParamInfo(flags1, idx, infoDb) + ) + } + final def findRootSymbol(roots: Set[Symbol], name: TastyName): Option[Symbol] = { import TastyName.TypeName @@ -594,32 +609,32 @@ trait ContextOps { self: TastyUniverse => def mode: TastyMode = EmptyTastyMode def owner: Symbol = topLevelClass.owner - private class TraceFrame(val id: Int, val next: TraceFrame) { + private class TraceFrameImpl(val worker: Int, val parent: TraceFrameImpl) extends TraceFrame { var nextChild: Int = 0 - def show: String = { - val buf = mutable.ArrayDeque.empty[String] + val id: String = { + val buf = mutable.ArrayDeque.empty[Int] var cur = this - while (cur.id != -1) { - buf.prepend(cur.id.toString) - cur = cur.next + while (cur.worker != -1) { + buf.prepend(cur.worker) + cur = cur.parent } buf.mkString("[", " ", ")") } } - private[this] var _trace: TraceFrame = new TraceFrame(id = -1, next = null) + private[this] var _trace: TraceFrameImpl = new TraceFrameImpl(worker = -1, parent = null) - private[ContextOps] def addFrame[T](op: String => T): T = { - val oldFrame = _trace - val newFrame = new TraceFrame(id = oldFrame.nextChild, next = oldFrame) - _trace = newFrame - try op(newFrame.show) + private[ContextOps] def subTrace[T](op: TraceFrame => T): T = { + val parent = _trace + val child = new TraceFrameImpl(worker = parent.nextChild, parent) + _trace = child + try op(child) finally { - _trace = oldFrame - _trace.nextChild += 1 + parent.nextChild += 1 + _trace = parent } } diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/FlagOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/FlagOps.scala index cc49e5131a71..b7894f726465 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/FlagOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/FlagOps.scala @@ -29,11 +29,20 @@ trait FlagOps { self: TastyUniverse => | Enum | Infix | Open | ParamAlias | Invisible ) + type FlagParser = TastyFlagSet => Context => TastyFlagSet + + val addDeferred: FlagParser = flags => _ => flags | Deferred + val parseMethod: FlagParser = { mods0 => implicit ctx => + var mods = EmptyTastyFlags + if (mods0.is(Erased)) erasedRefinementIsUnsupported[Unit] + if (mods0.isOneOf(Given | Implicit)) mods |= Implicit + mods + } + object 
Creation { val ObjectDef: TastyFlagSet = Object | Lazy | Final | Stable val ObjectClassDef: TastyFlagSet = Object | Final val Default: u.FlagSet = newSymbolFlagSet(EmptyTastyFlags) - val BoundedType: u.FlagSet = newSymbolFlagSet(Deferred) } def withAccess(flags: TastyFlagSet, inheritedAccess: TastyFlagSet): TastyFlagSet = flags | (inheritedAccess & (Private | Local | Protected)) diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/TastyCore.scala b/src/compiler/scala/tools/nsc/tasty/bridge/TastyCore.scala index 6af38a66246d..01ca7a60fffe 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/TastyCore.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/TastyCore.scala @@ -34,6 +34,5 @@ abstract class TastyCore { self: TastyUniverse => private val Identity = (x: Any) => x def id[T]: T => T = Identity.asInstanceOf[T => T] - def map[T, U](ts: List[T], f: T => U): List[U] = if (f `eq` Identity) ts.asInstanceOf[List[U]] else ts.map(f) } diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala index dcddcbdc0d0f..a6145a026ccf 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala @@ -20,6 +20,8 @@ import scala.reflect.internal.Variance import scala.util.chaining._ import scala.collection.mutable +import scala.collection.immutable.ArraySeq + import scala.reflect.internal.Flags /**This layer adds factories that construct `scala.reflect` Types in the shapes that TASTy expects. @@ -169,6 +171,13 @@ trait TypeOps { self: TastyUniverse => private[bridge] def LocalSealedChildProxyInfo(parent: Symbol, tflags: TastyFlagSet)(implicit ctx: Context): Type = new LocalSealedChildProxyCompleter(parent, tflags) + private[bridge] def LambdaParamInfo( + tflags: TastyFlagSet, + idx: Int, + infoDb: Int => Type + )(implicit ctx: Context): Type = + new LambdaParamCompleter(tflags, idx, infoDb) + def OpaqueTypeToBounds(tpe: Type): (Type, Type) = tpe match { case u.PolyType(tparams, tpe) => val (bounds, alias) = OpaqueTypeToBounds(tpe) @@ -271,6 +280,10 @@ trait TypeOps { self: TastyUniverse => } } + + def ParamRef(binder: Type, idx: Int): Type = + binder.asInstanceOf[LambdaType].lambdaParams(idx).ref + } private[bridge] def mkRefinedTypeWith(parents: List[Type], clazz: Symbol, decls: u.Scope): Type = @@ -507,6 +520,16 @@ trait TypeOps { self: TastyUniverse => } } + private[TypeOps] final class LambdaParamCompleter( + flags: TastyFlagSet, + idx: Int, + infoDb: Int => Type, + )(implicit ctx: Context) + extends BaseTastyCompleter(flags) { + override def computeInfo(denot: Symbol)(implicit ctx: Context): Unit = + denot.info = infoDb(idx) + } + abstract class BaseTastyCompleter( final val tflags: TastyFlagSet )(implicit capturedCtx: Context) @@ -562,13 +585,6 @@ trait TypeOps { self: TastyUniverse => case res => res } - abstract class LambdaTypeCompanion[N <: TastyName] { - def factory(params: List[N])(registerCallback: Type => Unit, paramInfosOp: () => List[Type], resultTypeOp: () => Type)(implicit ctx: Context): LambdaType - - final def apply(params: List[N])(registerCallback: Type => Unit, paramInfosOp: () => List[Type], resultTypeOp: () => Type)(implicit ctx: Context): Type = - factory(params)(registerCallback, paramInfosOp, resultTypeOp).canonical - } - final class LambdaPolyType(typeParams: List[Symbol], val resType: Type) extends u.PolyType(typeParams, LambdaPolyType.addLower(resType)) { def toNested: u.PolyType = resType match { case _: u.TypeBounds => this @@ 
-604,189 +620,255 @@ trait TypeOps { self: TastyUniverse => if (sym.name == u.nme.CONSTRUCTOR) sym.owner.tpe else givenTp - private[TypeOps] type LambdaType = Type with Lambda - private[TypeOps] type TypeLambda = LambdaType with TypeLike - private[TypeOps] type TermLambda = LambdaType with TermLike + /** Lazy thread unsafe non-nullable value that can not be re-entered */ + private[bridge] final class SyncRef[A](private var compute: () => A) { + private var out: A = _ + private var entered: Boolean = false - private[TypeOps] trait TypeLike { self: Type with Lambda => - type ThisTName = TastyName.TypeName - type ThisName = u.TypeName - } - - private[TypeOps] trait TermLike { self: Type with Lambda => - type ThisTName = TastyName - type ThisName = u.TermName - type PInfo = Type + def apply(): A = { + if (entered) { + assert(out != null, "cyclic completion of SyncRef") + } + else { + entered = true + val result = compute() + compute = null + assert(result != null, "SyncRef is non-nullable") + out = result + } + out + } } - private[TypeOps] trait Lambda extends Product with Serializable { self: Type => - type ThisTName <: TastyName - type ThisName <: u.Name - type This <: Type + object MethodTermLambda extends TermLambdaFactory { - val paramNames: List[ThisName] - val paramInfos: List[Type] - val resType: Type + type ThisLambda = MethodTermLambda - def typeParams: List[Symbol] // deferred to final implementation - - final protected def validateThisLambda(): Unit = { - assert(resType.isComplete, self) - assert(paramInfos.length == paramNames.length, self) + protected def apply( + params: ArraySeq[TastyName], + flags: TastyFlagSet, + paramInfosOp: ArraySeq[Symbol] => ArraySeq[Type], + resultTypeOp: () => Type, + registerCallback: Type => Unit, + )(implicit ctx: Context): ThisLambda = { + new MethodTermLambda(params, paramInfosOp, resultTypeOp, flags, registerCallback) } - override final def productArity: Int = 2 - - override final def productElement(n: Int): Any = n match { - case 0 => paramNames - case 1 => resType - case _ => throw new IndexOutOfBoundsException(n.toString) - } + } - def canEqual(that: Any): Boolean = that.isInstanceOf[Lambda] + private[TypeOps] final class MethodTermLambda( + paramTNames: ArraySeq[TastyName], + paramInfosOp: ArraySeq[Symbol] => ArraySeq[Type], + resultTypeOp: () => Type, + flags: TastyFlagSet, + registerCallback: Type => Unit, + )(implicit ctx: Context) + extends TermLambda("MethodTermLambda")(paramTNames, paramInfosOp, resultTypeOp, flags)(registerCallback) { - def canonical: This + protected def canonical(ps: List[Symbol], res: Type): Type = u.MethodType(ps, res) - override final def equals(that: Any): Boolean = that match { - case that: Lambda => - (that.canEqual(self) - && that.paramNames == paramNames - && that.resType == resType) - case _ => false - } + override def canEqual(that: Any): Boolean = that.isInstanceOf[MethodTermLambda] } - object HKTypeLambda extends TypeLambdaCompanion { - def factory(params: List[TastyName.TypeName])(registerCallback: Type => Unit, - paramInfosOp: () => List[Type], resultTypeOp: () => Type)(implicit ctx: Context): LambdaType = - new HKTypeLambda(params)(registerCallback, paramInfosOp, resultTypeOp) - } + object HKTypeLambda extends TypeLambdaFactory { - object PolyType extends TypeLambdaCompanion { - def factory(params: List[TastyName.TypeName])(registerCallback: Type => Unit, - paramInfosOp: () => List[Type], resultTypeOp: () => Type)(implicit ctx: Context): LambdaType = - new PolyTypeLambda(params)(registerCallback, 
paramInfosOp, resultTypeOp) - } + type ThisLambda = HKTypeLambda - final class MethodTypeCompanion(defaultFlags: TastyFlagSet) extends TermLambdaCompanion { self => - def factory(params: List[TastyName])(registerCallback: Type => Unit, - paramInfosOp: () => List[Type], resultTypeOp: () => Type)(implicit ctx: Context): LambdaType = - new MethodTermLambda(params, defaultFlags)(registerCallback, paramInfosOp, resultTypeOp) + protected def apply( + params: ArraySeq[TastyName.TypeName], + flags: TastyFlagSet, + paramInfosOp: ArraySeq[Symbol] => ArraySeq[Type], + resultTypeOp: () => Type, + registerCallback: Type => Unit, + )(implicit ctx: Context): ThisLambda = { + new HKTypeLambda(params, flags, paramInfosOp, resultTypeOp, registerCallback) + } } - def recThis(tpe: Type): Type = tpe.asInstanceOf[RecType].recThis - def symOfTypeRef(tpe: Type): Symbol = tpe.asInstanceOf[u.TypeRef].sym + private[TypeOps] final class HKTypeLambda( + paramTNames: ArraySeq[TastyName.TypeName], + flags: TastyFlagSet, + paramInfosOp: ArraySeq[Symbol] => ArraySeq[Type], + resultTypeOp: () => Type, + registerCallback: Type => Unit + )(implicit ctx: Context) + extends TypeLambda("HKTypeLambda")(paramTNames, flags, paramInfosOp, resultTypeOp)(registerCallback) { - private[TypeOps] final class RecType(run: RecType => Type)(implicit ctx: Context) extends Type with Product { + final override protected def normaliseResult(resType: Type): Type = lambdaResultType(resType) - override val productPrefix = "RecType" - override val productArity = 2 + protected def canonical(ps: List[Symbol], res: Type): Type = new LambdaPolyType(ps, res) - val refinementClass = ctx.newRefinementClassSymbol - val recThis: Type = u.ThisType(refinementClass) - val parent: Type = run(this) + override def canEqual(that: Any): Boolean = that.isInstanceOf[HKTypeLambda] + } - def canEqual(that: Any): Boolean = that.isInstanceOf[RecType] - def productElement(n: Int): Any = n match { - case 0 => if (parent == null) "" else parent - case 1 => hashCode - case _ => throw new IndexOutOfBoundsException(n.toString) - } + object PolyTypeLambda extends TypeLambdaFactory { - override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef] - override def safeToString: String = s"RecType(rt @ $hashCode => ${if (parent == null) "" else parent})" + type ThisLambda = PolyTypeLambda + protected def apply( + params: ArraySeq[TastyName.TypeName], + flags: TastyFlagSet, + paramInfosOp: ArraySeq[Symbol] => ArraySeq[Type], + resultTypeOp: () => Type, + registerCallback: Type => Unit, + )(implicit ctx: Context): ThisLambda = { + new PolyTypeLambda(params, flags, paramInfosOp, resultTypeOp, registerCallback) + } } - def methodTypeCompanion(initialFlags: TastyFlagSet): MethodTypeCompanion = new MethodTypeCompanion(initialFlags) - - abstract class TermLambdaCompanion - extends LambdaTypeCompanion[TastyName] + private[TypeOps] final class PolyTypeLambda( + paramTNames: ArraySeq[TastyName.TypeName], + flags: TastyFlagSet, + paramInfosOp: ArraySeq[Symbol] => ArraySeq[Type], + resultTypeOp: () => Type, + registerCallback: Type => Unit + )(implicit ctx: Context) + extends TypeLambda("PolyTypeLambda")(paramTNames, flags, paramInfosOp, resultTypeOp)(registerCallback) { - abstract class TypeLambdaCompanion - extends LambdaTypeCompanion[TastyName.TypeName] + protected def canonical(ps: List[Symbol], res: Type): Type = u.PolyType(ps, res) - private[TypeOps] final class MethodTermLambda(paramTNames: List[TastyName], defaultFlags: TastyFlagSet)(registerCallback: MethodTermLambda => 
Unit, - paramInfosOp: () => List[Type], resultTypeOp: () => Type)(implicit ctx: Context) - extends Type with Lambda with TermLike { methodLambda => - type This = u.MethodType + override def canEqual(that: Any): Boolean = that.isInstanceOf[PolyTypeLambda] + } - val paramNames: List[u.TermName] = paramTNames.map(encodeTermName) + private[TypeOps] abstract class TypeLambda( + kind: String)( + paramTNames: ArraySeq[TastyName.TypeName], + flags: TastyFlagSet, + paramInfosOp: ArraySeq[Symbol] => ArraySeq[Type], + resultTypeOp: () => Type)( + registerCallback: Type => Unit + )(implicit ctx: Context) + extends LambdaType(kind)(paramTNames, paramInfosOp, resultTypeOp, flags)(registerCallback) { + final override def typeParams: List[Symbol] = lambdaParams.toList + final protected def normaliseParam(info: Type): Type = normaliseIfBounds(info) + } + + private[TypeOps] abstract class TermLambda( + kind: String)( + paramTNames: ArraySeq[TastyName], + paramInfosOp: ArraySeq[Symbol] => ArraySeq[Type], + resultTypeOp: () => Type, + flags: TastyFlagSet)( + registerCallback: Type => Unit + )(implicit ctx: Context) + extends LambdaType(kind)(paramTNames, paramInfosOp, resultTypeOp, flags)(registerCallback) { + final override def params: List[Symbol] = lambdaParams.toList + final protected def normaliseParam(info: Type): Type = info + } - override val productPrefix = "MethodTermLambda" + private[TypeOps] abstract class LambdaType( + kind: String)( + paramTNames: ArraySeq[TastyName], + paramInfosOp: ArraySeq[Symbol] => ArraySeq[Type], + resultTypeOp: () => Type, + flags: TastyFlagSet)( + registerCallback: Type => Unit + )(implicit ctx: Context) extends AbstractLambdaType(kind) { - registerCallback(this) + protected def normaliseParam(info: Type): Type + protected def normaliseResult(resType: Type): Type = resType - val paramInfos: List[Type] = paramInfosOp() + final val lambdaParams: ArraySeq[Symbol] = { + val paramInfoDb = new SyncRef(() => paramInfosOp(this.lambdaParams)) + def infoAt(idx: Int) = normaliseParam(paramInfoDb()(idx)) - override val params: List[Symbol] = paramNames.lazyZip(paramInfos).map { - case (name, argInfo) => - ctx.owner.newValueParameter(name, u.NoPosition, newSymbolFlagSet(defaultFlags)).setInfo(argInfo) + paramTNames.zipWithIndex.map { case (tname, idx) => + ctx.newLambdaParameter(tname, flags, idx, infoAt) + } } - val resType: Type = resultTypeOp() - - validateThisLambda() + registerCallback(this) - def canonical: u.MethodType = u.MethodType(params, resType) + final val resType: Type = normaliseResult(resultTypeOp()) - override def canEqual(that: Any): Boolean = that.isInstanceOf[MethodTermLambda] } - private[TypeOps] final class HKTypeLambda(paramTNames: List[TastyName.TypeName])(registerCallback: HKTypeLambda => Unit, - paramInfosOp: () => List[Type], resultTypeOp: () => Type)(implicit ctx: Context) - extends Type with Lambda with TypeLike { + private[TypeOps] abstract class AbstractLambdaType(override val productPrefix: String) + extends Type + with Product + with Serializable { - type This = LambdaPolyType - val paramNames: List[u.TypeName] = paramTNames.map(encodeTypeName) + def lambdaParams: ArraySeq[Symbol] + def resType: Type - override val productPrefix = "HKTypeLambda" + final override def etaExpand: Type = { + lambdaParams.foreach(_.info) // force locally + canonical(lambdaParams.toList, resType) + } - registerCallback(this) + protected def canonical(ps: List[Symbol], res: Type): Type - val paramInfos: List[Type] = paramInfosOp() + override final def productArity: Int = 2 - 
override val typeParams: List[Symbol] = paramNames.lazyZip(paramInfos).map { - case (name, bounds) => - val argInfo = normaliseIfBounds(bounds) - ctx.owner.newTypeParameter(name, u.NoPosition, FlagSets.Creation.BoundedType).setInfo(argInfo) + override final def productElement(n: Int): Any = n match { + case 0 => lambdaParams + case 1 => resType + case _ => throw new IndexOutOfBoundsException(n.toString) } - val resType: Type = lambdaResultType(resultTypeOp()) + override final def equals(that: Any): Boolean = that match { + case that: AbstractLambdaType => + (that.canEqual(self) + && that.lambdaParams == lambdaParams + && that.resType == resType) + case _ => false + } - validateThisLambda() + } - def canonical: LambdaPolyType = new LambdaPolyType(typeParams, resType) + abstract class LambdaFactory[N <: TastyName] { - override def canEqual(that: Any): Boolean = that.isInstanceOf[HKTypeLambda] - } + type ThisLambda <: LambdaType - private[TypeOps] final class PolyTypeLambda(paramTNames: List[TastyName.TypeName])(registerCallback: PolyTypeLambda => Unit, - paramInfosOp: () => List[Type], resultTypeOp: () => Type)(implicit ctx: Context) - extends Type with Lambda with TypeLike { + protected def apply( + params: ArraySeq[N], + flags: TastyFlagSet, + paramInfosOp: ArraySeq[Symbol] => ArraySeq[Type], + resultTypeOp: () => Type, + registerCallback: Type => Unit, + )(implicit ctx: Context): ThisLambda - type This = u.PolyType + } - val paramNames: List[u.TypeName] = paramTNames.map(encodeTypeName) + object LambdaFactory { + final def parse[N <: TastyName]( + factory: LambdaFactory[N], + params: ArraySeq[N], + flags: TastyFlagSet)( + paramInfosOp: ArraySeq[Symbol] => ArraySeq[Type], + resultTypeOp: () => Type, + registerCallback: Type => Unit, + )(implicit ctx: Context): Type = + factory(params, flags, paramInfosOp, resultTypeOp, registerCallback) + .etaExpand // turn the LambdaType into something the compiler understands + .tap(registerCallback) // we should replace the type at start as it has been expanded + } - override val productPrefix = "PolyTypeLambda" + abstract class TermLambdaFactory extends LambdaFactory[TastyName] + abstract class TypeLambdaFactory extends LambdaFactory[TastyName.TypeName] - registerCallback(this) + def recThis(tpe: Type): Type = tpe.asInstanceOf[RecType].recThis + def symOfTypeRef(tpe: Type): Symbol = tpe.asInstanceOf[u.TypeRef].sym - val paramInfos: List[Type] = paramInfosOp() + private[TypeOps] final class RecType(run: RecType => Type)(implicit ctx: Context) extends Type with Product { - override val typeParams: List[Symbol] = paramNames.lazyZip(paramInfos).map { - case (name, argInfo) => - ctx.owner.newTypeParameter(name, u.NoPosition, FlagSets.Creation.BoundedType).setInfo(argInfo) - } + override val productPrefix = "RecType" + override val productArity = 2 - val resType: Type = resultTypeOp() // potentially need to flatten? 
(probably not, happens in typer in dotty) + val refinementClass = ctx.newRefinementClassSymbol + val recThis: Type = u.ThisType(refinementClass) + val parent: Type = run(this) - validateThisLambda() + def canEqual(that: Any): Boolean = that.isInstanceOf[RecType] + def productElement(n: Int): Any = n match { + case 0 => if (parent == null) "" else parent + case 1 => hashCode + case _ => throw new IndexOutOfBoundsException(n.toString) + } - def canonical: u.PolyType = u.PolyType(typeParams, resType) + override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef] + override def safeToString: String = s"RecType(rt @ $hashCode => ${if (parent == null) "" else parent})" - override def canEqual(that: Any): Boolean = that.isInstanceOf[PolyTypeLambda] } } diff --git a/test/files/run/tasty-lambdatype-strawman.check b/test/files/run/tasty-lambdatype-strawman.check new file mode 100644 index 000000000000..8c9ed5c9770a --- /dev/null +++ b/test/files/run/tasty-lambdatype-strawman.check @@ -0,0 +1,3 @@ +PolyType([B => TypeBounds(NothingType, AppliedType(ParamRef(CC), [IntType])), CC => PolyType([_ => TypeBounds(NothingType, AnyType)], AnyType)], AppliedType(NamedRef(Bar), [ParamRef(B), ParamRef(CC)])) + +there was a cycle in creating Delta type constructor diff --git a/test/files/run/tasty-lambdatype-strawman.scala b/test/files/run/tasty-lambdatype-strawman.scala new file mode 100644 index 000000000000..67afd04a06e4 --- /dev/null +++ b/test/files/run/tasty-lambdatype-strawman.scala @@ -0,0 +1,168 @@ +import collection.immutable.ArraySeq + +object Test { + + def main(args: Array[String]): Unit = { + + val BarTypeConstructor = // [B <: CC[Int], CC[_]] => Bar[B, CC] + PolyType.from( + params = List( + "B" -> (hk => TypeBounds.upper(AppliedType(hk.ref(1), IntType :: Nil))), + "CC" -> (hk => PolyType.from(List("_" -> (_ => TypeBounds.upper(AnyType))), hk => AnyType)) + ), + res = hk => AppliedType(NamedRef("Bar"), hk.ref(0) :: hk.ref(1) :: Nil) + ) + + println(BarTypeConstructor.debug) + println() + + try { + val DeltaTypeConstructor = // [B <: CC[[I <: B] =>> Any], CC[_[_ <: B]]] =>> Delta[B, CC] + PolyType.from( + params = List( + "B" -> (hk => + TypeBounds.upper( + AppliedType( + tycon = hk.ref(1), + args = PolyType.from(List("I" -> (_ => TypeBounds.upper(hk.ref(0)))), _ => AnyType) :: Nil + ) + ) + ), + "CC" -> (hk => + PolyType.from( + params = List( + "_" -> (_ => + PolyType.from( + params = List( + "_" -> (_ => + // force a cyclic completion - this type is illegal in Dotty + // a completion would be needed here to check the bounds of `CC` + TypeBounds.upper({val ref = hk.ref(0); ref.underlying; ref}) + ) + ), + res = hk => AnyType + ) + ) + ), + res = hk => AnyType + ) + ) + ), + res = hk => AppliedType(NamedRef("Delta"), hk.ref(0) :: hk.ref(1) :: Nil) + ) + } catch { + case err: AssertionError => + assert(err.getMessage.contains("cyclic completion of SyncRef")) + println("there was a cycle in creating Delta type constructor") + } + } +} + +final class SyncRef[A](private var compute: () => A) { + private var out: A = _ + private var entered: Boolean = false + + def apply(): A = { + if (entered) { + assert(out != null, "cyclic completion of SyncRef") + } + else { + entered = true + val result = compute() + compute = null + assert(result != null, "SyncRef is non-nullable") + out = result + } + out + } +} + +sealed abstract class TypeOrCompleter { + def debug: String = this match { + case p: Product => s"${p.productPrefix}${ + def iter(it: Iterator[Any], s: String = "(", e: String = ")"): 
String = + it.map { + case t: Type => t.debug + case t: Iterable[u] => iter(t.iterator, s = "[", e = "]") + case a => a.toString + }.mkString(s, ", ", e) + val it = p.productIterator + if (!it.hasNext) "" else iter(it) + }" + case _ => toString + } +} + +abstract class Completer extends TypeOrCompleter { + def complete(sym: Symbol): Unit +} + +abstract class Type extends TypeOrCompleter { + def underlying: Type = this +} + +class Symbol(val name: String, private var myInfoOrCompleter: TypeOrCompleter) { self => + + def infoOrCompleter = myInfoOrCompleter + + def info_=(tp: Type): Unit = + myInfoOrCompleter = tp + + def info: Type = myInfoOrCompleter match { + case c: Completer => + c.complete(self) + info + case t: Type => t + } + + override def toString = s"$name => ${infoOrCompleter.debug}" + +} + +case class ParamRef(symbol: Symbol) extends Type { + override def underlying: Type = symbol.info + override def debug: String = s"ParamRef(${symbol.name})" +} + +case class PolyType(params: List[Symbol], resultType: Type) extends Type +case class AppliedType(tycon: Type, args: List[Type]) extends Type +case class TypeBounds(lo: Type, hi: Type) extends Type +object TypeBounds { + def upper(hi: Type) = TypeBounds(NothingType, hi) +} +case object IntType extends Type +case object AnyType extends Type +case object NothingType extends Type +case class NamedRef(fullname: String) extends Type + +object PolyType { + def from(params: List[(String, HKTypeLambda => Type)], res: HKTypeLambda => Type): PolyType = { + val (names, infos0) = params.to(ArraySeq).unzip + val infos = (hk: HKTypeLambda) => () => infos0.map { case op => op(hk) } + new HKTypeLambda(names, infos, res).underlying + } +} + +class HKTypeLambda(paramNames: ArraySeq[String], paramInfosOp: HKTypeLambda => () => ArraySeq[Type], resOp: HKTypeLambda => Type) { thisLambda => + + final val lambdaParams = { + val paramInfoDb = new SyncRef(paramInfosOp(thisLambda)) + paramNames.zipWithIndex.map { case (name, idx) => + new Symbol(name, new Completer { + def complete(sym: Symbol): Unit = { + sym.info = paramInfoDb()(idx) + } + }) + } + } + + final val resType = resOp(thisLambda) + + def ref(idx: Int): ParamRef = new ParamRef(lambdaParams(idx)) + + def underlying: PolyType = { + lambdaParams.foreach(_.info) + new PolyType(lambdaParams.toList, resType) + } + +} diff --git a/test/tasty/pos/src-2/dottyi3149/TestFooChildren.scala b/test/tasty/pos/src-2/dottyi3149/TestFooChildren.scala index d15b84dadfd2..4121d1d869da 100644 --- a/test/tasty/pos/src-2/dottyi3149/TestFooChildren.scala +++ b/test/tasty/pos/src-2/dottyi3149/TestFooChildren.scala @@ -6,7 +6,7 @@ import tastytest._ object TestFooChildren { compiletimeHasNestedChildren[Foo]( "dottyi3149.Foo.Bar", - // "dottyi3149.Foo.dottyi3149$Foo$$localSealedChildProxy", // workaround to represent "dottyi3149.Test.Bar$1", + "dottyi3149.Foo.dottyi3149$Foo$$localSealedChildProxy", // workaround to represent "dottyi3149.Test.Bar$1",k "dottyi3149.Test.O.Bar", "dottyi3149.Test.C.Bar" ) diff --git a/test/tasty/run/src-2/tastytest/TestIssue12420.scala b/test/tasty/run/src-2/tastytest/TestIssue12420.scala new file mode 100644 index 000000000000..d420527b6148 --- /dev/null +++ b/test/tasty/run/src-2/tastytest/TestIssue12420.scala @@ -0,0 +1,19 @@ +package tastytest + +import issue12420._ +import issue12420.{ShareLambda => sl} + +object TestIssue12420 extends Suite("TestIssue12420") { + + def foo = new Foo + def eta = new Eta + + test(assert(foo.bar.id.id == "Foo")) + + test(foo.bar match { case User(UserId(id: 
String)) => assert(id == "Foo") }) + + test(assert(eta.inner == Boxxy.default)) + + test(assert(new sl.Foo[sl.Bar].foo(new sl.Bar[List]) == "Bar")) + +} diff --git a/test/tasty/run/src-3/tastytest/issue12420/ShareLambda.scala b/test/tasty/run/src-3/tastytest/issue12420/ShareLambda.scala new file mode 100644 index 000000000000..dc64b3889de7 --- /dev/null +++ b/test/tasty/run/src-3/tastytest/issue12420/ShareLambda.scala @@ -0,0 +1,14 @@ +package tastytest.issue12420 + +object ShareLambda { + + class Foo[K[F[X] <: List[X]]] { + def foo[F[X] <: List[X]](x: K[F]): String = x.toString() + } + + // `F[X] <: List[X]` is structurally shared in TASTy and defined in `Foo.K` + class Bar[F[X] <: List[X]] { + override def toString(): String = "Bar" + } + +} diff --git a/test/tasty/run/src-3/tastytest/issue12420/absurd.scala b/test/tasty/run/src-3/tastytest/issue12420/absurd.scala new file mode 100644 index 000000000000..5bc01826c22f --- /dev/null +++ b/test/tasty/run/src-3/tastytest/issue12420/absurd.scala @@ -0,0 +1,10 @@ +package tastytest.issue12420 + +class Boxxy[I <: Int, B <: Boxxy[I, B]] + +object Boxxy { + object default extends Boxxy[0, default.type] +} + +class Qux[I <: Int, B <: Boxxy[I, B]](val inner: B) +class Eta extends Qux(Boxxy.default) diff --git a/test/tasty/run/src-3/tastytest/issue12420/hasId.scala b/test/tasty/run/src-3/tastytest/issue12420/hasId.scala new file mode 100644 index 000000000000..4a883ec7ede2 --- /dev/null +++ b/test/tasty/run/src-3/tastytest/issue12420/hasId.scala @@ -0,0 +1,15 @@ +package tastytest.issue12420 + +trait HasId[+K] { + def id: K +} + +trait Id[+T, K] { + def id: K +} + +case class UserId(id: String) extends Id[User, String] +case class User(id: UserId) extends HasId[UserId] + +class Bar[A <: HasId[Id[A, String]]](val bar: A) +class Foo extends Bar(User(UserId("Foo"))) From 782888a96e56cafc93dfc15f4c2402a1ce3b7076 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 4 Aug 2021 11:17:34 +1000 Subject: [PATCH 331/769] Align 'show type/AST' help with actual keybinding --- .../scala/tools/nsc/interpreter/shell/LoopCommands.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/repl-frontend/scala/tools/nsc/interpreter/shell/LoopCommands.scala b/src/repl-frontend/scala/tools/nsc/interpreter/shell/LoopCommands.scala index 07c9b8da8d95..863d9d1ee843 100644 --- a/src/repl-frontend/scala/tools/nsc/interpreter/shell/LoopCommands.scala +++ b/src/repl-frontend/scala/tools/nsc/interpreter/shell/LoopCommands.scala @@ -94,7 +94,7 @@ trait LoopCommands { echo("") echo("Useful default key bindings:") echo(" TAB code completion") - echo(" CTRL-SHIFT-T type at cursor, hit again to see the code with all types/implicits inferred.") + echo(" CTRL-T type at cursor, hit again to see the code with all types/implicits inferred.") } def ambiguousError(cmd: String): Result = { matchingCommands(cmd) match { From ddac496dfd75b2000753c46f522fa7ebc20acdfc Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 4 Aug 2021 14:48:06 +1000 Subject: [PATCH 332/769] Change show-type shortcut to ctrl-alt-T --- .../scala/tools/nsc/interpreter/jline/Reader.scala | 2 +- .../scala/tools/nsc/interpreter/shell/LoopCommands.scala | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/repl-frontend/scala/tools/nsc/interpreter/jline/Reader.scala b/src/repl-frontend/scala/tools/nsc/interpreter/jline/Reader.scala index bff410b8ded6..55040223ebba 100644 --- a/src/repl-frontend/scala/tools/nsc/interpreter/jline/Reader.scala +++ 
b/src/repl-frontend/scala/tools/nsc/interpreter/jline/Reader.scala @@ -185,7 +185,7 @@ object Reader { // VIINS, VICMD, EMACS val keymap = if (config.viMode) VIINS else EMACS reader.getKeyMaps.put(MAIN, reader.getKeyMaps.get(keymap)); - keyMap.bind(new Reference(ScalaShowType.Name), KeyMap.ctrl('T')) + keyMap.bind(new Reference(ScalaShowType.Name), KeyMap.alt(KeyMap.ctrl('t'))) } def secure(p: java.nio.file.Path): Unit = { try scala.reflect.internal.util.OwnerOnlyChmod.chmodFileOrCreateEmpty(p) diff --git a/src/repl-frontend/scala/tools/nsc/interpreter/shell/LoopCommands.scala b/src/repl-frontend/scala/tools/nsc/interpreter/shell/LoopCommands.scala index 863d9d1ee843..49c985dfdd78 100644 --- a/src/repl-frontend/scala/tools/nsc/interpreter/shell/LoopCommands.scala +++ b/src/repl-frontend/scala/tools/nsc/interpreter/shell/LoopCommands.scala @@ -94,7 +94,7 @@ trait LoopCommands { echo("") echo("Useful default key bindings:") echo(" TAB code completion") - echo(" CTRL-T type at cursor, hit again to see the code with all types/implicits inferred.") + echo(" CTRL-ALT-T show type at cursor, hit again to show code with types/implicits inferred.") } def ambiguousError(cmd: String): Result = { matchingCommands(cmd) match { From 34f1d09c99af8fb5b3c4bea6aa69c2e4ceae63de Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 5 Aug 2021 10:47:50 +1000 Subject: [PATCH 333/769] Align JNA version with that used in by latest jline --- versions.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/versions.properties b/versions.properties index 0bb7a75f549e..0185ae79d850 100644 --- a/versions.properties +++ b/versions.properties @@ -10,4 +10,4 @@ scala-asm.version=9.1.0-scala-1 # jna.version must be updated together with jline-terminal-jna jline.version=3.20.0 -jna.version=5.3.1 +jna.version=5.8.0 From a85cbf4e77f21eeb2922997b4c81c6cf72750095 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Doeraene?= Date: Fri, 6 Aug 2021 13:48:13 +0200 Subject: [PATCH 334/769] Deprecate ad hoc group names with Regex and .r The deprecation was first proposed in https://github.com/scala/scala/pull/4990 but had been rejected because Scala.js did not support inline group names. Now that Scala.js 1.7.0 has been released with full inline group name support, there is no reason to perpetuate this API. Unfortunately, we cannot actually put `@deprecated` on the constructor of Regex with group names, since there is no alternative that does not take any group name. We *could* deprecate it anyway, with the replacement being to use `.r`, but perhaps that goes a bit too far. --- src/library/scala/collection/StringOps.scala | 15 ++++++--- src/library/scala/util/matching/Regex.scala | 34 ++++++++++++++------ test/files/run/t5045.scala | 4 +-- test/scalacheck/t2460.scala | 6 ++-- 4 files changed, 39 insertions(+), 20 deletions(-) diff --git a/src/library/scala/collection/StringOps.scala b/src/library/scala/collection/StringOps.scala index 347282e35717..42a06f6e7ce4 100644 --- a/src/library/scala/collection/StringOps.scala +++ b/src/library/scala/collection/StringOps.scala @@ -158,13 +158,13 @@ object StringOps { } /** Provides extension methods for strings. - * + * * Some of these methods treat strings as a plain collection of [[Char]]s * without any regard for Unicode handling. Unless the user takes Unicode * handling in to account or makes sure the strings don't require such handling, * these methods may result in unpaired or invalidly paired surrogate code * units. 
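// Illustrative sketch, not part of the patch: what "unicode-unaware" means in
// practice for these Char-level methods, using a supplementary character that
// needs two Char code units (a surrogate pair).
object UnicodeUnawareSketch {
  val s = "ab\uD83D\uDE00"  // "ab" followed by one emoji, stored as a surrogate pair
  val r = s.reverse         // reverses plain Chars, so the pair ends up out of order
  // s has 3 code points but r has 4: the reversed surrogates no longer form a pair
  val brokenUp = java.lang.Character.codePointCount(r, 0, r.length) // 4
}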
- * + * * @define unicodeunaware This method treats a string as a plain sequence of * Char code units and makes no attempt to keep * surrogate pairs or codepoint sequences together. @@ -848,9 +848,13 @@ final class StringOps(private val s: String) extends AnyVal { /** You can follow a string with `.r`, turning it into a `Regex`. E.g. * - * `"""A\w*""".r` is the regular expression for identifiers starting with `A`. + * `"""A\w*""".r` is the regular expression for ASCII-only identifiers starting with `A`. + * + * `"""(?\d\d)-(?\d\d)-(?\d\d\d\d)""".r` matches dates + * and provides its subcomponents through groups named "month", "day" and + * "year". */ - def r: Regex = r() + def r: Regex = new Regex(s) /** You can follow a string with `.r(g1, ... , gn)`, turning it into a `Regex`, * with group names g1 through gn. @@ -861,6 +865,7 @@ final class StringOps(private val s: String) extends AnyVal { * * @param groupNames The names of the groups in the pattern, in the order they appear. */ + @deprecated("use inline group names like (?X) instead", "2.13.7") def r(groupNames: String*): Regex = new Regex(s, groupNames: _*) /** @@ -1430,7 +1435,7 @@ final class StringOps(private val s: String) extends AnyVal { * * @param f the 'split function' mapping the elements of this string to an [[scala.util.Either]] * - * @return a pair of strings: the first one made of those characters returned by `f` that were wrapped in [[scala.util.Left]], + * @return a pair of strings: the first one made of those characters returned by `f` that were wrapped in [[scala.util.Left]], * and the second one made of those wrapped in [[scala.util.Right]]. */ def partitionMap(f: Char => Either[Char,Char]): (String, String) = { diff --git a/src/library/scala/util/matching/Regex.scala b/src/library/scala/util/matching/Regex.scala index 2b8bc69c07c6..eadb9170a192 100644 --- a/src/library/scala/util/matching/Regex.scala +++ b/src/library/scala/util/matching/Regex.scala @@ -219,15 +219,18 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends * val namedYears = for (m <- namedDate findAllMatchIn dates) yield m group "year" * }}} * - * Group names supplied to the constructor are preferred to inline group names - * when retrieving matched groups by name. Not all platforms support inline names. + * Inline group names are preferred over group names supplied to the constructor + * when retrieving matched groups by name. Group names supplied to the constructor + * should be considered deprecated. * * This constructor does not support options as flags, which must be - * supplied as inline flags in the pattern string: `(?idmsux-idmsux)`. + * supplied as inline flags in the pattern string: `(?idmsuxU)`. * * @param regex The regular expression to compile. * @param groupNames Names of capturing groups. */ + // we cannot add the alternative `def this(regex: String)` in a forward binary compatible way: + // @deprecated("use inline group names like (?X) instead", "2.13.7") def this(regex: String, groupNames: String*) = this(Pattern.compile(regex), groupNames: _*) /** Tries to match a [[java.lang.CharSequence]]. 
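// Illustrative sketch, not part of the patch: migrating from ad hoc group names
// passed to `.r(...)` (or to the Regex constructor) to inline group names, which
// is what the new deprecation messages point to.
object GroupNameMigrationSketch {
  val oldStyle = """(\d{4})-(\d{2})-(\d{2})""".r("year", "month", "day")   // deprecated by this change
  val newStyle = """(?<year>\d{4})-(?<month>\d{2})-(?<day>\d{2})""".r      // preferred: names live in the pattern
  val year = newStyle.findFirstMatchIn("2021-08-06").map(_.group("year"))  // Some("2021")
}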
@@ -396,7 +399,7 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends def hasNext = matchIterator.hasNext def next(): Match = { matchIterator.next() - new Match(matchIterator.source, matchIterator.matcher, matchIterator.groupNames).force + new Match(matchIterator.source, matchIterator.matcher, matchIterator._groupNames).force } } } @@ -621,6 +624,7 @@ object Regex { val source: CharSequence /** The names of the groups, or an empty sequence if none defined */ + @deprecated("groupNames does not include inline group names, and should not be used anymore", "2.13.7") val groupNames: Seq[String] /** The number of capturing groups in the pattern. @@ -687,7 +691,11 @@ object Regex { if (end(i) >= 0) source.subSequence(end(i), source.length) else null - private[this] lazy val nameToIndex: Map[String, Int] = Map[String, Int]() ++ ("" :: groupNames.toList).zipWithIndex + @scala.annotation.nowarn("msg=deprecated") + private def groupNamesNowarn: Seq[String] = groupNames + + private[this] lazy val nameToIndex: Map[String, Int] = + Map[String, Int]() ++ ("" :: groupNamesNowarn.toList).zipWithIndex /** Returns the group with the given name. * @@ -700,7 +708,7 @@ object Regex { * @throws IllegalArgumentException if the requested group name is not defined */ def group(id: String): String = ( - if (groupNames.isEmpty) + if (groupNamesNowarn.isEmpty) matcher group id else nameToIndex.get(id) match { @@ -716,7 +724,10 @@ object Regex { /** Provides information about a successful match. */ class Match(val source: CharSequence, protected[matching] val matcher: Matcher, - val groupNames: Seq[String]) extends MatchData { + _groupNames: Seq[String]) extends MatchData { + + @deprecated("groupNames does not include inline group names, and should not be used anymore", "2.13.7") + val groupNames: Seq[String] = _groupNames /** The index of the first matched character. */ val start: Int = matcher.start @@ -791,9 +802,12 @@ object Regex { * * @see [[java.util.regex.Matcher]] */ - class MatchIterator(val source: CharSequence, val regex: Regex, val groupNames: Seq[String]) + class MatchIterator(val source: CharSequence, val regex: Regex, private[Regex] val _groupNames: Seq[String]) extends AbstractIterator[String] with Iterator[String] with MatchData { self => + @deprecated("groupNames does not include inline group names, and should not be used anymore", "2.13.7") + val groupNames: Seq[String] = _groupNames + protected[Regex] val matcher = regex.pattern.matcher(source) // 0 = not yet matched, 1 = matched, 2 = advanced to match, 3 = no more matches @@ -855,14 +869,14 @@ object Regex { /** Convert to an iterator that yields MatchData elements instead of Strings. */ def matchData: Iterator[Match] = new AbstractIterator[Match] { def hasNext = self.hasNext - def next() = { self.next(); new Match(source, matcher, groupNames).force } + def next() = { self.next(); new Match(source, matcher, _groupNames).force } } /** Convert to an iterator that yields MatchData elements instead of Strings and has replacement support. 
*/ private[matching] def replacementData = new AbstractIterator[Match] with Replacement { def matcher = self.matcher def hasNext = self.hasNext - def next() = { self.next(); new Match(source, matcher, groupNames).force } + def next() = { self.next(); new Match(source, matcher, _groupNames).force } } } diff --git a/test/files/run/t5045.scala b/test/files/run/t5045.scala index a539e3a4cb19..994469f01303 100644 --- a/test/files/run/t5045.scala +++ b/test/files/run/t5045.scala @@ -4,8 +4,8 @@ object Test extends App { import scala.util.matching.{ Regex, UnanchoredRegex } val dateP1 = """(\d\d\d\d)-(\d\d)-(\d\d)""".r.unanchored - val dateP2 = """(\d\d\d\d)-(\d\d)-(\d\d)""".r("year", "month", "day").unanchored - val dateP3 = new Regex("""(\d\d\d\d)-(\d\d)-(\d\d)""", "year", "month", "day") with UnanchoredRegex + val dateP2 = """(?\d\d\d\d)-(?\d\d)-(?\d\d)""".r.unanchored + val dateP3 = new Regex("""(?\d\d\d\d)-(?\d\d)-(?\d\d)""") with UnanchoredRegex val yearStr = "2011" val dateStr = List(yearStr,"07","15").mkString("-") diff --git a/test/scalacheck/t2460.scala b/test/scalacheck/t2460.scala index 81941a33261f..40c8fb87cd61 100644 --- a/test/scalacheck/t2460.scala +++ b/test/scalacheck/t2460.scala @@ -8,15 +8,15 @@ object SI2460Test extends Properties("Regex : Ticket 2460") { val vowel = Gen.oneOf("a", "z") val numberOfMatch = forAll(vowel) { - (s: String) => "\\s*([a-z])\\s*".r("data").findAllMatchIn((1 to 20).map(_ => s).mkString).size == 20 + (s: String) => "\\s*([a-z])\\s*".r.findAllMatchIn((1 to 20).map(_ => s).mkString).size == 20 } val numberOfGroup = forAll(vowel) { - (s: String) => "\\s*([a-z])\\s*([a-z])\\s*".r("data").findAllMatchIn((1 to 20).map(_ => s).mkString).next().groupCount == 2 + (s: String) => "\\s*([a-z])\\s*([a-z])\\s*".r.findAllMatchIn((1 to 20).map(_ => s).mkString).next().groupCount == 2 } val nameOfGroup = forAll(vowel) { - (s: String) => "([a-z])".r("data").findAllMatchIn(s).next().group("data") == s + (s: String) => "(?[a-z])".r.findAllMatchIn(s).next().group("data") == s } val tests = List( From 90b898626a0f5b0e4f4f76b9b82639dfb01a1ddc Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Fri, 6 Aug 2021 06:14:46 -0700 Subject: [PATCH 335/769] Clean up junit test --- .../scala/tools/testkit/AssertUtil.scala | 3 + .../scala/util/matching/CharRegexTest.scala | 56 +++++++++---------- .../junit/scala/util/matching/RegexTest.scala | 42 +++++++------- 3 files changed, 51 insertions(+), 50 deletions(-) diff --git a/src/testkit/scala/tools/testkit/AssertUtil.scala b/src/testkit/scala/tools/testkit/AssertUtil.scala index 47d41aa29d31..722a23bd7c30 100644 --- a/src/testkit/scala/tools/testkit/AssertUtil.scala +++ b/src/testkit/scala/tools/testkit/AssertUtil.scala @@ -136,6 +136,9 @@ object AssertUtil { throw ae } + def assertCond[A](x: A)(pf: PartialFunction[A, Boolean]): Unit = assertTrue(PartialFunction.cond(x)(pf)) + def assertCondNot[A](x: A)(pf: PartialFunction[A, Boolean]): Unit = assertFalse(PartialFunction.cond(x)(pf)) + def assertFails[U](checkMessage: String => Boolean)(body: => U): Unit = assertThrows[AssertionError](body, checkMessage) /** JUnit-style assertion for `IterableLike.sameElements`. 
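// Illustrative sketch, not part of the patch: how the new assertCond and
// assertCondNot helpers read at a use site, replacing the previous
// PartialFunction.cond(...) plus bare assert combination used in these tests.
object AssertCondSketch {
  import scala.tools.testkit.AssertUtil.{assertCond, assertCondNot}
  def demo(): Unit = {
    assertCond("cat")    { case s if s.head.isLower => true }  // passes: the case matches
    assertCondNot("Dog") { case s if s.head.isLower => true }  // passes: the case does not match
  }
}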
diff --git a/test/junit/scala/util/matching/CharRegexTest.scala b/test/junit/scala/util/matching/CharRegexTest.scala index c2a30830cecc..f78316bd8d53 100644 --- a/test/junit/scala/util/matching/CharRegexTest.scala +++ b/test/junit/scala/util/matching/CharRegexTest.scala @@ -1,54 +1,50 @@ package scala.util.matching -import org.junit.Test +import scala.tools.testkit.AssertUtil.{assertCond, assertCondNot, assertThrows} -import PartialFunction._ +import org.junit.Test /** Regex can match a Char. * If the pattern includes a group, * always return a single char. */ class CharRegexTest { - implicit class Averrable(val b: Boolean) /*extends AnyVal*/ { - def yes(): Unit = assert(b) - def no(): Unit = assert(!b) - } + val c: Char = 'c' // "cat"(0) val d: Char = 'D' // "Dog"(0) - @Test def comparesGroupCorrectly(): Unit = { + @Test def comparesGroupCorrectly: Unit = { val r = """(\p{Lower})""".r - cond(c) { case r(x) => true } .yes() - cond(c) { case r(_) => true } .yes() - cond(c) { case r(_*) => true } .yes() - cond(c) { case r() => true } .no() - - cond(d) { case r(x) => true } .no() - cond(d) { case r(_) => true } .no() - cond(d) { case r(_*) => true } .no() - cond(d) { case r() => true } .no() + assertCond(c) { case r(x) => true } + assertCond(c) { case r(_) => true } + assertCond(c) { case r(_*) => true } + assertCondNot(c) { case r() => true } + + assertCondNot(d) { case r(x) => true } + assertCondNot(d) { case r(_) => true } + assertCondNot(d) { case r(_*) => true } + assertCondNot(d) { case r() => true } } - @Test def comparesNoGroupCorrectly(): Unit = { + @Test def comparesNoGroupCorrectly: Unit = { val rnc = """\p{Lower}""".r - cond(c) { case rnc(x) => true } .no() - cond(c) { case rnc(_) => true } .no() - cond(c) { case rnc(_*) => true } .yes() - cond(c) { case rnc() => true } .yes() - - cond(d) { case rnc(x) => true } .no() - cond(d) { case rnc(_) => true } .no() - cond(d) { case rnc(_*) => true } .no() - cond(d) { case rnc() => true } .no() + assertCondNot(c) { case rnc(x) => true } + assertCondNot(c) { case rnc(_) => true } + assertCond(c) { case rnc(_*) => true } + assertCond(c) { case rnc() => true } + + assertCondNot(d) { case rnc(x) => true } + assertCondNot(d) { case rnc(_) => true } + assertCondNot(d) { case rnc(_*) => true } + assertCondNot(d) { case rnc() => true } } - @Test(expected = classOf[MatchError]) - def failCorrectly(): Unit = { + @Test def failCorrectly: Unit = { val headAndTail = """(\p{Lower})([a-z]+)""".r - val n = "cat"(0) match { + def test = "cat"(0) match { case headAndTail(ht @ _*) => ht.size } - assert(false, s"Match size $n") + assertThrows[MatchError](test) } } diff --git a/test/junit/scala/util/matching/RegexTest.scala b/test/junit/scala/util/matching/RegexTest.scala index 61e3af2ef5bf..09ec4ee533be 100644 --- a/test/junit/scala/util/matching/RegexTest.scala +++ b/test/junit/scala/util/matching/RegexTest.scala @@ -1,15 +1,12 @@ package scala.util.matching -import org.junit.Assert.{ assertThrows => _, _ } +import org.junit.Assert.{assertEquals, assertFalse, assertTrue} import org.junit.Test -import org.junit.runner.RunWith -import org.junit.runners.JUnit4 -import scala.tools.testkit.AssertUtil._ +import scala.tools.testkit.AssertUtil.{assertCond, assertThrows} -@RunWith(classOf[JUnit4]) class RegexTest { - @Test def t8022CharSequence(): Unit = { + @Test def t8022CharSequence: Unit = { val full = """.*: (.)$""".r val text = " When I use this operator: *" // Testing 2.10.x compatibility of the return types of unapplySeq @@ -17,7 +14,7 @@ class RegexTest { 
assertEquals("*", y) } - @Test def t8022Match(): Unit = { + @Test def t8022Match: Unit = { val R = """(\d)""".r val matchh = R.findFirstMatchIn("a1").get // Testing 2.10.x compatibility of the return types of unapplySeq @@ -25,7 +22,7 @@ class RegexTest { assertEquals("1", y) } - @Test def `t9666: use inline group names`(): Unit = { + @Test def `t9666: use inline group names`: Unit = { val r = new Regex("a(?<Bee>b*)c") val ms = r findAllIn "stuff abbbc more abc and so on" assertTrue(ms.hasNext) @@ -37,7 +34,8 @@ class RegexTest { assertFalse(ms.hasNext) } - @Test def `t9666: use explicit group names`(): Unit = { + @deprecated("Explicit group names are essentially deprecated", since="2.13.7") + @Test def `t9666: use explicit group names`: Unit = { val r = new Regex("a(b*)c", "Bee") val ms = r findAllIn "stuff abbbc more abc and so on" assertTrue(ms.hasNext) @@ -49,7 +47,8 @@ class RegexTest { assertFalse(ms.hasNext) } - @Test def `t9666: fall back to explicit group names`(): Unit = { + @deprecated("Explicit group names are essentially deprecated", since="2.13.7") + @Test def `t9666: fall back to explicit group names`: Unit = { val r = new Regex("a(?<Bar>b*)c", "Bee") val ms = r findAllIn "stuff abbbc more abc and so on" assertTrue(ms.hasNext) @@ -67,13 +66,16 @@ class RegexTest { type NoMatch = NoSuchElementException type NoData = IllegalStateException - @Test def `t9666: throw on bad name`(): Unit = { + @Test def `t9666: throw on bad name`: Unit = assertThrows[NoGroup] { val r = new Regex("a(?<Bar>b*)c") val ms = r findAllIn "stuff abbbc more abc and so on" assertTrue(ms.hasNext) ms group "Bee" } + + @deprecated("Explicit group names are essentially deprecated", since="2.13.7") + @Test def `t9666: throw on bad explicit name`: Unit = { assertThrows[NoGroup] { val r = new Regex("a(?<Bar>b*)c", "Bar") val ms = r findAllIn "stuff abbbc more abc and so on" assertTrue(ms.hasNext) ms group "Bee" } @@ -88,7 +90,7 @@ class RegexTest { } } - @Test def `t9827 MatchIterator ergonomics`(): Unit = { + @Test def `t9827 MatchIterator ergonomics`: Unit = { val r = "(ab)(cd)".r val s = "xxxabcdyyyabcdzzz" assertEquals(3, r.findAllIn(s).start) @@ -155,7 +157,7 @@ class RegexTest { } } - @Test def `t10827 matches method`(): Unit = { + @Test def `t10827 matches method`: Unit = { val r = """\d+""".r assertTrue(r.matches("500")) assertFalse(r.matches("foo")) @@ -164,7 +166,7 @@ class RegexTest { assertFalse(r.matches("2foo")) } - @Test def `t10827 matches method for unanchored Regex`(): Unit = { + @Test def `t10827 matches method for unanchored Regex`: Unit = { val r = """\d+""".r.unanchored assertTrue(r.matches("500")) assertFalse(r.matches("abc")) @@ -173,7 +175,7 @@ class RegexTest { assertTrue(r.matches("2foo")) } - @Test def replacementMatching(): Unit = { + @Test def replacementMatching: Unit = { val regex = """\$\{(.+?)\}""".r val replaced = regex.replaceAllIn("Replacing: ${main}. And another method: ${foo}.", (m: util.matching.Regex.Match) => { @@ -190,7 +192,7 @@ class RegexTest { assertEquals("Replacing: main. And another: ${foo}.", replaced3) } - @Test def groupsMatching(): Unit = { + @Test def groupsMatching: Unit = { val Date = """(\d+)/(\d+)/(\d+)""".r for (Regex.Groups(a, b, c) <- Date findFirstMatchIn "1/1/2001 marks the start of the millennium. 31/12/2000 doesn't.") { assertEquals("1", a) @@ -198,13 +200,13 @@ class RegexTest { assertEquals("2001", c) } for (Regex.Groups(a, b, c) <- Date.findAllIn("1/1/2001 marks the start of the millennium.
31/12/2000 doesn't.").matchData) { - assertTrue(a == "1" || a == "31") - assertTrue(b == "1" || b == "12") - assertTrue(c == "2001" || c == "2000") + assertCond(a) { case "1" | "31" => true } + assertCond(b) { case "1" | "12" => true } + assertCond(c) { case "2001" | "2000" => true } } } - @Test def `t6406 no longer unapply any`(): Unit = { + @Test def `t6406 no longer unapply any`: Unit = { val r = "(\\d+)".r val q = """(\d)""".r val ns = List("1,2","x","3,4") From cdbc4b8d4e95acc76f58b8e498b095cd9311bbe3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rafa=C5=82=20Sumis=C5=82awski?= Date: Sun, 8 Aug 2021 14:02:40 +0200 Subject: [PATCH 336/769] optimise ArraySeq.map --- .../scala/collection/immutable/ArraySeq.scala | 10 ++++++- .../immutable/ArraySeqBenchmark.scala | 26 ++++++++++++++++--- 2 files changed, 32 insertions(+), 4 deletions(-) diff --git a/src/library/scala/collection/immutable/ArraySeq.scala b/src/library/scala/collection/immutable/ArraySeq.scala index 943ce9935303..81873f83d930 100644 --- a/src/library/scala/collection/immutable/ArraySeq.scala +++ b/src/library/scala/collection/immutable/ArraySeq.scala @@ -68,7 +68,15 @@ sealed abstract class ArraySeq[+A] ArraySeq.unsafeWrapArray(dest).asInstanceOf[ArraySeq[B]] } - override def map[B](f: A => B): ArraySeq[B] = iterableFactory.tabulate(length)(i => f(apply(i))) + override def map[B](f: A => B): ArraySeq[B] = { + val a = new Array[Any](size) + var i = 0 + while (i < a.length){ + a(i) = f(apply(i)).asInstanceOf[Any] + i += 1 + } + ArraySeq.unsafeWrapArray(a).asInstanceOf[ArraySeq[B]] + } override def prepended[B >: A](elem: B): ArraySeq[B] = ArraySeq.unsafeWrapArray(unsafeArray.prepended[Any](elem)).asInstanceOf[ArraySeq[B]] diff --git a/test/benchmarks/src/main/scala/scala/collection/immutable/ArraySeqBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/immutable/ArraySeqBenchmark.scala index 0be14aab4ce8..a90bfc9ffb34 100644 --- a/test/benchmarks/src/main/scala/scala/collection/immutable/ArraySeqBenchmark.scala +++ b/test/benchmarks/src/main/scala/scala/collection/immutable/ArraySeqBenchmark.scala @@ -1,8 +1,6 @@ package scala.collection.immutable import java.util.concurrent.TimeUnit -import java.util.Arrays - import org.openjdk.jmh.annotations._ import org.openjdk.jmh.infra.Blackhole @@ -21,6 +19,7 @@ class ArraySeqBenchmark { var size: Int = _ var integersS: ArraySeq[Int] = _ var stringsS: ArraySeq[String] = _ + val newS = Array("a", "b", "c", "d", "e", "f") @Setup(Level.Trial) def initNumbers: Unit = { val integers = (1 to size).toList @@ -68,4 +67,25 @@ class ArraySeqBenchmark { } b.result() } -} + + // newS is used to avoid allocating Strings, while still performing some sort of "mapping". + + @Benchmark def mapSOld(): ArraySeq[AnyRef] = + oldMap(stringsS)(x => newS(x.length)) + + @Benchmark def mapSNew(): ArraySeq[AnyRef] = + stringsS.map(x => newS(x.length)) + + // Mapping an ArraySeq.ofInt results in an ArraySeq.ofRef containing java.lang.Integers. + // Boxing small integers doesn't result in allocations thus the choice of _ & 0xf as the mapping function. 
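// A hypothetical sanity check of the boxing assumption described in the comment above:
// values produced by `_ & 0xf` stay in 0..15, a range inside the JVM's Integer cache,
// so boxing them reuses cached instances rather than allocating new objects.
// (The helper name below is invented for illustration; it is not part of the benchmark.)
private def boxedSmallIntsAreCached: Boolean =
  (0 to 15).forall(i => Integer.valueOf(i) eq Integer.valueOf(i)) // cached boxes are reference-equal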
+ + @Benchmark def mapIOld(): ArraySeq[Int] = + oldMap(integersS)(_ & 0xf) + + @Benchmark def mapINew(): ArraySeq[Int] = + integersS.map(_ & 0xf) + + private def oldMap[A, B](seq: ArraySeq[A])(f: A => B): ArraySeq[B] = + seq.iterableFactory.tabulate(seq.length)(i => f(seq.apply(i))) + +} \ No newline at end of file From 2814148b47ec99e8afc3b971c458c703ed52fb95 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Mon, 9 Aug 2021 17:15:39 +0200 Subject: [PATCH 337/769] Only issue 'unused nowarn' warnings when there are no errors --- src/compiler/scala/tools/nsc/Global.scala | 5 ++--- src/compiler/scala/tools/nsc/Reporting.scala | 8 ++++---- test/files/neg/t12433.check | 4 ++++ test/files/neg/t12433.scala | 7 +++++++ 4 files changed, 17 insertions(+), 7 deletions(-) create mode 100644 test/files/neg/t12433.check create mode 100644 test/files/neg/t12433.scala diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 1fd77e0fe4a6..abfdbe9fe069 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -1562,13 +1562,12 @@ class Global(var currentSettings: Settings, reporter0: Reporter) if (settings.YstatisticsEnabled && settings.Ystatistics.contains(phase.name)) printStatisticsFor(phase) - if (!globalPhase.hasNext || reporter.hasErrors) - runReporting.warnUnusedSuppressions() - advancePhase() } profiler.finished() + runReporting.runFinished(hasErrors = reporter.hasErrors) + reporting.summarizeErrors() // val allNamesArray: Array[String] = allNames().map(_.toString).toArray.sorted diff --git a/src/compiler/scala/tools/nsc/Reporting.scala b/src/compiler/scala/tools/nsc/Reporting.scala index cd26e72a7cfd..f113a3789ad2 100644 --- a/src/compiler/scala/tools/nsc/Reporting.scala +++ b/src/compiler/scala/tools/nsc/Reporting.scala @@ -80,11 +80,11 @@ trait Reporting extends internal.Reporting { self: ast.Positions with Compilatio def suppressionExists(pos: Position): Boolean = suppressions.getOrElse(pos.source, Nil).exists(_.annotPos.point == pos.point) - def warnUnusedSuppressions(): Unit = { - // if we stop before typer completes (errors in parser, Ystop), report all suspended messages + def runFinished(hasErrors: Boolean): Unit = { + // report suspended messages (in case the run finished before typer) suspendedMessages.valuesIterator.foreach(_.foreach(issueWarning)) - // scaladoc doesn't run all phases, so not all warnings are emitted - if (settings.warnUnusedNowarn && !settings.isScaladoc) + // report unused nowarns only if all phases are done. scaladoc doesn't run all phases.
+ if (!hasErrors && settings.warnUnusedNowarn && !settings.isScaladoc) for { source <- suppressions.keysIterator.toList sups <- suppressions.remove(source) diff --git a/test/files/neg/t12433.check b/test/files/neg/t12433.check new file mode 100644 index 000000000000..ff7288bf8858 --- /dev/null +++ b/test/files/neg/t12433.check @@ -0,0 +1,4 @@ +t12433.scala:5: error: not found: value / + def t1 = / + ^ +1 error diff --git a/test/files/neg/t12433.scala b/test/files/neg/t12433.scala new file mode 100644 index 000000000000..c1975ca848db --- /dev/null +++ b/test/files/neg/t12433.scala @@ -0,0 +1,7 @@ +// scalac: -Wunused:nowarn +import annotation.nowarn +object T { + @deprecated def f = 1 + def t1 = / + @nowarn def t2 = f +} From 95e607189675d3e2d3dc0f3645cb5c42990d0252 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rafa=C5=82=20Sumis=C5=82awski?= Date: Mon, 9 Aug 2021 17:18:17 +0200 Subject: [PATCH 338/769] remove unnecessary asInstanceOf and move benchmark initialisation to setup method --- src/library/scala/collection/immutable/ArraySeq.scala | 2 +- .../scala/scala/collection/immutable/ArraySeqBenchmark.scala | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/src/library/scala/collection/immutable/ArraySeq.scala b/src/library/scala/collection/immutable/ArraySeq.scala index 81873f83d930..ac246bca6f95 100644 --- a/src/library/scala/collection/immutable/ArraySeq.scala +++ b/src/library/scala/collection/immutable/ArraySeq.scala @@ -72,7 +72,7 @@ sealed abstract class ArraySeq[+A] val a = new Array[Any](size) var i = 0 while (i < a.length){ - a(i) = f(apply(i)).asInstanceOf[Any] + a(i) = f(apply(i)) i += 1 } ArraySeq.unsafeWrapArray(a).asInstanceOf[ArraySeq[B]] diff --git a/test/benchmarks/src/main/scala/scala/collection/immutable/ArraySeqBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/immutable/ArraySeqBenchmark.scala index a90bfc9ffb34..dab019b0b288 100644 --- a/test/benchmarks/src/main/scala/scala/collection/immutable/ArraySeqBenchmark.scala +++ b/test/benchmarks/src/main/scala/scala/collection/immutable/ArraySeqBenchmark.scala @@ -19,13 +19,14 @@ class ArraySeqBenchmark { var size: Int = _ var integersS: ArraySeq[Int] = _ var stringsS: ArraySeq[String] = _ - val newS = Array("a", "b", "c", "d", "e", "f") + var newS: Array[String] = _ @Setup(Level.Trial) def initNumbers: Unit = { val integers = (1 to size).toList val strings = integers.map(_.toString) integersS = ArraySeq.unsafeWrapArray(integers.toArray) stringsS = ArraySeq.unsafeWrapArray(strings.toArray) + newS = Array("a", "b", "c", "d", "e", "f") } @Benchmark def sortedStringOld(bh: Blackhole): Unit = From 64ea1d1f7822322b5a08f02226516aa88143802d Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Mon, 9 Aug 2021 14:38:44 +0200 Subject: [PATCH 339/769] Deprecate using Scala 3 hard keywords as identifiers --- .../scala/tools/nsc/ast/parser/Parsers.scala | 2 ++ .../scala/tools/nsc/ast/parser/Scanners.scala | 2 ++ .../scala/reflect/internal/StdNames.scala | 5 +++++ src/reflect/scala/reflect/io/ZipArchive.scala | 6 +++--- .../scala/reflect/runtime/JavaMirrors.scala | 6 +++--- test/files/neg/scala3-keywords.check | 21 +++++++++++++++++++ test/files/neg/scala3-keywords.scala | 19 +++++++++++++++++ .../run/reflection-java-crtp/Main_2.scala | 4 ++-- .../scala/collection/FactoriesTest.scala | 4 ++-- 9 files changed, 59 insertions(+), 10 deletions(-) create mode 100644 test/files/neg/scala3-keywords.check create mode 100644 test/files/neg/scala3-keywords.scala diff --git 
a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index adc577f54c86..796b906142ac 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -1277,6 +1277,8 @@ self => def ident(skipIt: Boolean): Name = ( if (isIdent) { val name = in.name.encode + if (in.token != BACKQUOTED_IDENT && scala3Keywords.contains(name)) + deprecationWarning(in.offset, s"Wrap `$name` in backticks to use it as an identifier, it will become a keyword in Scala 3.", "2.13.7") in.nextToken() name } diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala index b40ad37f6bf2..8010fd2756a0 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala @@ -1526,6 +1526,8 @@ trait Scanners extends ScannersCommon { final val softModifierNames = Set(nme.open, nme.infix) + final val scala3Keywords = Set(nme.`enum`, nme.`export`, nme.`given`) + // Token representation ---------------------------------------------------- /** Returns the string representation of given token. */ diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala index 926fca90e649..00a2cc0603dd 100644 --- a/src/reflect/scala/reflect/internal/StdNames.scala +++ b/src/reflect/scala/reflect/internal/StdNames.scala @@ -675,6 +675,11 @@ trait StdNames { // Scala 3 import syntax val as: NameType = nameType("as") + // Scala 3 hard keywords + val `enum`: NameType = nameType("enum") + val `export`: NameType = nameType("export") + val `given`: NameType = nameType("given") + // Scala 3 soft keywords val infix: NameType = nameType("infix") val open: NameType = nameType("open") diff --git a/src/reflect/scala/reflect/io/ZipArchive.scala b/src/reflect/scala/reflect/io/ZipArchive.scala index 7d20a100d5d1..a101656e3d17 100644 --- a/src/reflect/scala/reflect/io/ZipArchive.scala +++ b/src/reflect/scala/reflect/io/ZipArchive.scala @@ -240,11 +240,11 @@ final class FileZipArchive(file: JFile, release: Option[String]) extends ZipArch val root = new DirEntry(RootEntry) dirs.put(RootEntry, root) val zipFile = openZipFile() - val enum = zipFile.entries() + val entries = zipFile.entries() try { - while (enum.hasMoreElements) { - val zipEntry = enum.nextElement + while (entries.hasMoreElements) { + val zipEntry = entries.nextElement if (!zipEntry.getName.startsWith("META-INF/versions/")) { if (!zipEntry.isDirectory) { val dir = getDir(dirs, zipEntry) diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala index 4e227174901b..d0f318bedd37 100644 --- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala +++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala @@ -195,9 +195,9 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive object AnnotationClass { def unapply(x: jClass[_]) = x.isAnnotation } object ConstantArg { - def enumToSymbol(enum: Enum[_]): Symbol = { - val staticPartOfEnum = classToScala(enum.getClass).companionSymbol - staticPartOfEnum.info.declaration(TermName(enum.name)) + def enumToSymbol(`enum`: Enum[_]): Symbol = { + val staticPartOfEnum = classToScala(`enum`.getClass).companionSymbol + staticPartOfEnum.info.declaration(TermName(`enum`.name)) } def unapply(schemaAndValue: (jClass[_], Any)): Option[Any] = schemaAndValue match { diff --git 
a/test/files/neg/scala3-keywords.check b/test/files/neg/scala3-keywords.check new file mode 100644 index 000000000000..d4b12b623977 --- /dev/null +++ b/test/files/neg/scala3-keywords.check @@ -0,0 +1,21 @@ +scala3-keywords.scala:13: warning: Wrap `enum` in backticks to use it as an identifier, it will become a keyword in Scala 3. + val enum: Int = 1 // error + ^ +scala3-keywords.scala:14: warning: Wrap `export` in backticks to use it as an identifier, it will become a keyword in Scala 3. + val export: Int = 1 // error + ^ +scala3-keywords.scala:15: warning: Wrap `given` in backticks to use it as an identifier, it will become a keyword in Scala 3. + val given: Int = 1 // error + ^ +scala3-keywords.scala:16: warning: Wrap `given` in backticks to use it as an identifier, it will become a keyword in Scala 3. + def foo(given: Int) = {} // error + ^ +scala3-keywords.scala:17: warning: Wrap `export` in backticks to use it as an identifier, it will become a keyword in Scala 3. + def bla[export <: Int] = {} // error + ^ +scala3-keywords.scala:19: warning: Wrap `enum` in backticks to use it as an identifier, it will become a keyword in Scala 3. +class enum // error + ^ +error: No warnings can be incurred under -Werror. +6 warnings +1 error diff --git a/test/files/neg/scala3-keywords.scala b/test/files/neg/scala3-keywords.scala new file mode 100644 index 000000000000..d3be6d148541 --- /dev/null +++ b/test/files/neg/scala3-keywords.scala @@ -0,0 +1,19 @@ +// scalac: -deprecation -Xfatal-warnings +// +class A { + val `enum`: Int = 1 + val `export`: Int = 1 + val `given`: Int = 1 + def foo(`given`: Int) = {} + def bla[`export` <: Int] = { + class `enum` + } +} +class B { + val enum: Int = 1 // error + val export: Int = 1 // error + val given: Int = 1 // error + def foo(given: Int) = {} // error + def bla[export <: Int] = {} // error +} +class enum // error diff --git a/test/files/run/reflection-java-crtp/Main_2.scala b/test/files/run/reflection-java-crtp/Main_2.scala index 3199eaf5ffdb..b9361131023c 100644 --- a/test/files/run/reflection-java-crtp/Main_2.scala +++ b/test/files/run/reflection-java-crtp/Main_2.scala @@ -1,8 +1,8 @@ object Test extends App { import scala.reflect.runtime.universe._ - val enum = typeOf[JavaSimpleEnumeration_1].baseClasses(1).asClass + val `enum` = typeOf[JavaSimpleEnumeration_1].baseClasses(1).asClass // make sure that the E's in Enum> are represented by the same symbol - val e1 = enum.typeParams(0).asType + val e1 = `enum`.typeParams(0).asType val TypeBounds(_, TypeRef(_, _, List(TypeRef(_, e2: TypeSymbol, _)))) = e1.info println(e1, e2, e1 eq e2) } diff --git a/test/junit/scala/collection/FactoriesTest.scala b/test/junit/scala/collection/FactoriesTest.scala index 34ebc1286870..6eb4ccd8779a 100644 --- a/test/junit/scala/collection/FactoriesTest.scala +++ b/test/junit/scala/collection/FactoriesTest.scala @@ -215,11 +215,11 @@ class FactoriesTest { im.BitSet(1, 2, 3) ) - object enum extends Enumeration { + object `enum` extends Enumeration { val x, y, z = Value } - val enumValues = enum.values + val enumValues = `enum`.values sortedFactoryFromIterableOnceReturnsSameReference(SortedSet, im.SortedSet)(enumValues) From 0cecad3598b63e83bf41d3393e37013d6640ecf8 Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Mon, 9 Aug 2021 14:17:42 +0200 Subject: [PATCH 340/769] Allow `case` in pattern bindings even without -Xsource:3 In #9558 (which shipped with 2.13.6) we added support for `case` bindings under -Xsource:3. 
Since this parser change does not break any existing code and since IntelliJ and scalameta/metals now understand this syntax in Scala 2 code, it should be safe to enable it by default to further ease cross-compilation between Scala 2 and 3. --- spec/06-expressions.md | 14 +++++++---- spec/13-syntax-summary.md | 2 +- .../scala/tools/nsc/ast/parser/Parsers.scala | 6 ++--- .../neg/for-comprehension-case-future.check | 7 ------ .../neg/for-comprehension-case-future.scala | 24 ------------------- test/files/neg/for-comprehension-case.check | 14 ++++------- test/files/neg/for-comprehension-case.scala | 16 +++++++++---- 7 files changed, 29 insertions(+), 54 deletions(-) delete mode 100644 test/files/neg/for-comprehension-case-future.check delete mode 100644 test/files/neg/for-comprehension-case-future.scala diff --git a/spec/06-expressions.md b/spec/06-expressions.md index 0387ce17e7f8..49687a2bf97e 100644 --- a/spec/06-expressions.md +++ b/spec/06-expressions.md @@ -919,7 +919,7 @@ A semicolon preceding the `while` symbol of a do loop expression is ignored. Expr1 ::= ‘for’ (‘(’ Enumerators ‘)’ | ‘{’ Enumerators ‘}’) {nl} [‘yield’] Expr Enumerators ::= Generator {semi Generator} -Generator ::= Pattern1 ‘<-’ Expr {[semi] Guard | semi Pattern1 ‘=’ Expr} +Generator ::= [‘case’] Pattern1 ‘<-’ Expr {[semi] Guard | semi Pattern1 ‘=’ Expr} Guard ::= ‘if’ PostfixExpr ``` @@ -929,9 +929,15 @@ A _for comprehension_ `for (´\mathit{enums}\,´) yield ´e´` evaluates expression ´e´ for each binding generated by the enumerators ´\mathit{enums}´ and collects the results. An enumerator sequence always starts with a generator; this can be followed by further generators, value -definitions, or guards. A _generator_ `´p´ <- ´e´` -produces bindings from an expression ´e´ which is matched in some way -against pattern ´p´. A _value definition_ `´p´ = ´e´` +definitions, or guards. + +A _generator_ `´p´ <- ´e´` produces bindings from an expression ´e´ which is +matched in some way against pattern ´p´. Optionally, `case` can appear in front +of a generator pattern, this has no meaning in Scala 2 but will be [required in +Scala 3 if `p` is not +irrefutable](https://docs.scala-lang.org/scala3/reference/changed-features/pattern-bindings.html). + +A _value definition_ `´p´ = ´e´` binds the value name ´p´ (or several names in a pattern ´p´) to the result of evaluating the expression ´e´. 
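For illustration (the values here are invented, not taken from the surrounding patch), a sketch of the optional `case` prefix: `for { case Some(x) <- List(Some(1), None) } yield x` evaluates to `List(1)`. Scala 2 merely accepts the `case`, while Scala 3 requires it here because the pattern `Some(x)` is not irrefutable.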
A _guard_ `if ´e´` contains a boolean expression which restricts diff --git a/spec/13-syntax-summary.md b/spec/13-syntax-summary.md index 1f54d346a3b2..cda92a3b3e5f 100644 --- a/spec/13-syntax-summary.md +++ b/spec/13-syntax-summary.md @@ -184,7 +184,7 @@ grammar: | (Bindings | ([‘implicit’] id | ‘_’) ‘:’ CompoundType) ‘=>’ Block Enumerators ::= Generator {semi Generator} - Generator ::= Pattern1 ‘<-’ Expr {[semi] Guard | semi Pattern1 ‘=’ Expr} + Generator ::= [‘case’] Pattern1 ‘<-’ Expr {[semi] Guard | semi Pattern1 ‘=’ Expr} CaseClauses ::= CaseClause { CaseClause } CaseClause ::= ‘case’ Pattern [Guard] ‘=>’ Block diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index adc577f54c86..5f7456044802 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -1993,16 +1993,14 @@ self => } /** {{{ - * Generator ::= Pattern1 (`<-` | `=`) Expr [Guard] + * Generator ::= [`case`] Pattern1 (`<-` | `=`) Expr [Guard] * }}} */ def generator(eqOK: Boolean, allowNestedIf: Boolean = true): List[Tree] = { val start = in.offset val hasCase = in.token == CASE - if (hasCase) { - if (!currentRun.isScala3) syntaxError(in.offset, s"`case` keyword in for comprehension requires the -Xsource:3 flag.") + if (hasCase) in.skipCASE() - } val hasVal = in.token == VAL if (hasVal) diff --git a/test/files/neg/for-comprehension-case-future.check b/test/files/neg/for-comprehension-case-future.check deleted file mode 100644 index 9ce9a9456882..000000000000 --- a/test/files/neg/for-comprehension-case-future.check +++ /dev/null @@ -1,7 +0,0 @@ -for-comprehension-case-future.scala:22: error: '<-' expected but '=' found. - case y = x + 1 - ^ -for-comprehension-case-future.scala:23: error: illegal start of simple expression - } yield x + y - ^ -2 errors diff --git a/test/files/neg/for-comprehension-case-future.scala b/test/files/neg/for-comprehension-case-future.scala deleted file mode 100644 index 05602e537759..000000000000 --- a/test/files/neg/for-comprehension-case-future.scala +++ /dev/null @@ -1,24 +0,0 @@ -// scalac: -Xsource:3 -// -class A { - // ok - val a = - for { - case Some(x) <- List(Some(1), None) - y = x + 1 - } yield x + y - - // ok - val b = - for { - Some(x) <- List(Some(1), None) - Some(y) <- List(None, Some(2)) - } yield x+y - - // fail - val c = - for { - case Some(x) <- List(Some(1), None) - case y = x + 1 - } yield x + y -} diff --git a/test/files/neg/for-comprehension-case.check b/test/files/neg/for-comprehension-case.check index 2e86e5d367b0..549e4943e344 100644 --- a/test/files/neg/for-comprehension-case.check +++ b/test/files/neg/for-comprehension-case.check @@ -1,13 +1,7 @@ -for-comprehension-case.scala:5: error: `case` keyword in for comprehension requires the -Xsource:3 flag. - case Some(x) <- List(Some(1), None) - ^ -for-comprehension-case.scala:12: error: `case` keyword in for comprehension requires the -Xsource:3 flag. - case y = x + 1 - ^ -for-comprehension-case.scala:12: error: '<-' expected but '=' found. +for-comprehension-case.scala:20: error: '<-' expected but '=' found. 
case y = x + 1 ^ -for-comprehension-case.scala:13: error: illegal start of simple expression - } yield x+y +for-comprehension-case.scala:21: error: illegal start of simple expression + } yield x + y ^ -4 errors +2 errors diff --git a/test/files/neg/for-comprehension-case.scala b/test/files/neg/for-comprehension-case.scala index 55e8d44a40e3..d6b14eb91a90 100644 --- a/test/files/neg/for-comprehension-case.scala +++ b/test/files/neg/for-comprehension-case.scala @@ -1,14 +1,22 @@ class A { - // fail + // ok val a = for { case Some(x) <- List(Some(1), None) - } yield x + y = x + 1 + } yield x + y - // fail + // ok val b = for { Some(x) <- List(Some(1), None) - case y = x + 1 + Some(y) <- List(None, Some(2)) } yield x+y + + // fail + val c = + for { + case Some(x) <- List(Some(1), None) + case y = x + 1 + } yield x + y } From a5bc093a0bd66660726547715270d9fdbe632ac0 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Wed, 11 Aug 2021 11:22:07 +0200 Subject: [PATCH 341/769] Fix `isStaticAnnotation` for un-initialized Java annotations Java annotations are identified by flag since 2.13.0 (https://github.com/scala/scala/pull/6869). If the annotation's `typeSymbol` still has a lazy `ClassfileLoader` info, the flags are not there yet. This leads to spurious API changes and recompilations in zinc (sbt/zinc#998). --- src/reflect/scala/reflect/internal/Symbols.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 6ff5b453b12f..6594c4dce4f6 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -124,7 +124,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => def isJavaEnum: Boolean = hasJavaEnumFlag def isJavaAnnotation: Boolean = hasJavaAnnotationFlag def isStaticAnnotation: Boolean = - hasJavaAnnotationFlag || isNonBottomSubClass(StaticAnnotationClass) && this != NowarnClass + initialize.hasJavaAnnotationFlag || isNonBottomSubClass(StaticAnnotationClass) && this != NowarnClass def newNestedSymbol(name: Name, pos: Position, newFlags: Long, isClass: Boolean): Symbol = name match { case n: TermName => newTermSymbol(n, pos, newFlags) From e4b467a1b6447ecd0b228612b0a28f9feac9064a Mon Sep 17 00:00:00 2001 From: Georgi Krastev Date: Wed, 14 Jul 2021 01:36:39 +0200 Subject: [PATCH 342/769] Not-private fields are not final in bytecode They are effectively final. Specialization marks fields in the parent class as not-private in order to initialize them in the specialized class. But if we mark them as final that leads to IllegalAccessError. 
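For illustration, a minimal sketch of the shape of code this change is concerned with; the class and member names below are invented (the patch's own regression test is t4511, added further down):

class Box[@specialized(Int) T](t: T) {
  val contents = t // after specialization this field is also assigned from the Box$mcI$sp constructor
  val label = "?"  // emitting such a field as ACC_FINAL is what led to the IllegalAccessError
}

object BoxDemo {
  def main(args: Array[String]): Unit =
    println(new Box(42).label) // instantiates the specialized subclass
}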
--- .../scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala | 2 +- test/files/run/t4511.check | 1 + test/files/run/t4511.scala | 9 +++++++++ 3 files changed, 11 insertions(+), 1 deletion(-) create mode 100644 test/files/run/t4511.check create mode 100644 test/files/run/t4511.scala diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala index e29cd3e02492..f219f2f9a850 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala @@ -761,6 +761,6 @@ abstract class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { javaFlags(sym) | ( if (sym hasAnnotation TransientAttr) asm.Opcodes.ACC_TRANSIENT else 0) | ( if (sym hasAnnotation VolatileAttr) asm.Opcodes.ACC_VOLATILE else 0) | - ( if (sym.isMutable) 0 else asm.Opcodes.ACC_FINAL) + ( if (sym.isMutable || sym.hasFlag(symtab.Flags.notPRIVATE)) 0 else asm.Opcodes.ACC_FINAL) } } diff --git a/test/files/run/t4511.check b/test/files/run/t4511.check new file mode 100644 index 000000000000..a1e2647d215e --- /dev/null +++ b/test/files/run/t4511.check @@ -0,0 +1 @@ +? diff --git a/test/files/run/t4511.scala b/test/files/run/t4511.scala new file mode 100644 index 000000000000..e014b16b8fa8 --- /dev/null +++ b/test/files/run/t4511.scala @@ -0,0 +1,9 @@ +class B[@specialized(Int) T](t: T) { + val a = t + val b = "?" +} + +object Test { + def main(args: Array[String]): Unit = + println(new B(42).b) +} From 758cdca762600598779e92730d449b18216b53e0 Mon Sep 17 00:00:00 2001 From: Georgi Krastev Date: Fri, 16 Jul 2021 22:21:09 +0200 Subject: [PATCH 343/769] Make fields assigned in specialized constructors mutable --- .../nsc/backend/jvm/BTypesFromSymbols.scala | 2 +- .../tools/nsc/transform/Constructors.scala | 72 ++++++++++--------- .../tools/nsc/transform/SpecializeTypes.scala | 2 +- 3 files changed, 41 insertions(+), 35 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala index f219f2f9a850..e29cd3e02492 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala @@ -761,6 +761,6 @@ abstract class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { javaFlags(sym) | ( if (sym hasAnnotation TransientAttr) asm.Opcodes.ACC_TRANSIENT else 0) | ( if (sym hasAnnotation VolatileAttr) asm.Opcodes.ACC_VOLATILE else 0) | - ( if (sym.isMutable || sym.hasFlag(symtab.Flags.notPRIVATE)) 0 else asm.Opcodes.ACC_FINAL) + ( if (sym.isMutable) 0 else asm.Opcodes.ACC_FINAL) } } diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala index f6dfa26851de..7fc7919efa41 100644 --- a/src/compiler/scala/tools/nsc/transform/Constructors.scala +++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala @@ -363,39 +363,43 @@ abstract class Constructors extends Statics with Transform with TypingTransforme adapter.transform(tree) } + def rewriteUnspecialized(assignee: Symbol, stat: Tree): Tree = { + assert(ctorParams(genericClazz).length == primaryConstrParams.length, "Bad param len") + // this is just to make private fields public + (new specializeTypes.ImplementationAdapter(ctorParams(genericClazz), primaryConstrParams, null, true))(stat) + // also make assigned fields mutable so they don't end up final in bytecode + // and 
mark the specialized class constructor for a release fence addition + if (assignee.isField) { + assignee.setFlag(MUTABLE) + clazz.primaryConstructor.updateAttachment(ConstructorNeedsFence) + } + + val rewritten = rewriteArrayUpdate(stat) + // statements coming from the original class need retyping in the current context + debuglog("retyping " + rewritten) + val duplicator = new specializeTypes.Duplicator(Map.empty) + val context = localTyper.context1.asInstanceOf[duplicator.Context] + duplicator.retyped(context, rewritten, genericClazz, clazz, Map.empty) + } + log("merging: " + originalStats.mkString("\n") + "\nwith\n" + specializedStats.mkString("\n")) - for (s <- originalStats; stat = s.duplicate) yield { + for (stat <- originalStats) yield { log("merge: looking at " + stat) - val stat1 = stat match { - case Assign(sel @ Select(This(_), field), _) => - specializedAssignFor(sel.symbol).getOrElse(stat) - case _ => stat - } - if (stat1 ne stat) { - log("replaced " + stat + " with " + stat1) - specBuf -= stat1 + stat.duplicate match { + case assign @ Assign(select @ Select(This(_), _), _) => + val assignee = select.symbol + specializedAssignFor(assignee) match { + case Some(specialized) => + log("replaced " + assign + " with " + specialized) + specBuf -= specialized + specialized + case None => + rewriteUnspecialized(assignee, assign) + } + case other => + rewriteUnspecialized(NoSymbol, other) } - - if (stat1 eq stat) { - assert(ctorParams(genericClazz).length == primaryConstrParams.length, "Bad param len") - // this is just to make private fields public - (new specializeTypes.ImplementationAdapter(ctorParams(genericClazz), primaryConstrParams, null, true))(stat1) - - val stat2 = rewriteArrayUpdate(stat1) - // statements coming from the original class need retyping in the current context - debuglog("retyping " + stat2) - - val d = new specializeTypes.Duplicator(Map[Symbol, Type]()) - d.retyped(localTyper.context1.asInstanceOf[d.Context], - stat2, - genericClazz, - clazz, - Map.empty) - } else - stat1 } -// if (specBuf.nonEmpty) -// println("residual specialized constructor statements: " + specBuf) } /* Add an 'if' around the statements coming after the super constructor. 
This @@ -759,18 +763,20 @@ abstract class Constructors extends Statics with Transform with TypingTransforme } else (Nil, remainingConstrStats) + val specializedStats = guardSpecializedInitializer(remainingConstrStatsDelayedInit) val fence = if (needFenceForDelayedInit || clazz.primaryConstructor.hasAttachment[ConstructorNeedsFence.type]) { val tree = localTyper.typedPos(clazz.primaryConstructor.pos)(gen.mkMethodCall(RuntimeStaticsModule, nme.releaseFence, Nil)) tree :: Nil } else Nil // Assemble final constructor - val primaryConstructor = deriveDefDef(primaryConstr)(_ => { + val primaryConstructor = deriveDefDef(primaryConstr) { _ => treeCopy.Block( primaryConstrBody, - paramInits ::: constructorPrefix ::: uptoSuperStats ::: guardSpecializedInitializer(remainingConstrStatsDelayedInit) ::: fence, - primaryConstrBody.expr) - }) + paramInits ::: constructorPrefix ::: uptoSuperStats ::: specializedStats ::: fence, + primaryConstrBody.expr + ) + } if ((exitingPickler(clazz.isAnonymousClass) || clazz.originalOwner.isTerm) && omittableAccessor.exists(_.isOuterField) && !constructorStats.exists(_.exists { case i: Ident if i.symbol.isOuterParam => true; case _ => false})) primaryConstructor.symbol.updateAttachment(OuterArgCanBeElided) diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index 8679414ef129..8c2369eb165c 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -1480,7 +1480,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { * in order to be accessible from specialized subclasses. */ override def transform(tree: Tree): Tree = tree match { - case Select(qual, name) => + case Select(_, _) => val sym = tree.symbol if (sym.isPrivate) debuglog( "seeing private member %s, currentClass: %s, owner: %s, isAccessible: %b, isLocalName: %b".format( From 489c5c90cc8d1e60bcaf97c9b449ccc59b88c5c0 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Mon, 16 Aug 2021 17:43:38 +0200 Subject: [PATCH 344/769] Mark constructors that need a releseFence in specialize --- .../tools/nsc/transform/Constructors.scala | 4 +- .../tools/nsc/transform/SpecializeTypes.scala | 223 +++++++++--------- .../tools/nsc/backend/jvm/BytecodeTest.scala | 22 ++ 3 files changed, 140 insertions(+), 109 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala index 7fc7919efa41..d24618ce507f 100644 --- a/src/compiler/scala/tools/nsc/transform/Constructors.scala +++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala @@ -369,10 +369,8 @@ abstract class Constructors extends Statics with Transform with TypingTransforme (new specializeTypes.ImplementationAdapter(ctorParams(genericClazz), primaryConstrParams, null, true))(stat) // also make assigned fields mutable so they don't end up final in bytecode // and mark the specialized class constructor for a release fence addition - if (assignee.isField) { + if (assignee.isField) assignee.setFlag(MUTABLE) - clazz.primaryConstructor.updateAttachment(ConstructorNeedsFence) - } val rewritten = rewriteArrayUpdate(stat) // statements coming from the original class need retyping in the current context diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index 8c2369eb165c..20f3b8c59486 100644 --- 
a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -748,124 +748,135 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { sym.isSetter && sym.getterIn(sym.owner).isStable && (sym.hasFlag(SYNTHESIZE_IMPL_IN_SUBCLASS) || isTraitValSetter(sym.nextOverriddenSymbol)) - for (m <- normMembers if needsSpecialization(fullEnv, m) && satisfiable(fullEnv)) { - if (!m.isDeferred) - addConcreteSpecMethod(m) - // specialized members have to be overridable. - if (m.isPrivate) - m.resetFlag(PRIVATE).setFlag(PROTECTED) - - if (m.isConstructor) { - val specCtor = enterMember(cloneInSpecializedClass(m, x => x)) - info(specCtor) = Forward(m) - } - else if (isNormalizedMember(m)) { // methods added by normalization - val NormalizedMember(original) = info(m): @unchecked - if (nonConflicting(env ++ typeEnv(m))) { - if (info(m).degenerate) { - debuglog("degenerate normalized member " + m.defString) - val specMember = enterMember(cloneInSpecializedClass(m, _ & ~DEFERRED)) - - info(specMember) = Implementation(original) - typeEnv(specMember) = env ++ typeEnv(m) - } else { - val om = forwardToOverload(m) - debuglog("normalizedMember " + m + " om: " + om + " " + pp(typeEnv(om))) - } + for (m <- normMembers) { + if (!needsSpecialization(fullEnv, m)) { + if (m.isValue && !m.isMutable && !m.isMethod && !m.isDeferred && !m.isLazy) { + // non-specialized `val` fields are made mutable (in Constructors) and assigned from the + // constructors of specialized subclasses. See PR scala/scala#9704. + clazz.primaryConstructor.updateAttachment(ConstructorNeedsFence) + sClass.primaryConstructor.updateAttachment(ConstructorNeedsFence) } - else - debuglog("conflicting env for " + m + " env: " + env) - } - else if (m.isDeferred && m.isSpecialized) { // abstract methods - val specMember = enterMember(cloneInSpecializedClass(m, _ | DEFERRED)) - // debuglog("deferred " + specMember.fullName + " remains abstract") - - info(specMember) = Abstract(specMember) - // was: new Forward(specMember) { - // override def target = m.owner.info.member(specializedName(m, env)) - // } - } else if (m.hasFlag(SUPERACCESSOR)) { // basically same as abstract case - // we don't emit a specialized overload for the super accessor because we can't jump back and forth - // between specialized and non-specialized methods during an invokespecial for the super call, - // so, we must jump immediately into the non-specialized world to find our super - val specMember = enterMember(cloneInSpecializedClass(m, f => f)) - - // rebindSuper in mixins knows how to rejigger this - // (basically it skips this specialized class in the base class seq, and then also never rebinds to a specialized method) - specMember.asInstanceOf[TermSymbol].referenced = m.alias - - info(specMember) = SpecialSuperAccessor(specMember) - } else if (m.isMethod && !m.isDeferred && (!m.isAccessor || m.isLazy || isTraitValSetter(m))) { // other concrete methods - forwardToOverload(m) - } else if (m.isValue && !m.isMethod) { // concrete value definition - def mkAccessor(field: Symbol, name: Name) = { - val newFlags = (SPECIALIZED | m.getterIn(clazz).flags) & ~(LOCAL | CASEACCESSOR | PARAMACCESSOR) - // we rely on the super class to initialize param accessors - val sym = sClass.newMethod(name.toTermName, field.pos, newFlags) - info(sym) = SpecializedAccessor(field) - sym + } else if (satisfiable(fullEnv)) { + if (!m.isDeferred) + addConcreteSpecMethod(m) + // specialized members have to be 
overridable. + if (m.isPrivate) + m.resetFlag(PRIVATE).setFlag(PROTECTED) + + if (m.isConstructor) { + val specCtor = enterMember(cloneInSpecializedClass(m, x => x)) + info(specCtor) = Forward(m) } - def overrideIn(clazz: Symbol, sym: Symbol) = { - val newFlags = (sym.flags | OVERRIDE | SPECIALIZED) & ~(DEFERRED | CASEACCESSOR | PARAMACCESSOR) - val sym1 = sym.cloneSymbol(clazz, newFlags) - sym1.modifyInfo(_.asSeenFrom(clazz.tpe, sym1.owner)) + else if (isNormalizedMember(m)) { // methods added by normalization + val NormalizedMember(original) = info(m): @unchecked + if (nonConflicting(env ++ typeEnv(m))) { + if (info(m).degenerate) { + debuglog("degenerate normalized member " + m.defString) + val specMember = enterMember(cloneInSpecializedClass(m, _ & ~DEFERRED)) + + info(specMember) = Implementation(original) + typeEnv(specMember) = env ++ typeEnv(m) + } else { + val om = forwardToOverload(m) + debuglog("normalizedMember " + m + " om: " + om + " " + pp(typeEnv(om))) + } + } + else + debuglog("conflicting env for " + m + " env: " + env) } - val specVal = specializedOverload(sClass, m, env) + else if (m.isDeferred && m.isSpecialized) { // abstract methods + val specMember = enterMember(cloneInSpecializedClass(m, _ | DEFERRED)) + // debuglog("deferred " + specMember.fullName + " remains abstract") + + info(specMember) = Abstract(specMember) + // was: new Forward(specMember) { + // override def target = m.owner.info.member(specializedName(m, env)) + // } + } else if (m.hasFlag(SUPERACCESSOR)) { // basically same as abstract case + // we don't emit a specialized overload for the super accessor because we can't jump back and forth + // between specialized and non-specialized methods during an invokespecial for the super call, + // so, we must jump immediately into the non-specialized world to find our super + val specMember = enterMember(cloneInSpecializedClass(m, f => f)) + + // rebindSuper in mixins knows how to rejigger this + // (basically it skips this specialized class in the base class seq, and then also never rebinds to a specialized method) + specMember.asInstanceOf[TermSymbol].referenced = m.alias + + info(specMember) = SpecialSuperAccessor(specMember) + } else if (m.isMethod && !m.isDeferred && (!m.isAccessor || m.isLazy || isTraitValSetter(m))) { // other concrete methods + forwardToOverload(m) + } else if (m.isValue && !m.isMethod) { // concrete value definition + def mkAccessor(field: Symbol, name: Name) = { + val newFlags = (SPECIALIZED | m.getterIn(clazz).flags) & ~(LOCAL | CASEACCESSOR | PARAMACCESSOR) + // we rely on the super class to initialize param accessors + val sym = sClass.newMethod(name.toTermName, field.pos, newFlags) + info(sym) = SpecializedAccessor(field) + sym + } - addConcreteSpecMethod(m) - specVal.asInstanceOf[TermSymbol].setAlias(m) + def overrideIn(clazz: Symbol, sym: Symbol) = { + val newFlags = (sym.flags | OVERRIDE | SPECIALIZED) & ~(DEFERRED | CASEACCESSOR | PARAMACCESSOR) + val sym1 = sym.cloneSymbol(clazz, newFlags) + sym1.modifyInfo(_.asSeenFrom(clazz.tpe, sym1.owner)) + } - enterMember(specVal) - // create accessors + val specVal = specializedOverload(sClass, m, env) - if (m.isLazy) { - // no getters needed (we'll specialize the compute method and accessor separately), can stay private - // m.setFlag(PRIVATE) -- TODO: figure out how to leave the non-specialized lazy var private - // (the implementation needs it to be visible while duplicating and retypechecking, - // but it really could be private in bytecode) - specVal.setFlag(PRIVATE) - } - else 
if (nme.isLocalName(m.name)) { - val specGetter = mkAccessor(specVal, specVal.getterName) setInfo MethodType(Nil, specVal.info) - val origGetter = overrideIn(sClass, m.getterIn(clazz)) - info(origGetter) = Forward(specGetter) - enterMember(specGetter) - enterMember(origGetter) - debuglog("specialize accessor in %s: %s -> %s".format(sClass.name.decode, origGetter.name.decode, specGetter.name.decode)) - - clazz.caseFieldAccessors.find(_.name.startsWith(m.name)) foreach { cfa => - val cfaGetter = overrideIn(sClass, cfa) - info(cfaGetter) = SpecializedAccessor(specVal) - enterMember(cfaGetter) - debuglog("override case field accessor %s -> %s".format(m.name.decode, cfaGetter.name.decode)) + addConcreteSpecMethod(m) + specVal.asInstanceOf[TermSymbol].setAlias(m) + + enterMember(specVal) + // create accessors + + if (m.isLazy) { + // no getters needed (we'll specialize the compute method and accessor separately), can stay private + // m.setFlag(PRIVATE) -- TODO: figure out how to leave the non-specialized lazy var private + // (the implementation needs it to be visible while duplicating and retypechecking, + // but it really could be private in bytecode) + specVal.setFlag(PRIVATE) } + else if (nme.isLocalName(m.name)) { + val specGetter = mkAccessor(specVal, specVal.getterName) setInfo MethodType(Nil, specVal.info) + val origGetter = overrideIn(sClass, m.getterIn(clazz)) + info(origGetter) = Forward(specGetter) + enterMember(specGetter) + enterMember(origGetter) + debuglog("specialize accessor in %s: %s -> %s".format(sClass.name.decode, origGetter.name.decode, specGetter.name.decode)) + + clazz.caseFieldAccessors.find(_.name.startsWith(m.name)) foreach { cfa => + val cfaGetter = overrideIn(sClass, cfa) + info(cfaGetter) = SpecializedAccessor(specVal) + enterMember(cfaGetter) + debuglog("override case field accessor %s -> %s".format(m.name.decode, cfaGetter.name.decode)) + } - if (specVal.isVariable && m.setterIn(clazz) != NoSymbol) { - val specSetter = mkAccessor(specVal, specGetter.setterName) - .resetFlag(STABLE) - specSetter.setInfo(MethodType(specSetter.newSyntheticValueParams(List(specVal.info)), - UnitTpe)) - val origSetter = overrideIn(sClass, m.setterIn(clazz)) - info(origSetter) = Forward(specSetter) - enterMember(specSetter) - enterMember(origSetter) + if (specVal.isVariable && m.setterIn(clazz) != NoSymbol) { + val specSetter = mkAccessor(specVal, specGetter.setterName) + .resetFlag(STABLE) + specSetter.setInfo(MethodType(specSetter.newSyntheticValueParams(List(specVal.info)), + UnitTpe)) + val origSetter = overrideIn(sClass, m.setterIn(clazz)) + info(origSetter) = Forward(specSetter) + enterMember(specSetter) + enterMember(origSetter) + } + } + else { // if there are no accessors, specialized methods will need to access this field in specialized subclasses + m.resetFlag(PRIVATE) + specVal.resetFlag(PRIVATE) + debuglog("no accessors for %s/%s, specialized methods must access field in subclass".format( + m.name.decode, specVal.name.decode)) } } - else { // if there are no accessors, specialized methods will need to access this field in specialized subclasses - m.resetFlag(PRIVATE) - specVal.resetFlag(PRIVATE) - debuglog("no accessors for %s/%s, specialized methods must access field in subclass".format( - m.name.decode, specVal.name.decode)) + else if (m.isClass) { + val specClass: Symbol = cloneInSpecializedClass(m, x => x) + typeEnv(specClass) = fullEnv + specClass setName specializedName(specClass, fullEnv).toTypeName + enterMember(specClass) + debuglog("entered specialized class " + 
specClass.fullName) + info(specClass) = SpecializedInnerClass(m, fullEnv) } } - else if (m.isClass) { - val specClass: Symbol = cloneInSpecializedClass(m, x => x) - typeEnv(specClass) = fullEnv - specClass setName specializedName(specClass, fullEnv).toTypeName - enterMember(specClass) - debuglog("entered specialized class " + specClass.fullName) - info(specClass) = SpecializedInnerClass(m, fullEnv) - } } sClass } diff --git a/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala b/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala index 4bc7e2035e2e..8e5cdd220c56 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala @@ -373,4 +373,26 @@ class BytecodeTest extends BytecodeTesting { t(foo, List(("Ljava/lang/String;", "value", 0))) t(abcde, List(("Ljava/lang/String;", "value1", 0), ("J", "value2", 1), ("D", "value3", 3), ("I", "value4", 5), ("D", "value5", 6))) } + + @Test + def nonSpecializedValFence(): Unit = { + def code(u1: String) = + s"""abstract class Speck[@specialized(Int) T](t: T) { + | val a = t + | $u1 + | lazy val u2 = "?" + | var u3 = "?" + | val u4: String + | var u5: String + |} + |""".stripMargin + + for (u1 <- "" :: List("", "private", "private[this]", "protected").map(mod => s"$mod val u1 = \"?\"")) { + for (c <- compileClasses(code(u1)).map(getMethod(_, ""))) + if (u1.isEmpty) + assertDoesNotInvoke(c, "releaseFence") + else + assertInvoke(c, "scala/runtime/Statics", "releaseFence") + } + } } From 31f4f77880bc76351bae55c402f384a457428499 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 17 Aug 2021 16:30:19 +1000 Subject: [PATCH 345/769] Fix ArraySeq/Vector array sharing optimization in Vector.from --- src/library/scala/collection/immutable/Vector.scala | 2 +- test/junit/scala/collection/immutable/VectorTest.scala | 8 ++++++++ 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/src/library/scala/collection/immutable/Vector.scala b/src/library/scala/collection/immutable/Vector.scala index cc91b68902d7..9f76576f2dba 100644 --- a/src/library/scala/collection/immutable/Vector.scala +++ b/src/library/scala/collection/immutable/Vector.scala @@ -43,7 +43,7 @@ object Vector extends StrictOptimizedSeqFactory[Vector] { if (knownSize == 0) empty[E] else if (knownSize > 0 && knownSize <= WIDTH) { val a1: Arr1 = it match { - case as: ArraySeq.ofRef[_] if as.elemTag == classOf[AnyRef] => + case as: ArraySeq.ofRef[_] if as.elemTag.runtimeClass == classOf[AnyRef] => as.unsafeArray.asInstanceOf[Arr1] case it: Iterable[E] => val a1 = new Arr1(knownSize) diff --git a/test/junit/scala/collection/immutable/VectorTest.scala b/test/junit/scala/collection/immutable/VectorTest.scala index f6ae171d70fd..685100f4c821 100644 --- a/test/junit/scala/collection/immutable/VectorTest.scala +++ b/test/junit/scala/collection/immutable/VectorTest.scala @@ -58,6 +58,14 @@ class VectorTest { assertSame(m, Vector.apply(m: _*)) } + @Test def factoryReuseArraySet(): Unit = { + val arraySeq = ArraySeq[AnyRef]("a", "b") + val vectorFromArraySeq = Vector.from(arraySeq) + val prefix1Field = classOf[Vector[_]].getDeclaredField("prefix1") + prefix1Field.setAccessible(true) + assertSame(arraySeq.unsafeArray, prefix1Field.get(vectorFromArraySeq)) + } + @Test def checkSearch(): Unit = SeqTests.checkSearch(Vector(0 to 1000: _*), 15, implicitly[Ordering[Int]]) @Test From 2c3e8b627f47bc219dc8adbbfca0925c9e4b76a7 Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Tue, 17 Aug 2021 17:05:21 +0200 Subject: [PATCH 346/769] 
Only deprecate using a Scala 3 keyword as an identifier for definitions See discussion in #9722. --- .../scala/tools/nsc/ast/parser/Parsers.scala | 13 +++++++++++-- test/files/neg/scala3-keywords.check | 12 ++++++------ test/files/neg/scala3-keywords.scala | 4 +++- 3 files changed, 20 insertions(+), 9 deletions(-) diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 796b906142ac..06795a6fa808 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -714,6 +714,10 @@ self => if (isRawIdent && in.name == raw.QMARK) deprecationWarning(in.offset, "using `?` as a type name will require backticks in the future.", "2.13.6") + def checkKeywordDefinition() = + if (isRawIdent && scala3Keywords.contains(in.name)) + deprecationWarning(in.offset, + s"Wrap `${in.name}` in backticks to use it as an identifier, it will become a keyword in Scala 3.", "2.13.7") def isIdent = in.token == IDENTIFIER || in.token == BACKQUOTED_IDENT def isMacro = in.token == IDENTIFIER && in.name == nme.MACROkw @@ -1277,8 +1281,6 @@ self => def ident(skipIt: Boolean): Name = ( if (isIdent) { val name = in.name.encode - if (in.token != BACKQUOTED_IDENT && scala3Keywords.contains(name)) - deprecationWarning(in.offset, s"Wrap `$name` in backticks to use it as an identifier, it will become a keyword in Scala 3.", "2.13.7") in.nextToken() name } @@ -2520,6 +2522,7 @@ self => if (caseParam) mods |= Flags.CASEACCESSOR } val nameOffset = in.offset + checkKeywordDefinition() val name = ident() var bynamemod = 0L val tpt = { @@ -2569,6 +2572,7 @@ self => } val nameOffset = in.offset checkQMarkDefinition() + checkKeywordDefinition() // TODO AM: freshTermName(o2p(in.skipToken()), "_$$"), will need to update test suite val pname: TypeName = wildcardOrIdent().toTypeName val param = atPos(start, nameOffset) { @@ -2784,6 +2788,7 @@ self => def patDefOrDcl(pos : Int, mods: Modifiers): List[Tree] = { var newmods = mods in.nextToken() + checkKeywordDefinition() val lhs = commaSeparated(stripParens(noSeq.pattern2())) val tp = typedOpt() val (rhs, rhsPos) = @@ -2879,6 +2884,7 @@ self => } else { val nameOffset = in.offset + checkKeywordDefinition() val name = identOrMacro() funDefRest(start, nameOffset, mods, name) } @@ -2990,6 +2996,7 @@ self => in.nextToken() newLinesOpt() atPos(start, in.offset) { + checkKeywordDefinition() val name = identForType() // @M! a type alias as well as an abstract type may declare type parameters val tparams = typeParamClauseOpt(name, null) @@ -3051,6 +3058,7 @@ self => */ def classDef(start: Offset, mods: Modifiers): ClassDef = { in.nextToken() + checkKeywordDefinition() val nameOffset = in.offset val name = identForType() atPos(start, if (name == tpnme.ERROR) start else nameOffset) { @@ -3086,6 +3094,7 @@ self => def objectDef(start: Offset, mods: Modifiers, isPackageObject: Boolean = false): ModuleDef = { in.nextToken() val nameOffset = in.offset + checkKeywordDefinition() val name = ident() val tstart = in.offset atPos(start, if (name == nme.ERROR) start else nameOffset) { diff --git a/test/files/neg/scala3-keywords.check b/test/files/neg/scala3-keywords.check index d4b12b623977..7f3a2155509a 100644 --- a/test/files/neg/scala3-keywords.check +++ b/test/files/neg/scala3-keywords.check @@ -1,19 +1,19 @@ -scala3-keywords.scala:13: warning: Wrap `enum` in backticks to use it as an identifier, it will become a keyword in Scala 3. 
+scala3-keywords.scala:15: warning: Wrap `enum` in backticks to use it as an identifier, it will become a keyword in Scala 3. val enum: Int = 1 // error ^ -scala3-keywords.scala:14: warning: Wrap `export` in backticks to use it as an identifier, it will become a keyword in Scala 3. +scala3-keywords.scala:16: warning: Wrap `export` in backticks to use it as an identifier, it will become a keyword in Scala 3. val export: Int = 1 // error ^ -scala3-keywords.scala:15: warning: Wrap `given` in backticks to use it as an identifier, it will become a keyword in Scala 3. +scala3-keywords.scala:17: warning: Wrap `given` in backticks to use it as an identifier, it will become a keyword in Scala 3. val given: Int = 1 // error ^ -scala3-keywords.scala:16: warning: Wrap `given` in backticks to use it as an identifier, it will become a keyword in Scala 3. +scala3-keywords.scala:18: warning: Wrap `given` in backticks to use it as an identifier, it will become a keyword in Scala 3. def foo(given: Int) = {} // error ^ -scala3-keywords.scala:17: warning: Wrap `export` in backticks to use it as an identifier, it will become a keyword in Scala 3. +scala3-keywords.scala:19: warning: Wrap `export` in backticks to use it as an identifier, it will become a keyword in Scala 3. def bla[export <: Int] = {} // error ^ -scala3-keywords.scala:19: warning: Wrap `enum` in backticks to use it as an identifier, it will become a keyword in Scala 3. +scala3-keywords.scala:21: warning: Wrap `enum` in backticks to use it as an identifier, it will become a keyword in Scala 3. class enum // error ^ error: No warnings can be incurred under -Werror. diff --git a/test/files/neg/scala3-keywords.scala b/test/files/neg/scala3-keywords.scala index d3be6d148541..23fbce36dc4c 100644 --- a/test/files/neg/scala3-keywords.scala +++ b/test/files/neg/scala3-keywords.scala @@ -2,11 +2,13 @@ // class A { val `enum`: Int = 1 + println(enum) val `export`: Int = 1 val `given`: Int = 1 - def foo(`given`: Int) = {} + def foo(`given`: Int) = given def bla[`export` <: Int] = { class `enum` + new enum } } class B { From 63893beb66b29fd90648f5751c647d1cc283c2f6 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Tue, 17 Aug 2021 19:27:44 -0700 Subject: [PATCH 347/769] remove Whitesource Lightbend no longer uses it, and no replacement has yet been chosen --- build.sbt | 12 ++---------- project/plugins.sbt | 2 -- src/intellij/scala.ipr.SAMPLE | 8 -------- 3 files changed, 2 insertions(+), 20 deletions(-) diff --git a/build.sbt b/build.sbt index 4bded6e96dbb..6b5ada594743 100644 --- a/build.sbt +++ b/build.sbt @@ -299,7 +299,6 @@ val disablePublishing = Seq[Setting[_]]( // The above is enough for Maven repos but it doesn't prevent publishing of ivy.xml files publish := {}, publishLocal := {}, - whitesourceIgnore := true ) lazy val setJarLocation: Setting[_] = @@ -429,10 +428,8 @@ lazy val compiler = configureAsSubproject(project) name := "scala-compiler", description := "Scala Compiler", libraryDependencies ++= Seq(antDep, asmDep), - // These are only needed for the POM. (And, note that the jansi dependency is a fiction - // for WhiteSource purposes; the JLine JAR contains a shaded jansi, but WhiteSource - // won't know about that unless we tell it.) - libraryDependencies ++= Seq(scalaXmlDep, jlineDep % "optional", jansiDep % "optional"), + // These are only needed for the POM. 
+ libraryDependencies ++= Seq(scalaXmlDep, jlineDep % "optional"), buildCharacterPropertiesFile := (Compile / resourceManaged).value / "scala-buildcharacter.properties", (Compile / resourceGenerators) += generateBuildCharacterPropertiesFile.map(file => Seq(file)).taskValue, // this a way to make sure that classes from interactive and scaladoc projects @@ -1334,11 +1331,6 @@ def findJar(files: Seq[Attributed[File]], dep: ModuleID): Option[Attributed[File files.find(_.get(moduleID.key).map(extract _) == Some(extract(dep))) } -// WhiteSource -whitesourceProduct := "Lightbend Reactive Platform" -whitesourceAggregateProjectName := "scala-2.12-stable" -whitesourceIgnoredScopes := Vector("test", "scala-tool") - Global / excludeLintKeys := (Global / excludeLintKeys).value ++ Set(scalaSource, javaSource, resourceDirectory) { diff --git a/project/plugins.sbt b/project/plugins.sbt index 2fc7b95e8495..77018c1b4bba 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -33,5 +33,3 @@ concurrentRestrictions in Global := Seq( addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.0.0") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.2") - -addSbtPlugin("com.lightbend" % "sbt-whitesource" % "0.1.16") diff --git a/src/intellij/scala.ipr.SAMPLE b/src/intellij/scala.ipr.SAMPLE index 673c7eec2349..c05b1fab7184 100644 --- a/src/intellij/scala.ipr.SAMPLE +++ b/src/intellij/scala.ipr.SAMPLE @@ -482,18 +482,10 @@ - - - - - - - - From d8515fae1fb075b066715779282ce5ad0304352e Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Wed, 18 Aug 2021 18:27:20 +0200 Subject: [PATCH 348/769] fix scala/bug#12399: add tests --- test/tasty/run/src-2/a/Main.scala | 11 +++++++++ .../src-2/tastytest/TestOpaquesPackage.scala | 24 +++++++++++++++++++ .../run/src-3/tastytest/opaques/package.scala | 7 ++++++ 3 files changed, 42 insertions(+) create mode 100644 test/tasty/run/src-2/a/Main.scala create mode 100644 test/tasty/run/src-2/tastytest/TestOpaquesPackage.scala create mode 100644 test/tasty/run/src-3/tastytest/opaques/package.scala diff --git a/test/tasty/run/src-2/a/Main.scala b/test/tasty/run/src-2/a/Main.scala new file mode 100644 index 000000000000..d86cf6564d57 --- /dev/null +++ b/test/tasty/run/src-2/a/Main.scala @@ -0,0 +1,11 @@ +package a + +import tastytest.opaques.Offset + +final case class A(off: Offset) + +object Main { + def foo(): Unit = { + assert(A(Offset(10)).off == Offset(10)) + } +} diff --git a/test/tasty/run/src-2/tastytest/TestOpaquesPackage.scala b/test/tasty/run/src-2/tastytest/TestOpaquesPackage.scala new file mode 100644 index 000000000000..f5a7d10c58a7 --- /dev/null +++ b/test/tasty/run/src-2/tastytest/TestOpaquesPackage.scala @@ -0,0 +1,24 @@ +package tastytest.opaques { + import tastytest.opaques.Offset + import a.A + + class Test1 { + import tastytest._ + + def test(): Unit = { + assert(A(Offset(10)) === A(Offset(10))) + } + + } +} + + +package tastytest { + + object TestOpaquesPackage extends Suite("TestOpaquesPackage") { + + test(new opaques.Test1().test()) + test(a.Main.foo()) + + } +} diff --git a/test/tasty/run/src-3/tastytest/opaques/package.scala b/test/tasty/run/src-3/tastytest/opaques/package.scala new file mode 100644 index 000000000000..9a2866892e1b --- /dev/null +++ b/test/tasty/run/src-3/tastytest/opaques/package.scala @@ -0,0 +1,7 @@ +package tastytest + +package object opaques { + opaque type Offset = Long + object Offset: + def apply(o: Long): Offset = o +} From 32539b2ae3339d6653caa26712b04406c12484d8 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Wed, 18 Aug 2021 
17:12:12 +0200 Subject: [PATCH 349/769] fix scala/bug#12409: tasty - fix case class apply default params --- .../tools/nsc/typechecker/NamesDefaults.scala | 30 +++++++++++++++++-- .../tastytest/TestCaseClassDefault.scala | 14 +++++++++ .../src-3/tastytest/CaseClassDefault.scala | 16 ++++++++++ 3 files changed, 57 insertions(+), 3 deletions(-) create mode 100644 test/tasty/run/src-2/tastytest/TestCaseClassDefault.scala create mode 100644 test/tasty/run/src-3/tastytest/CaseClassDefault.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala index 67a7107ac084..0e169c0d80b9 100644 --- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala +++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala @@ -476,9 +476,33 @@ trait NamesDefaults { self: Analyzer => def defaultGetter(param: Symbol, context: Context): Symbol = { val i = param.owner.paramss.flatten.indexWhere(p => p.name == param.name) + 1 if (i > 0) { - val defGetterName = nme.defaultGetterName(param.owner.name, i) - if (param.owner.isConstructor) { - val mod = companionSymbolOf(param.owner.owner, context) + + def isScala3SyntheticApply(meth: Symbol): Boolean = { + // According to rules in Scala 3, a synthetic method named `apply` + // should use `<init>` as the prefix of its default getters, + // i.e. reuse the constructor's default getters. + // We add some more precision - also verify that `apply` + // is defined in a module which has a case class companion + + def isModuleWithCaseClassCompanion(owner: Symbol) = ( + owner.isModuleClass + && linkedClassOfClassOf(owner, context).isCaseClass + ) + + (meth.isScala3Defined + && meth.isSynthetic + && meth.name == nme.apply + && isModuleWithCaseClassCompanion(meth.owner)) + } + + val scala3SynthApply = isScala3SyntheticApply(param.owner) + val defGetterName = { + val methodName = if (scala3SynthApply) nme.CONSTRUCTOR else param.owner.name + nme.defaultGetterName(methodName, i) + } + if (scala3SynthApply || param.owner.isConstructor) { + val scope = param.owner.owner + val mod = if (scala3SynthApply) scope else companionSymbolOf(scope, context) mod.info.member(defGetterName) } else { diff --git a/test/tasty/run/src-2/tastytest/TestCaseClassDefault.scala b/test/tasty/run/src-2/tastytest/TestCaseClassDefault.scala new file mode 100644 index 000000000000..21a924142eae --- /dev/null +++ b/test/tasty/run/src-2/tastytest/TestCaseClassDefault.scala @@ -0,0 +1,14 @@ +package tastytest + +object TestCaseClassDefault extends Suite("TestCaseClassDefault") { + + test(assert(CaseClassDefault.apply().value === 23)) + + test { + val i = new CaseClassDefault.Inner() + assert(i.Local.apply().value === 47) + } + + test(assert(CaseClassDefault.FakeCaseClass.apply().value === 97)) + +} diff --git a/test/tasty/run/src-3/tastytest/CaseClassDefault.scala b/test/tasty/run/src-3/tastytest/CaseClassDefault.scala new file mode 100644 index 000000000000..4e08c03851f1 --- /dev/null +++ b/test/tasty/run/src-3/tastytest/CaseClassDefault.scala @@ -0,0 +1,16 @@ +package tastytest + +case class CaseClassDefault(value: Int = 23) + +object CaseClassDefault { + + class Inner { + case class Local(value: Int = 47) + } + + class FakeCaseClass(val value: Int = 47) + object FakeCaseClass { + def apply(value: Int = 97): FakeCaseClass = new FakeCaseClass(value) + } + +} From 9f3f4c33d9c8f781a98e81131023da03bee89b11 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Wed, 11 Aug 2021 18:29:39 -0700 Subject: [PATCH 350/769] Update and reorganize
benchmarks readme to be less confusing We have exactly one custom runner, so the emphasis that the old structure put on custom runners was misleading. It's better to foreground the normal case. --- test/benchmarks/README.md | 77 ++++++++++++++++++++++++--------------- 1 file changed, 47 insertions(+), 30 deletions(-) diff --git a/test/benchmarks/README.md b/test/benchmarks/README.md index 45f8e142be9e..71d0462889d4 100644 --- a/test/benchmarks/README.md +++ b/test/benchmarks/README.md @@ -3,50 +3,62 @@ This directory is used by the `bench` subproject of the Scala sbt build. It makes use of the [sbt plugin](https://github.com/ktoso/sbt-jmh) for [JMH](https://openjdk.java.net/projects/code-tools/jmh/). -## Running a benchmark +## About the benchmarks -Benchmarks are built with the bootstrap compiler ("starr") using the library built from the `library` project ("quick"). -If you want to test compiler changes you need to bootstrap with the new compiler. +Benchmarks are built with the reference compiler ("starr") using the library built from the `library` project ("quick"). +If you want to test compiler changes you need to bootstrap a new compiler. -You'll then need to know the fully-qualified name of the benchmark runner class. -The benchmarking classes are organized under `src/main/scala`, +The benchmarking classes are organized under `test/benchmarks/src/main/scala`, in the same package hierarchy as the classes that they test. -Assuming that we're benchmarking `scala.collection.mutable.OpenHashMap`, -the benchmark runner would likely be named `scala.collection.mutable.OpenHashMapRunner`. -Using this example, one would simply run - bench/jmh:runMain scala.collection.mutable.OpenHashMapRunner +The benchmarking classes use the same package hierarchy as the classes that they test +in order to make it easy to expose members of the class under test in package-private scope, +should that be necessary for benchmarking. -in the Scala sbt build. +There are two types of classes in the source directory: +those suffixed `Benchmark`, and a few that are suffixed `Runner`. +(The latter are described below, under "Custom runners".) -The JMH results can be found under `../../target/jmh-results/` (i.e. the main Scala build's `target`, -not the one that contains the benchmark class files). `jmh-results` gets deleted on an sbt `bench/clean`, -so you should copy these files out of `target` if you wish to preserve them. +## Running a normal benchmark -## Creating a benchmark and runner +Use `bench/Jmh/run` and provide the fully qualified name of the benchmark +class: -The benchmarking classes use the same package hierarchy as the classes that they test -in order to make it easy to expose, in package scope, members of the class under test, -should that be necessary for benchmarking. + bench/Jmh/run scala.collection.mutable.ListBufferBenchmark -There are two types of classes in the source directory: -those suffixed `Benchmark` and those suffixed `Runner`. -The former are benchmarks that can be run directly using `bench/jmh:run`; -however, they are normally run from a corresponding class of the latter type, -which is run using `bench/jmh:runMain` (as described above). -This …`Runner` class is useful for setting appropriate JMH command options, +Results are printed to standard output. + +## Custom runners + +Some benchmarks have custom runners. A custom runner +can be useful for setting appropriate JMH command options, and for processing the JMH results into files that can be read by other tools, such as Gnuplot. 
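Whether or not a custom runner is involved, the benchmark classes themselves are plain JMH classes. The sketch below is purely illustrative (the class and member names are invented here; the annotation values mirror those used by the existing benchmarks in this directory):

```scala
package scala.collection.mutable

import java.util.concurrent.TimeUnit

import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra.Blackhole

@BenchmarkMode(Array(Mode.AverageTime))
@Fork(2)
@Threads(1)
@Warmup(iterations = 15)
@Measurement(iterations = 15)
@OutputTimeUnit(TimeUnit.NANOSECONDS)
@State(Scope.Benchmark)
class ExampleBenchmark {
  // JMH runs the whole benchmark once per @Param value
  @Param(Array("10", "100", "1000"))
  var size: Int = _

  var values: List[Int] = _

  @Setup(Level.Trial) def init(): Unit = {
    values = List.range(0, size)
  }

  // consume the result through a Blackhole so the JIT cannot eliminate the work
  @Benchmark def sumByFold(bh: Blackhole): Unit =
    bh.consume(values.foldLeft(0)(_ + _))
}
```

Such a class would then be run with `bench/Jmh/run scala.collection.mutable.ExampleBenchmark`, as described above.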
-The `benchmark.JmhRunner` trait should be woven into any runner class, for the standard behavior that it provides. +Assuming that we're benchmarking `scala.collection.mutable.OpenHashMap`, +the custom runner (if there is one) would likely be named +`scala.collection.mutable.OpenHashMapRunner`. +Using this example, one would run + + bench/Jmh/runMain scala.collection.mutable.OpenHashMapRunner + +in the Scala sbt build. + +Custom runner results are written to `../../target/jmh-results/` (i.e. the main Scala build's `target`, +not the one that contains the benchmark class files). `jmh-results` gets deleted on an sbt `bench/clean`, +so you should copy these files out of `target` if you wish to preserve them. + +If you want to make your own custom runner, extend the `benchmark.JmhRunner` trait, for the standard behavior that it provides. This includes creating output files in a subdirectory of `target/jmh-results` derived from the fully-qualified package name of the `Runner` class. ## Some useful HotSpot options -Adding these to the `jmh:run` or `jmh:runMain` command line may help if you're using the HotSpot (Oracle, OpenJDK) compiler. + +Adding these to the `Jmh/run` or `Jmh/runMain` command line may help if you're using the HotSpot (Oracle, OpenJDK) compiler. They require prefixing with `-jvmArgs`. -See [the Java documentation](https://docs.oracle.com/javase/8/docs/technotes/tools/unix/java.html) for more options. +See [the Java documentation](https://docs.oracle.com/javase/8/docs/technotes/tools/unix/java.html) for more options. ### Viewing JIT compilation events + Adding `-XX:+PrintCompilation` shows when Java methods are being compiled or deoptimized. At the most basic level, these messages will tell you whether the code that you're measuring is still being tuned, @@ -54,16 +66,20 @@ so that you know whether you're running enough warm-up iterations. See [Kris Mok's notes](https://gist.github.com/rednaxelafx/1165804#file-notes-md) to interpret the output in detail. ### Consider GC events + If you're not explicitly performing `System.gc()` calls outside of your benchmarking code, you should add the JVM option `-verbose:gc` to understand the effect that GCs may be having on your tests. ### "Diagnostic" options + These require the `-XX:+UnlockDiagnosticVMOptions` JVM option. #### Viewing inlining events + Add `-XX:+PrintInlining`. #### Viewing the disassembled code + If you're running OpenJDK or Oracle JVM, you may need to install the disassembler library (`hsdis-amd64.so` for the `amd64` architecture). In Debian, this is available in @@ -84,16 +100,16 @@ To show it for _all_ methods, add `-XX:+PrintAssembly`. ### Using JITWatch -[JITWatch](https://github.com/AdoptOpenJDK/jitwatch) is useful to understand how the JVM has JIT compiled +[JITWatch](https://github.com/AdoptOpenJDK/jitwatch) is useful to understand how the JVM has JIT-compiled code. If you install `hsdis`, as described above, machine code disassembly is also created. 
You can generate the `hotspot.log` file for a benchmark run by adding the [required JVM options](https://github.com/AdoptOpenJDK/jitwatch/wiki/Building-hsdis) -to JMH benchmark execution: +to JMH benchmark execution: ``` -sbt:root> bench/jmh:run scala.collection.mutable.ArrayOpsBenchmark.insertInteger -psize=1000 -f1 -jvmArgs -XX:+UnlockDiagnosticVMOptions -jvmArgs -XX:+TraceClassLoading -jvmArgs -XX:+LogCompilation -jvmArgs -XX:LogFile=target/hotspot.log -jvmArgs -XX:+PrintAssembly +sbt:root> bench/Jmh/run scala.collection.mutable.ArrayOpsBenchmark.insertInteger -psize=1000 -f1 -jvmArgs -XX:+UnlockDiagnosticVMOptions -jvmArgs -XX:+TraceClassLoading -jvmArgs -XX:+LogCompilation -jvmArgs -XX:LogFile=target/hotspot.log -jvmArgs -XX:+PrintAssembly ... [info] Loaded disassembler from /Users/jz/.jabba/jdk/1.8.172/Contents/Home/jre/lib/hsdis-amd64.dylib [info] Decoding compiled method 0x0000000113f60bd0: @@ -114,7 +130,7 @@ sbt:root> bench/jmh:run scala.collection.mutable.ArrayOpsBenchmark.insertInteger JITWatch requires configuration of the class and source path. We generate that with a custom task in our build: ``` -sbt> bench/jmh:jitwatchConfigFile +sbt> bench/Jmh/jitwatchConfigFile ... jmh ... @@ -128,6 +144,7 @@ sbt> ^C Follow instructions in the output above and start gleaning insights! ## Useful reading + * [OpenJDK advice on microbenchmarks](https://wiki.openjdk.java.net/display/HotSpot/MicroBenchmarks) * Brian Goetz's "Java theory and practice" articles: * "[Dynamic compilation and performance measurement](https://www.ibm.com/developerworks/java/library/j-jtp12214/)" From 882b1b02e24af31641ed1926e5b9bd46cc72ccd0 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Wed, 11 Aug 2021 19:50:17 -0700 Subject: [PATCH 351/769] Add ArrayBufferBenchmark Copy-pasted from `ListBufferBenchmark`, but based on the specifics of the code changes in this PR, I added benchmarks for `addAll` and `reverseIterator`, and modified the `insertAll` benchmark so it's measuring `ArrayBuffer`-into-`ArrayBuffer` insertion (rather than `Seq`-into-`ArrayBuffer`). --- .../mutable/ArrayBufferBenchmark.scala | 97 +++++++++++++++++++ 1 file changed, 97 insertions(+) create mode 100644 test/benchmarks/src/main/scala/scala/collection/mutable/ArrayBufferBenchmark.scala diff --git a/test/benchmarks/src/main/scala/scala/collection/mutable/ArrayBufferBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/mutable/ArrayBufferBenchmark.scala new file mode 100644 index 000000000000..aafa899e3442 --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/collection/mutable/ArrayBufferBenchmark.scala @@ -0,0 +1,97 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection.mutable + +import java.util.concurrent.TimeUnit + +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra._ + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 15) +@Measurement(iterations = 15) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class ArrayBufferBenchmark { + @Param(Array(/*"0", "1",*/ "10", "100", "1000", "10000")) + var size: Int = _ + + var ref: ArrayBuffer[Int] = _ + + @Setup(Level.Trial) def init: Unit = { + ref = new ArrayBuffer + for(i <- 0 until size) ref += i + } + + @Benchmark def filterInPlace(bh: Blackhole): Unit = { + val b = ref.clone() + b.filterInPlace(_ % 2 == 0) + bh.consume(b) + } + + @Benchmark def update(bh: Blackhole): Unit = { + val b = ref.clone() + var i = 0 + while(i < size) { + b.update(i, -1) + i += 2 + } + bh.consume(b) + } + + @Benchmark def addAll(bh: Blackhole): Unit = { + val b1 = ref.clone() + val b2 = ref.clone() + var i = 0 + b1.addAll(b2) + bh.consume(b1) + } + + @Benchmark def flatMapInPlace1(bh: Blackhole): Unit = { + val b = ref.clone() + val seq = Seq(0,0) + b.flatMapInPlace { _ => seq } + bh.consume(b) + } + + @Benchmark def iteratorA(bh: Blackhole): Unit = { + val b = ref.clone() + var n = 0 + for (x <- b.iterator) n += x + bh.consume(n) + bh.consume(b) + } + + @Benchmark def iteratorB(bh: Blackhole): Unit = { + val b = ref.clone() + bh.consume(b.iterator.toVector) + bh.consume(b) + } + + @Benchmark def reverseIteratorA(bh: Blackhole): Unit = { + val b = ref.clone() + var n = 0 + for (x <- b.reverseIterator) n += x + bh.consume(n) + bh.consume(b) + } + + @Benchmark def reverseIteratorB(bh: Blackhole): Unit = { + val b = ref.clone() + bh.consume(b.reverseIterator.toVector) + bh.consume(b) + } + +} From 7f14a79f260f6cd230174bb2e9b7db52cecadef0 Mon Sep 17 00:00:00 2001 From: NthPortal Date: Mon, 21 Sep 2020 01:22:15 -0400 Subject: [PATCH 352/769] [bug#12121] Add test for inserting an ArrayBuffer into itself --- src/library/scala/collection/mutable/ArrayBuffer.scala | 4 ++++ .../scala/collection/mutable/ArrayBufferTest.scala | 10 +++++++++- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/src/library/scala/collection/mutable/ArrayBuffer.scala b/src/library/scala/collection/mutable/ArrayBuffer.scala index 269d564c4c37..e9c19ff11392 100644 --- a/src/library/scala/collection/mutable/ArrayBuffer.scala +++ b/src/library/scala/collection/mutable/ArrayBuffer.scala @@ -167,6 +167,10 @@ class ArrayBuffer[A] private (initialElements: Array[AnyRef], initialSize: Int) size0 = size0 + elemsLength elems match { case elems: ArrayBuffer[_] => + // if `elems eq this`, this works because `elems.array eq this.array`, + // we didn't overwrite the values being inserted after moving them in + // the previous copy a few lines up, and `System.arraycopy` will + // effectively "read" all the values before overwriting any of them. 
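              // A concrete illustration of the aliasing case described above (illustration only, not a line of this patch):
              //   val b = ArrayBuffer(1, 2, 3)
              //   b.insertAll(1, b)   // b now contains 1, 1, 2, 3, 2, 3
              // This is exactly the behaviour the insertAll_self test added later in this patch asserts.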
Array.copy(elems.array, 0, array, index, elemsLength) case _ => var i = 0 diff --git a/test/junit/scala/collection/mutable/ArrayBufferTest.scala b/test/junit/scala/collection/mutable/ArrayBufferTest.scala index 8f7ae6fe1fb2..fcdd04cc3875 100644 --- a/test/junit/scala/collection/mutable/ArrayBufferTest.scala +++ b/test/junit/scala/collection/mutable/ArrayBufferTest.scala @@ -4,7 +4,7 @@ import org.junit.Test import org.junit.Assert.{assertEquals, assertTrue} import scala.annotation.nowarn -import scala.tools.testkit.AssertUtil.{assertThrows, fail} +import scala.tools.testkit.AssertUtil.{assertSameElements, assertThrows, fail} import scala.tools.testkit.ReflectUtil.{getMethodAccessible, _} class ArrayBufferTest { @@ -447,4 +447,12 @@ class ArrayBufferTest { assertEquals(32, resizeDown(64, 30)) assertEquals(21, resizeDown(42, 17)) } + + // scala/bug#12121 + @Test + def insertAll_self(): Unit = { + val buf = ArrayBuffer(1, 2, 3) + buf.insertAll(1, buf) + assertSameElements(List(1, 1, 2, 3, 2, 3), buf) + } } From 3e1aad3a6310f3b305fc0c17422d2ae0392742bb Mon Sep 17 00:00:00 2001 From: NthPortal Date: Wed, 2 Sep 2020 14:52:29 -0400 Subject: [PATCH 353/769] [bug#12009] Make ArrayBuffer's iterator fail-fast Make `ArrayBuffer`'s iterator fail-fast when the buffer is mutated after the iterator's creation. --- project/MimaFilters.scala | 25 +++- src/library/scala/collection/IndexedSeq.scala | 10 +- .../scala/collection/IndexedSeqView.scala | 26 ++-- .../collection/mutable/ArrayBuffer.scala | 100 +++++++++++---- .../mutable/CheckedIndexedSeqView.scala | 117 ++++++++++++++++++ .../mutable/MutationTrackingTest.scala | 66 ++++++++-- 6 files changed, 281 insertions(+), 63 deletions(-) create mode 100644 src/library/scala/collection/mutable/CheckedIndexedSeqView.scala diff --git a/project/MimaFilters.scala b/project/MimaFilters.scala index 4b13a302e298..fa4443c37009 100644 --- a/project/MimaFilters.scala +++ b/project/MimaFilters.scala @@ -31,6 +31,29 @@ object MimaFilters extends AutoPlugin { ProblemFilters.exclude[DirectMissingMethodProblem]("scala.Predef#ArrayCharSequence.isEmpty"), ProblemFilters.exclude[DirectMissingMethodProblem]("scala.runtime.ArrayCharSequence.isEmpty"), + // #9425 Node is private[collection] + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.mutable.HashMap#Node.foreachEntry"), + + // Fixes for scala/bug#12009 + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.mutable.ArrayBufferView.this"), // private[mutable] + ProblemFilters.exclude[FinalClassProblem]("scala.collection.IndexedSeqView$IndexedSeqViewIterator"), // private[collection] + ProblemFilters.exclude[FinalClassProblem]("scala.collection.IndexedSeqView$IndexedSeqViewReverseIterator"), // private[collection] + ProblemFilters.exclude[MissingClassProblem]("scala.collection.mutable.CheckedIndexedSeqView"), // private[mutable] + ProblemFilters.exclude[MissingClassProblem]("scala.collection.mutable.CheckedIndexedSeqView$"), // private[mutable] + ProblemFilters.exclude[MissingClassProblem]("scala.collection.mutable.CheckedIndexedSeqView$CheckedIterator"), // private[mutable] + ProblemFilters.exclude[MissingClassProblem]("scala.collection.mutable.CheckedIndexedSeqView$CheckedReverseIterator"), // private[mutable] + ProblemFilters.exclude[MissingClassProblem]("scala.collection.mutable.CheckedIndexedSeqView$Id"), // private[mutable] + ProblemFilters.exclude[MissingClassProblem]("scala.collection.mutable.CheckedIndexedSeqView$Appended"), // private[mutable] + 
ProblemFilters.exclude[MissingClassProblem]("scala.collection.mutable.CheckedIndexedSeqView$Prepended"), // private[mutable] + ProblemFilters.exclude[MissingClassProblem]("scala.collection.mutable.CheckedIndexedSeqView$Concat"), // private[mutable] + ProblemFilters.exclude[MissingClassProblem]("scala.collection.mutable.CheckedIndexedSeqView$Take"), // private[mutable] + ProblemFilters.exclude[MissingClassProblem]("scala.collection.mutable.CheckedIndexedSeqView$TakeRight"), // private[mutable] + ProblemFilters.exclude[MissingClassProblem]("scala.collection.mutable.CheckedIndexedSeqView$Drop"), // private[mutable] + ProblemFilters.exclude[MissingClassProblem]("scala.collection.mutable.CheckedIndexedSeqView$DropRight"), // private[mutable] + ProblemFilters.exclude[MissingClassProblem](s"scala.collection.mutable.CheckedIndexedSeqView$$Map"), // private[mutable] + ProblemFilters.exclude[MissingClassProblem]("scala.collection.mutable.CheckedIndexedSeqView$Reverse"), // private[mutable] + ProblemFilters.exclude[MissingClassProblem]("scala.collection.mutable.CheckedIndexedSeqView$Slice"), // private[mutable] + // #8835 ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.reflect.runtime.SynchronizedOps#SynchronizedBaseTypeSeq.scala$reflect$runtime$SynchronizedOps$SynchronizedBaseTypeSeq$$super$maxDepthOfElems"), @@ -42,7 +65,7 @@ object MimaFilters extends AutoPlugin { ProblemFilters.exclude[IncompatibleMethTypeProblem]("scala.reflect.io.FileZipArchive#LeakyEntry.this"), ProblemFilters.exclude[MissingClassProblem]("scala.reflect.io.FileZipArchive$zipFilePool$"), - ) + ) override val buildSettings = Seq( mimaFailOnNoPrevious := false, // we opt everything out, knowing we only check library/reflect diff --git a/src/library/scala/collection/IndexedSeq.scala b/src/library/scala/collection/IndexedSeq.scala index 18b66b710b07..65a30efe4030 100644 --- a/src/library/scala/collection/IndexedSeq.scala +++ b/src/library/scala/collection/IndexedSeq.scala @@ -47,15 +47,7 @@ trait IndexedSeqOps[+A, +CC[_], +C] extends Any with SeqOps[A, CC, C] { self => s.asInstanceOf[S with EfficientSplit] } - override def reverseIterator: Iterator[A] = new AbstractIterator[A] { - private[this] var i = self.length - def hasNext: Boolean = 0 < i - def next(): A = - if (0 < i) { - i -= 1 - self(i) - } else Iterator.empty.next() - } + override def reverseIterator: Iterator[A] = view.reverseIterator override def foldRight[B](z: B)(op: (A, B) => B): B = { val it = reverseIterator diff --git a/src/library/scala/collection/IndexedSeqView.scala b/src/library/scala/collection/IndexedSeqView.scala index a1b3d4d5e32b..692486b1e088 100644 --- a/src/library/scala/collection/IndexedSeqView.scala +++ b/src/library/scala/collection/IndexedSeqView.scala @@ -49,14 +49,15 @@ trait IndexedSeqView[+A] extends IndexedSeqOps[A, View, View[A]] with SeqView[A] object IndexedSeqView { @SerialVersionUID(3L) - private final class IndexedSeqViewIterator[A](self: IndexedSeqView[A]) extends AbstractIterator[A] with Serializable { + private[collection] class IndexedSeqViewIterator[A](self: IndexedSeqView[A]) extends AbstractIterator[A] with Serializable { private[this] var current = 0 - private[this] var remainder = self.size + private[this] var remainder = self.length override def knownSize: Int = remainder - def hasNext = remainder > 0 + @inline private[this] def _hasNext: Boolean = remainder > 0 + def hasNext: Boolean = _hasNext def next(): A = - if (hasNext) { - val r = self.apply(current) + if (_hasNext) { + val r = self(current) current += 1 
remainder -= 1 r @@ -82,18 +83,18 @@ object IndexedSeqView { } } @SerialVersionUID(3L) - private final class IndexedSeqViewReverseIterator[A](self: IndexedSeqView[A]) extends AbstractIterator[A] with Serializable { - private[this] var pos = self.size - 1 - private[this] var remainder = self.size - def hasNext: Boolean = remainder > 0 + private[collection] class IndexedSeqViewReverseIterator[A](self: IndexedSeqView[A]) extends AbstractIterator[A] with Serializable { + private[this] var pos = self.length - 1 + private[this] var remainder = self.length + @inline private[this] def _hasNext: Boolean = remainder > 0 + def hasNext: Boolean = _hasNext def next(): A = - if (pos < 0) throw new NoSuchElementException - else { + if (_hasNext) { val r = self(pos) pos -= 1 remainder -= 1 r - } + } else Iterator.empty.next() override def drop(n: Int): Iterator[A] = { if (n > 0) { @@ -103,7 +104,6 @@ object IndexedSeqView { this } - override def sliceIterator(from: Int, until: Int): Iterator[A] = { val startCutoff = pos val untilCutoff = startCutoff - remainder + 1 diff --git a/src/library/scala/collection/mutable/ArrayBuffer.scala b/src/library/scala/collection/mutable/ArrayBuffer.scala index e9c19ff11392..e60f50587fa9 100644 --- a/src/library/scala/collection/mutable/ArrayBuffer.scala +++ b/src/library/scala/collection/mutable/ArrayBuffer.scala @@ -39,6 +39,7 @@ import scala.util.chaining._ * @define mayNotTerminateInf * @define willNotTerminateInf */ +@SerialVersionUID(-1582447879429021880L) class ArrayBuffer[A] private (initialElements: Array[AnyRef], initialSize: Int) extends AbstractBuffer[A] with IndexedBuffer[A] @@ -51,6 +52,8 @@ class ArrayBuffer[A] private (initialElements: Array[AnyRef], initialSize: Int) def this(initialSize: Int) = this(new Array[AnyRef](initialSize max 1), 0) + @transient private[this] var mutationCount: Int = 0 + protected[collection] var array: Array[AnyRef] = initialElements protected var size0 = initialSize @@ -62,14 +65,17 @@ class ArrayBuffer[A] private (initialElements: Array[AnyRef], initialSize: Int) override def knownSize: Int = super[IndexedSeqOps].knownSize /** Ensure that the internal array has at least `n` cells. */ - protected def ensureSize(n: Int): Unit = + protected def ensureSize(n: Int): Unit = { + mutationCount += 1 array = ArrayBuffer.ensureSize(array, size0, n) + } def sizeHint(size: Int): Unit = if(size > length && size >= 1) ensureSize(size) /** Reduce length to `n`, nulling out all dropped elements */ private def reduceToSize(n: Int): Unit = { + mutationCount += 1 Arrays.fill(array, n, size0, null) size0 = n } @@ -79,7 +85,10 @@ class ArrayBuffer[A] private (initialElements: Array[AnyRef], initialSize: Int) * which may replace the array by a shorter one. * This allows releasing some unused memory. */ - def trimToSize(): Unit = resize(length) + def trimToSize(): Unit = { + mutationCount += 1 + resize(length) + } /** Trims the `array` buffer size down to either a power of 2 * or Int.MaxValue while keeping first `requiredLength` elements. 
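  // What the mutationCount bookkeeping above buys, as a rough sketch (illustration only, not a line of this patch):
  //   val b  = ArrayBuffer(1, 2, 3)
  //   val it = b.iterator   // the checked iterator captures the current mutation count
  //   b += 4                // addOne goes through ensureSize, which bumps the count
  //   it.hasNext            // throws ConcurrentModificationException
  // This is the behaviour exercised by the MutationTrackingTest changes further down.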
@@ -99,12 +108,13 @@ class ArrayBuffer[A] private (initialElements: Array[AnyRef], initialSize: Int) def update(@deprecatedName("n", "2.13.0") index: Int, elem: A): Unit = { checkWithinBounds(index, index + 1) + mutationCount += 1 array(index) = elem.asInstanceOf[AnyRef] } def length = size0 - override def view: ArrayBufferView[A] = new ArrayBufferView(array, size0) + override def view: ArrayBufferView[A] = new ArrayBufferView(array, size0, () => mutationCount) override def iterableFactory: SeqFactory[ArrayBuffer] = ArrayBuffer @@ -136,9 +146,12 @@ class ArrayBuffer[A] private (initialElements: Array[AnyRef], initialSize: Int) override def addAll(elems: IterableOnce[A]): this.type = { elems match { case elems: ArrayBuffer[_] => - ensureSize(length + elems.length) - Array.copy(elems.array, 0, array, length, elems.length) - size0 = length + elems.length + val elemsLength = elems.size0 + if (elemsLength > 0) { + ensureSize(length + elemsLength) + Array.copy(elems.array, 0, array, length, elemsLength) + size0 = length + elemsLength + } case _ => super.addAll(elems) } this @@ -162,23 +175,25 @@ class ArrayBuffer[A] private (initialElements: Array[AnyRef], initialSize: Int) elems match { case elems: collection.Iterable[A] => val elemsLength = elems.size - ensureSize(length + elemsLength) - Array.copy(array, index, array, index + elemsLength, size0 - index) - size0 = size0 + elemsLength - elems match { - case elems: ArrayBuffer[_] => - // if `elems eq this`, this works because `elems.array eq this.array`, - // we didn't overwrite the values being inserted after moving them in - // the previous copy a few lines up, and `System.arraycopy` will - // effectively "read" all the values before overwriting any of them. - Array.copy(elems.array, 0, array, index, elemsLength) - case _ => - var i = 0 - val it = elems.iterator - while (i < elemsLength) { - this(index + i) = it.next() - i += 1 - } + if (elemsLength > 0) { + ensureSize(length + elemsLength) + Array.copy(array, index, array, index + elemsLength, size0 - index) + size0 = size0 + elemsLength + elems match { + case elems: ArrayBuffer[_] => + // if `elems eq this`, this works because `elems.array eq this.array`, + // we didn't overwrite the values being inserted after moving them in + // the previous copy a few lines up, and `System.arraycopy` will + // effectively "read" all the values before overwriting any of them. + Array.copy(elems.array, 0, array, index, elemsLength) + case _ => + var i = 0 + val it = elems.iterator + while (i < elemsLength) { + this(index + i) = it.next() + i += 1 + } + } } case _ => insertAll(index, ArrayBuffer.from(elems)) @@ -234,7 +249,10 @@ class ArrayBuffer[A] private (initialElements: Array[AnyRef], initialSize: Int) * @return modified input $coll sorted according to the ordering `ord`. 
*/ override def sortInPlace[B >: A]()(implicit ord: Ordering[B]): this.type = { - if (length > 1) scala.util.Sorting.stableSort(array.asInstanceOf[Array[B]], 0, length) + if (length > 1) { + mutationCount += 1 + scala.util.Sorting.stableSort(array.asInstanceOf[Array[B]], 0, length) + } this } } @@ -299,8 +317,36 @@ object ArrayBuffer extends StrictOptimizedSeqFactory[ArrayBuffer] { } } -final class ArrayBufferView[A](val array: Array[AnyRef], val length: Int) extends AbstractIndexedSeqView[A] { - @throws[ArrayIndexOutOfBoundsException] - def apply(n: Int) = if (n < length) array(n).asInstanceOf[A] else throw new IndexOutOfBoundsException(s"$n is out of bounds (min 0, max ${length - 1})") +final class ArrayBufferView[A] private[mutable](val array: Array[AnyRef], val length: Int, mutationCount: () => Int) + extends AbstractIndexedSeqView[A] { + @deprecated("never intended to be public; call ArrayBuffer#view instead", since = "2.13.6") + def this(array: Array[AnyRef], length: Int) = { + // this won't actually track mutation, but it would be a pain to have the implementation + // check if we have a method to get the current mutation count or not on every method and + // change what it does based on that. hopefully no one ever calls this. + this(array, length, () => 0) + } + + @throws[IndexOutOfBoundsException] + def apply(n: Int): A = if (n < length) array(n).asInstanceOf[A] else throw new IndexOutOfBoundsException(s"$n is out of bounds (min 0, max ${length - 1})") override protected[this] def className = "ArrayBufferView" + + // we could inherit all these from `CheckedIndexedSeqView`, except this class is public + override def iterator: Iterator[A] = new CheckedIndexedSeqView.CheckedIterator(this, mutationCount()) + override def reverseIterator: Iterator[A] = new CheckedIndexedSeqView.CheckedReverseIterator(this, mutationCount()) + + override def appended[B >: A](elem: B): IndexedSeqView[B] = new CheckedIndexedSeqView.Appended(this, elem)(mutationCount) + override def prepended[B >: A](elem: B): IndexedSeqView[B] = new CheckedIndexedSeqView.Prepended(elem, this)(mutationCount) + override def take(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Take(this, n)(mutationCount) + override def takeRight(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.TakeRight(this, n)(mutationCount) + override def drop(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Drop(this, n)(mutationCount) + override def dropRight(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.DropRight(this, n)(mutationCount) + override def map[B](f: A => B): IndexedSeqView[B] = new CheckedIndexedSeqView.Map(this, f)(mutationCount) + override def reverse: IndexedSeqView[A] = new CheckedIndexedSeqView.Reverse(this)(mutationCount) + override def slice(from: Int, until: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Slice(this, from, until)(mutationCount) + override def tapEach[U](f: A => U): IndexedSeqView[A] = new CheckedIndexedSeqView.Map(this, { (a: A) => f(a); a})(mutationCount) + + override def concat[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(this, suffix)(mutationCount) + override def appendedAll[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(this, suffix)(mutationCount) + override def prependedAll[B >: A](prefix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(prefix, this)(mutationCount) } diff --git 
a/src/library/scala/collection/mutable/CheckedIndexedSeqView.scala b/src/library/scala/collection/mutable/CheckedIndexedSeqView.scala new file mode 100644 index 000000000000..b9598904375d --- /dev/null +++ b/src/library/scala/collection/mutable/CheckedIndexedSeqView.scala @@ -0,0 +1,117 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection +package mutable + +private[mutable] trait CheckedIndexedSeqView[+A] extends IndexedSeqView[A] { + protected val mutationCount: () => Int + + override def iterator: Iterator[A] = new CheckedIndexedSeqView.CheckedIterator(this, mutationCount()) + override def reverseIterator: Iterator[A] = new CheckedIndexedSeqView.CheckedReverseIterator(this, mutationCount()) + + override def appended[B >: A](elem: B): IndexedSeqView[B] = new CheckedIndexedSeqView.Appended(this, elem)(mutationCount) + override def prepended[B >: A](elem: B): IndexedSeqView[B] = new CheckedIndexedSeqView.Prepended(elem, this)(mutationCount) + override def take(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Take(this, n)(mutationCount) + override def takeRight(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.TakeRight(this, n)(mutationCount) + override def drop(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Drop(this, n)(mutationCount) + override def dropRight(n: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.DropRight(this, n)(mutationCount) + override def map[B](f: A => B): IndexedSeqView[B] = new CheckedIndexedSeqView.Map(this, f)(mutationCount) + override def reverse: IndexedSeqView[A] = new CheckedIndexedSeqView.Reverse(this)(mutationCount) + override def slice(from: Int, until: Int): IndexedSeqView[A] = new CheckedIndexedSeqView.Slice(this, from, until)(mutationCount) + override def tapEach[U](f: A => U): IndexedSeqView[A] = new CheckedIndexedSeqView.Map(this, { (a: A) => f(a); a})(mutationCount) + + override def concat[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(this, suffix)(mutationCount) + override def appendedAll[B >: A](suffix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(this, suffix)(mutationCount) + override def prependedAll[B >: A](prefix: IndexedSeqView.SomeIndexedSeqOps[B]): IndexedSeqView[B] = new CheckedIndexedSeqView.Concat(prefix, this)(mutationCount) +} + +private[mutable] object CheckedIndexedSeqView { + import IndexedSeqView.SomeIndexedSeqOps + + @SerialVersionUID(3L) + private[mutable] class CheckedIterator[A](self: IndexedSeqView[A], mutationCount: => Int) + extends IndexedSeqView.IndexedSeqViewIterator[A](self) { + private[this] val expectedCount = mutationCount + override def hasNext: Boolean = { + MutationTracker.checkMutationsForIteration(expectedCount, mutationCount) + super.hasNext + } + } + + @SerialVersionUID(3L) + private[mutable] class CheckedReverseIterator[A](self: IndexedSeqView[A], mutationCount: => Int) + extends IndexedSeqView.IndexedSeqViewReverseIterator[A](self) { + private[this] val expectedCount = mutationCount + override def hasNext: Boolean = { + MutationTracker.checkMutationsForIteration(expectedCount, mutationCount) + super.hasNext + } + } + + @SerialVersionUID(3L) + class Id[+A](underlying: SomeIndexedSeqOps[A])(protected 
val mutationCount: () => Int) + extends IndexedSeqView.Id(underlying) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class Appended[+A](underlying: SomeIndexedSeqOps[A], elem: A)(protected val mutationCount: () => Int) + extends IndexedSeqView.Appended(underlying, elem) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class Prepended[+A](elem: A, underlying: SomeIndexedSeqOps[A])(protected val mutationCount: () => Int) + extends IndexedSeqView.Prepended(elem, underlying) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class Concat[A](prefix: SomeIndexedSeqOps[A], suffix: SomeIndexedSeqOps[A])(protected val mutationCount: () => Int) + extends IndexedSeqView.Concat[A](prefix, suffix) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class Take[A](underlying: SomeIndexedSeqOps[A], n: Int)(protected val mutationCount: () => Int) + extends IndexedSeqView.Take(underlying, n) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class TakeRight[A](underlying: SomeIndexedSeqOps[A], n: Int)(protected val mutationCount: () => Int) + extends IndexedSeqView.TakeRight(underlying, n) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class Drop[A](underlying: SomeIndexedSeqOps[A], n: Int)(protected val mutationCount: () => Int) + extends IndexedSeqView.Drop[A](underlying, n) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class DropRight[A](underlying: SomeIndexedSeqOps[A], n: Int)(protected val mutationCount: () => Int) + extends IndexedSeqView.DropRight[A](underlying, n) with CheckedIndexedSeqView[A] + + @SerialVersionUID(3L) + class Map[A, B](underlying: SomeIndexedSeqOps[A], f: A => B)(protected val mutationCount: () => Int) + extends IndexedSeqView.Map(underlying, f) with CheckedIndexedSeqView[B] + + @SerialVersionUID(3L) + class Reverse[A](underlying: SomeIndexedSeqOps[A])(protected val mutationCount: () => Int) + extends IndexedSeqView.Reverse[A](underlying) with CheckedIndexedSeqView[A] { + override def reverse: IndexedSeqView[A] = underlying match { + case x: IndexedSeqView[A] => x + case _ => super.reverse + } + } + + @SerialVersionUID(3L) + class Slice[A](underlying: SomeIndexedSeqOps[A], from: Int, until: Int)(protected val mutationCount: () => Int) + extends AbstractIndexedSeqView[A] with CheckedIndexedSeqView[A] { + protected val lo = from max 0 + protected val hi = (until max 0) min underlying.length + protected val len = (hi - lo) max 0 + @throws[IndexOutOfBoundsException] + def apply(i: Int): A = underlying(lo + i) + def length: Int = len + } +} diff --git a/test/junit/scala/collection/mutable/MutationTrackingTest.scala b/test/junit/scala/collection/mutable/MutationTrackingTest.scala index 9ff9511320e3..c5a03270f01a 100644 --- a/test/junit/scala/collection/mutable/MutationTrackingTest.scala +++ b/test/junit/scala/collection/mutable/MutationTrackingTest.scala @@ -18,34 +18,40 @@ import java.util.ConcurrentModificationException import org.junit.Test import scala.annotation.nowarn +import scala.annotation.unchecked.{uncheckedVariance => uV} import scala.tools.testkit.AssertUtil.assertThrows abstract class MutationTrackingTest[+C <: Iterable[_]](factory: Factory[Int, C]) { - private def runOp(op: C => Any, viewOrIterator: C => IterableOnceOps[_, AnyConstr, _]): Unit = { - val coll = (factory.newBuilder += 1 += 2 += 3 += 4).result() + private[this] type VoI = C => IterableOnceOps[_, AnyConstr, _] + // if you do bad things with this by returning a different builder, it WILL bite you + protected[this] type BuildSequence = Builder[Int, 
C @uV] => Builder[Int, C @uV] + protected[this] val defaultBuildSequence: BuildSequence = _ += 1 += 2 += 3 += 4 + + private[this] def runOp(op: C => Any, bs: BuildSequence, viewOrIterator: VoI): Unit = { + val coll = bs(factory.newBuilder).result() val it = viewOrIterator(coll) op(coll) it.foreach(_ => ()) } - private def runOpMaybeThrowing(op: C => Any, - throws: Boolean, - viewOrIterator: C => IterableOnceOps[_, AnyConstr, _]): Unit = { - if (throws) assertThrows[ConcurrentModificationException](runOp(op, viewOrIterator), _ contains "iteration") - else runOp(op, viewOrIterator) + private[this] def runOpMaybeThrowing(op: C => Any, bs: BuildSequence, throws: Boolean, viewOrIterator: VoI): Unit = { + if (throws) assertThrows[ConcurrentModificationException](runOp(op, bs, viewOrIterator), _ contains "iteration") + else runOp(op, bs, viewOrIterator) } - private def runOpForViewAndIterator(op: C => Any, throws: Boolean): Unit = { - runOp(op, _.view) // never throws - runOpMaybeThrowing(op, throws, _.iterator) - runOpMaybeThrowing(op, throws, _.view.iterator) + private[this] def runOpForViewAndIterator(op: C => Any, bs: BuildSequence, throws: Boolean): Unit = { + runOp(op, bs, _.view) // never throws + runOpMaybeThrowing(op, bs, throws, _.iterator) + runOpMaybeThrowing(op, bs, throws, _.view.iterator) } /** Checks that no exception is thrown by an operation. */ - def checkFine(op: C => Any): Unit = runOpForViewAndIterator(op, throws = false) + protected[this] def checkFine(op: C => Any, buildSequence: BuildSequence = defaultBuildSequence): Unit = + runOpForViewAndIterator(op, buildSequence, throws = false) /** Checks that an exception is thrown by an operation. */ - def checkThrows(op: C => Any): Unit = runOpForViewAndIterator(op, throws = true) + protected[this] def checkThrows(op: C => Any, buildSequence: BuildSequence = defaultBuildSequence): Unit = + runOpForViewAndIterator(op, buildSequence, throws = true) @Test def nop(): Unit = checkFine { _ => () } @@ -94,6 +100,29 @@ object MutationTrackingTest { def transform(): Unit = checkThrows { _.transform(_ + 1) } } + trait IndexedSeqTest { self: MutationTrackingTest[IndexedSeq[Int]] => + @Test + def mapInPlace(): Unit = checkThrows { _.mapInPlace(_ + 1) } + + @Test + def sortInPlace(): Unit = { + checkThrows { _.sortInPlace() } + checkFine (_.sortInPlace(), _ += 1) + } + + @Test + def sortInPlaceWith(): Unit = { + checkThrows { _.sortInPlaceWith(_ > _) } + checkFine (_.sortInPlaceWith(_ > _), _ += 1) + } + + @Test + def sortInPlaceBy(): Unit = { + checkThrows { _.sortInPlaceBy(_ * -1) } + checkFine (_.sortInPlaceBy(_ * -1), _ += 1) + } + } + trait BufferTest extends GrowableTest with ShrinkableTest with SeqTest { self: MutationTrackingTest[Buffer[Int]] => @Test def insert(): Unit = checkThrows { _.insert(0, 5) } @@ -210,4 +239,15 @@ package MutationTrackingTestImpl { @Test def filterInPlace(): Unit = checkThrows { _.filterInPlace(_ => true) } } + + class ArrayBufferTest extends MutationTrackingTest(ArrayBuffer) with BufferTest with IndexedSeqTest { + @Test + def clearAndShrink(): Unit = checkThrows { _ clearAndShrink 2 } + + @Test + def trimToSize(): Unit = checkThrows { _.trimToSize() } + + @Test + def sizeHint(): Unit = checkThrows { _ sizeHint 16 } + } } From 215d029b330eb5bbff97c6398309a8c3a9853e94 Mon Sep 17 00:00:00 2001 From: NthPortal Date: Tue, 10 Aug 2021 23:20:12 -0400 Subject: [PATCH 354/769] Do not compute size in TrieMap#isEmpty Do not compute size in `TrieMap#isEmpty`. Override `TrieMap#knownSize`. 
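A rough sketch of the resulting behaviour, e.g. in the REPL (illustrative only; whether a snapshot reports a concrete knownSize depends on whether a size computation has already been cached at the root):

```scala
import scala.collection.concurrent.TrieMap

val m = TrieMap("k" -> "v")
m.knownSize                 // -1: a live TrieMap never claims to know its size
m.isEmpty                   // false, via a read-only snapshot and sizeIs, not a full size computation

val snap = m.readOnlySnapshot()
snap.knownSize              // -1 or 1, never a stale value
snap.size                   // 1; the result may now be cached at the root
snap.knownSize              // still -1 or 1, and at least as informative as before
```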
--- .../scala/collection/concurrent/MainNode.java | 3 ++ .../scala/collection/concurrent/TrieMap.scala | 41 +++++++++++-------- .../collection/concurrent/TrieMapTest.scala | 36 ++++++++++++++++ 3 files changed, 63 insertions(+), 17 deletions(-) diff --git a/src/library/scala/collection/concurrent/MainNode.java b/src/library/scala/collection/concurrent/MainNode.java index 11c09bb2fe5b..f7f022974e9e 100644 --- a/src/library/scala/collection/concurrent/MainNode.java +++ b/src/library/scala/collection/concurrent/MainNode.java @@ -24,6 +24,9 @@ abstract class MainNode extends BasicNode { public abstract int cachedSize(Object ct); + // standard contract + public abstract int knownSize(); + public boolean CAS_PREV(MainNode oldval, MainNode nval) { return updater.compareAndSet(this, oldval, nval); } diff --git a/src/library/scala/collection/concurrent/TrieMap.scala b/src/library/scala/collection/concurrent/TrieMap.scala index 474cbc1317a7..bb8e3bcef52a 100644 --- a/src/library/scala/collection/concurrent/TrieMap.scala +++ b/src/library/scala/collection/concurrent/TrieMap.scala @@ -398,10 +398,11 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen, equiv: E def isNullInode(ct: TrieMap[K, V]) = GCAS_READ(ct) eq null - def cachedSize(ct: TrieMap[K, V]): Int = { - val m = GCAS_READ(ct) - m.cachedSize(ct) - } + def cachedSize(ct: TrieMap[K, V]): Int = + GCAS_READ(ct).cachedSize(ct) + + def knownSize(ct: TrieMap[K, V]): Int = + GCAS_READ(ct).knownSize() /* this is a quiescent method! */ def string(lev: Int) = "%sINode -> %s".format(" " * lev, mainnode match { @@ -438,6 +439,8 @@ private[concurrent] final class FailedNode[K, V](p: MainNode[K, V]) extends Main def cachedSize(ct: AnyRef): Int = throw new UnsupportedOperationException + def knownSize: Int = throw new UnsupportedOperationException + override def toString = "FailedNode(%s)".format(p) } @@ -456,7 +459,7 @@ private[collection] final class SNode[K, V](final val k: K, final val v: V, fina def string(lev: Int) = (" " * lev) + "SNode(%s, %s, %x)".format(k, v, hc) } - +// Tomb Node, used to ensure proper ordering during removals private[collection] final class TNode[K, V](final val k: K, final val v: V, final val hc: Int) extends MainNode[K, V] with KVNode[K, V] { def copy = new TNode(k, v, hc) @@ -464,10 +467,11 @@ private[collection] final class TNode[K, V](final val k: K, final val v: V, fina def copyUntombed = new SNode(k, v, hc) def kvPair = (k, v) def cachedSize(ct: AnyRef): Int = 1 + def knownSize: Int = 1 def string(lev: Int) = (" " * lev) + "TNode(%s, %s, %x, !)".format(k, v, hc) } - +// List Node, leaf node that handles hash collisions private[collection] final class LNode[K, V](val entries: List[(K, V)], equiv: Equiv[K]) extends MainNode[K, V] { @@ -492,7 +496,7 @@ private[collection] final class LNode[K, V](val entries: List[(K, V)], equiv: Eq def removed(k: K, ct: TrieMap[K, V]): MainNode[K, V] = { val updmap = entries.filterNot(entry => equiv.equiv(entry._1, k)) - if (updmap.size > 1) new LNode(updmap, equiv) + if (updmap.sizeIs > 1) new LNode(updmap, equiv) else { val (k, v) = updmap.iterator.next() new TNode(k, v, ct.computeHash(k)) // create it tombed so that it gets compressed on subsequent accesses @@ -503,14 +507,16 @@ private[collection] final class LNode[K, V](val entries: List[(K, V)], equiv: Eq def cachedSize(ct: AnyRef): Int = entries.size + def knownSize: Int = -1 // shouldn't ever be empty, and the size of a list is not known + def string(lev: Int) = (" " * lev) + 
"LNode(%s)".format(entries.mkString(", ")) } - +// Ctrie Node, contains bitmap and array of references to branch nodes private[collection] final class CNode[K, V](val bitmap: Int, val array: Array[BasicNode], val gen: Gen) extends CNodeBase[K, V] { // this should only be called from within read-only snapshots - def cachedSize(ct: AnyRef) = { + def cachedSize(ct: AnyRef): Int = { val currsz = READ_SIZE() if (currsz != -1) currsz else { @@ -520,6 +526,8 @@ private[collection] final class CNode[K, V](val bitmap: Int, val array: Array[Ba } } + def knownSize: Int = READ_SIZE() // this should only ever return -1 if unknown + // lends itself towards being parallelizable by choosing // a random starting offset in the array // => if there are concurrent size computations, they start @@ -676,6 +684,7 @@ private[concurrent] case class RDCSS_Descriptor[K, V](old: INode[K, V], expected * * For details, see: [[http://lampwww.epfl.ch/~prokopec/ctries-snapshot.pdf]] */ +@SerialVersionUID(-5212455458703321708L) final class TrieMap[K, V] private (r: AnyRef, rtupd: AtomicReferenceFieldUpdater[TrieMap[K, V], AnyRef], hashf: Hashing[K], ef: Equiv[K]) extends scala.collection.mutable.AbstractMap[K, V] with scala.collection.concurrent.Map[K, V] @@ -1002,16 +1011,14 @@ final class TrieMap[K, V] private (r: AnyRef, rtupd: AtomicReferenceFieldUpdater // END extra overrides /////////////////////////////////////////////////////////////////// - - private def cachedSize() = { - val r = RDCSS_READ_ROOT() - r.cachedSize(this) - } - override def size: Int = if (nonReadOnly) readOnlySnapshot().size - else cachedSize() - override def isEmpty: Boolean = size == 0 + else RDCSS_READ_ROOT().cachedSize(this) + override def knownSize: Int = + if (nonReadOnly) -1 + else RDCSS_READ_ROOT().knownSize(this) + override def isEmpty: Boolean = + (if (nonReadOnly) readOnlySnapshot() else this).sizeIs == 0 // sizeIs checks knownSize override protected[this] def className = "TrieMap" } diff --git a/test/junit/scala/collection/concurrent/TrieMapTest.scala b/test/junit/scala/collection/concurrent/TrieMapTest.scala index 287e914dc6bf..fa4b9cea443c 100644 --- a/test/junit/scala/collection/concurrent/TrieMapTest.scala +++ b/test/junit/scala/collection/concurrent/TrieMapTest.scala @@ -5,6 +5,7 @@ import org.junit.Assert.assertEquals import scala.util.hashing.Hashing import scala.tools.testkit.AssertUtil.assertThrows +import scala.util.chaining._ @deprecated("Tests deprecated API", since="2.13") class TrieMapTest { @@ -659,4 +660,39 @@ class TrieMapTest { assertEquals(hashMap4.updateWith(2)(noneAnytime), None) assertEquals(hashMap4, TrieMap(1 -> "a")) } + + @Test + def knownSizeConsistency(): Unit = { + def check(tm: TrieMap[_, _]): Unit = { + def msg = s"for ${tm.toString()}" + val snapshot = tm.readOnlySnapshot() + val initialKS = snapshot.knownSize + val size = snapshot.size + assert(initialKS == -1 || initialKS == size, msg) + val laterKS = snapshot.knownSize + assert(laterKS == -1 || laterKS == size, msg) + assert(laterKS >= initialKS, msg) // assert we haven't forgotten the size + } + + check(TrieMap.empty) + check(TrieMap()) + check(TrieMap("k" -> "v")) + check(TrieMap.empty[String, String].tap(_("k") = "v")) + check(TrieMap.empty[String, String].tap(_.put("k", "v"))) + check(TrieMap.from((1 to 5).map(x => x -> x))) + check(TrieMap.from((1 to 10).map(x => x -> x))) + check(TrieMap.from((1 to 100).map(x => x -> x))) + } + + @Test + def isEmptyCorrectness(): Unit = { + assert(TrieMap.empty.isEmpty) + assert(TrieMap().isEmpty) + 
assert(!TrieMap("k" -> "v").isEmpty) + assert(!TrieMap.empty[String, String].tap(_("k") = "v").isEmpty) + assert(!TrieMap.empty[String, String].tap(_.put("k", "v")).isEmpty) + assert(!TrieMap.from((1 to 5).map(x => x -> x)).isEmpty) + assert(!TrieMap.from((1 to 10).map(x => x -> x)).isEmpty) + assert(!TrieMap.from((1 to 100).map(x => x -> x)).isEmpty) + } } From b13ebd7c97738708187dd3e7b2390fa93767270e Mon Sep 17 00:00:00 2001 From: NthPortal Date: Fri, 20 Aug 2021 21:19:05 -0400 Subject: [PATCH 355/769] Fix concurrent.Map#{filterInPlace,mapValuesInPlace} Fix the behaviour of `concurrent.Map#filterInPlace` and `concurrent.Map#mapValuesInPlace` to respect atomic entry changes. --- project/MimaFilters.scala | 8 ++- .../scala/collection/concurrent/Map.scala | 18 +++++++ .../scala/collection/concurrent/TrieMap.scala | 2 +- .../scala/collection/mutable/Map.scala | 25 ++++++---- .../concurrent/ConcurrentMapTestHelper.scala | 50 +++++++++++++++++++ .../concurrent/ConcurrentMapTester.scala | 27 ++++++++++ .../collection/concurrent/TrieMapTest.scala | 10 ++++ .../convert/JConcurrentMapWrapperTest.scala | 30 +++++++++++ 8 files changed, 157 insertions(+), 13 deletions(-) create mode 100644 test/junit/scala/collection/concurrent/ConcurrentMapTestHelper.scala create mode 100644 test/junit/scala/collection/concurrent/ConcurrentMapTester.scala create mode 100644 test/junit/scala/collection/convert/JConcurrentMapWrapperTest.scala diff --git a/project/MimaFilters.scala b/project/MimaFilters.scala index 4b13a302e298..db6622643cdb 100644 --- a/project/MimaFilters.scala +++ b/project/MimaFilters.scala @@ -42,7 +42,13 @@ object MimaFilters extends AutoPlugin { ProblemFilters.exclude[IncompatibleMethTypeProblem]("scala.reflect.io.FileZipArchive#LeakyEntry.this"), ProblemFilters.exclude[MissingClassProblem]("scala.reflect.io.FileZipArchive$zipFilePool$"), - ) + // #9727 + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.concurrent.TrieMap.filterInPlaceImpl"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.concurrent.TrieMap.mapValuesInPlaceImpl"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.convert.JavaCollectionWrappers#JConcurrentMapWrapper.filterInPlaceImpl"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.convert.JavaCollectionWrappers#JConcurrentMapWrapper.mapValuesInPlaceImpl"), + + ) override val buildSettings = Seq( mimaFailOnNoPrevious := false, // we opt everything out, knowing we only check library/reflect diff --git a/src/library/scala/collection/concurrent/Map.scala b/src/library/scala/collection/concurrent/Map.scala index ec75b87883f4..07d571e73d2b 100644 --- a/src/library/scala/collection/concurrent/Map.scala +++ b/src/library/scala/collection/concurrent/Map.scala @@ -131,4 +131,22 @@ trait Map[K, V] extends scala.collection.mutable.Map[K, V] { case _ => this.updateWithAux(key)(remappingFunction) } } + + private[collection] def filterInPlaceImpl(p: (K, V) => Boolean): this.type = { + val it = iterator + while (it.hasNext) { + val (k, v) = it.next() + if (p(k, v)) remove(k, v) + } + this + } + + private[collection] def mapValuesInPlaceImpl(f: (K, V) => V): this.type = { + val it = iterator + while (it.hasNext) { + val (k, v) = it.next() + replace(k, v, f(k, v)) + } + this + } } diff --git a/src/library/scala/collection/concurrent/TrieMap.scala b/src/library/scala/collection/concurrent/TrieMap.scala index bb8e3bcef52a..ca7681b115c1 100644 --- a/src/library/scala/collection/concurrent/TrieMap.scala +++ 
b/src/library/scala/collection/concurrent/TrieMap.scala @@ -153,7 +153,7 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen, equiv: E * KEY_ABSENT - key wasn't there, insert only, do not overwrite * KEY_PRESENT - key was there, overwrite only, do not insert * other value `v` - only overwrite if the current value is this - * @param hc the hashcode of `k`` + * @param hc the hashcode of `k` * * @return null if unsuccessful, Option[V] otherwise (indicating previous value bound to the key) */ diff --git a/src/library/scala/collection/mutable/Map.scala b/src/library/scala/collection/mutable/Map.scala index 8312e7647c4a..27278c67286c 100644 --- a/src/library/scala/collection/mutable/Map.scala +++ b/src/library/scala/collection/mutable/Map.scala @@ -171,17 +171,19 @@ trait MapOps[K, V, +CC[X, Y] <: MapOps[X, Y, CC, _], +C <: MapOps[K, V, CC, C]] * @param p The test predicate */ def filterInPlace(p: (K, V) => Boolean): this.type = { - if (nonEmpty) { - val array = this.toArray[Any] // scala/bug#7269 toArray avoids ConcurrentModificationException - val arrayLength = array.length - var i = 0 - while (i < arrayLength) { - val (k, v) = array(i).asInstanceOf[(K, V)] - if (!p(k, v)) { - this -= k + if (!isEmpty) this match { + case tm: concurrent.Map[_, _] => tm.asInstanceOf[concurrent.Map[K, V]].filterInPlaceImpl(p) + case _ => + val array = this.toArray[Any] // scala/bug#7269 toArray avoids ConcurrentModificationException + val arrayLength = array.length + var i = 0 + while (i < arrayLength) { + val (k, v) = array(i).asInstanceOf[(K, V)] + if (!p(k, v)) { + this -= k + } + i += 1 } - i += 1 - } } this } @@ -197,8 +199,9 @@ trait MapOps[K, V, +CC[X, Y] <: MapOps[X, Y, CC, _], +C <: MapOps[K, V, CC, C]] * @return the map itself. */ def mapValuesInPlace(f: (K, V) => V): this.type = { - if (nonEmpty) this match { + if (!isEmpty) this match { case hm: mutable.HashMap[_, _] => hm.asInstanceOf[mutable.HashMap[K, V]].mapValuesInPlaceImpl(f) + case tm: concurrent.Map[_, _] => tm.asInstanceOf[concurrent.Map[K, V]].mapValuesInPlaceImpl(f) case _ => val array = this.toArray[Any] val arrayLength = array.length diff --git a/test/junit/scala/collection/concurrent/ConcurrentMapTestHelper.scala b/test/junit/scala/collection/concurrent/ConcurrentMapTestHelper.scala new file mode 100644 index 000000000000..c9a0ef77f500 --- /dev/null +++ b/test/junit/scala/collection/concurrent/ConcurrentMapTestHelper.scala @@ -0,0 +1,50 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection.concurrent + +import scala.concurrent.duration.SECONDS + +object ConcurrentMapTestHelper { + def genericTest_filterInPlace(newMap: => Map[String, Int]): Unit = { + val tester = new ConcurrentMapTester(newMap += "k" -> 0) + + tester.runTasks(5, SECONDS)( + _.filterInPlace((_, v) => { + SECONDS.sleep(2) + v > 0 + }), + map => { + SECONDS.sleep(1) + map("k") = 1 + }, + ) + + tester.assertContainsEntry("k", 1) // can get `0` if incorrectly implemented + } + + def genericTest_mapValuesInPlace(newMap: => Map[String, Int]): Unit = { + val tester = new ConcurrentMapTester(newMap += "k" -> 0) + tester.runTasks(5, SECONDS)( + _.mapValuesInPlace((_, v) => { + SECONDS.sleep(2) + v + 5 + }), + map => { + SECONDS.sleep(1) + map("k") = 1 + }, + ) + + tester.assertExistsEntry("k", x => x == 1 || x == 6) // can get `5` if incorrectly implemented + } +} diff --git a/test/junit/scala/collection/concurrent/ConcurrentMapTester.scala b/test/junit/scala/collection/concurrent/ConcurrentMapTester.scala new file mode 100644 index 000000000000..baea8d2f7fad --- /dev/null +++ b/test/junit/scala/collection/concurrent/ConcurrentMapTester.scala @@ -0,0 +1,27 @@ +package scala.collection.concurrent + +import java.util.concurrent.Executors +import scala.concurrent.duration.TimeUnit + +class ConcurrentMapTester[K, V](map: Map[K, V]) { + def runTasks(executionTimeout: Long, unit: TimeUnit)(tasks: (Map[K, V] => Unit)*): Unit = { + val exec = Executors.newCachedThreadPool() + for (task <- tasks) exec.execute(() => task(map)) + exec.shutdown() + exec.awaitTermination(executionTimeout, unit) + } + + @throws[AssertionError] + def assertContainsEntry(k: K, v: V): Unit = { + val value = map.get(k) + assert(value.isDefined, s"map does not contain key '$k'") + assert(value.contains(v), s"key '$k' is mapped to '${value.get}', not to '$v'") + } + + @throws[AssertionError] + def assertExistsEntry(k: K, p: V => Boolean): Unit = { + val value = map.get(k) + assert(value.isDefined, s"map does not contain key '$k'") + assert(value.exists(p), s"key '$k' is mapped to '${value.get}', which does not match the predicate") + } +} diff --git a/test/junit/scala/collection/concurrent/TrieMapTest.scala b/test/junit/scala/collection/concurrent/TrieMapTest.scala index fa4b9cea443c..46f5fe0ff763 100644 --- a/test/junit/scala/collection/concurrent/TrieMapTest.scala +++ b/test/junit/scala/collection/concurrent/TrieMapTest.scala @@ -58,6 +58,16 @@ class TrieMapTest { check(List(("k", "v")))(_.view.mapValues(x => x)) } + @Test + def filterInPlace(): Unit = { + ConcurrentMapTestHelper.genericTest_filterInPlace(TrieMap.empty) + } + + @Test + def mapValuesInPlace(): Unit = { + ConcurrentMapTestHelper.genericTest_mapValuesInPlace(TrieMap.empty) + } + @Test def customHashingAndEquiv_10481(): Unit = { val h = new Hashing[Int] { def hash(i: Int) = i % 4 } diff --git a/test/junit/scala/collection/convert/JConcurrentMapWrapperTest.scala b/test/junit/scala/collection/convert/JConcurrentMapWrapperTest.scala new file mode 100644 index 000000000000..b4712207ea01 --- /dev/null +++ b/test/junit/scala/collection/convert/JConcurrentMapWrapperTest.scala @@ -0,0 +1,30 @@ +package scala.collection.convert + +import org.junit.Test + +import java.util.concurrent.{ConcurrentHashMap, ConcurrentSkipListMap} + +import scala.collection.concurrent.ConcurrentMapTestHelper +import scala.jdk.CollectionConverters._ + +class JConcurrentMapWrapperTest { + @Test + def CHM_filterInPlace(): Unit = { + ConcurrentMapTestHelper.genericTest_filterInPlace(new 
ConcurrentHashMap[String, Int].asScala) + } + + @Test + def CHM_mapValuesInPlace(): Unit = { + ConcurrentMapTestHelper.genericTest_mapValuesInPlace(new ConcurrentHashMap[String, Int].asScala) + } + + @Test + def CSLM_filterInPlace(): Unit = { + ConcurrentMapTestHelper.genericTest_filterInPlace(new ConcurrentSkipListMap[String, Int].asScala) + } + + @Test + def CSLM_mapValuesInPlace(): Unit = { + ConcurrentMapTestHelper.genericTest_mapValuesInPlace(new ConcurrentSkipListMap[String, Int].asScala) + } +} From 3af1547623e7280b7ee0d1797793cd4fecf1f8ab Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Tue, 10 Aug 2021 15:15:42 +0200 Subject: [PATCH 356/769] Allow `import x.{*, given}` under -Xsource:3 Imagine a Scala 3 library containing: ``` object A { val a: Int = 1 given b: Int = 2 } ``` To import all members of `A` from Scala 2, we write `import A.*`, but to do the same from Scala 3, we need to write `import A.{*, given}` instead. This complicates cross-compilation for projects which depend on Scala 3 libraries (unless these libraries exclusively use `implicit` which is not something we want to encourage). This commit remedies this by allowing `import x.{*, given}` (and `import x.{given, *}`), this is easy to do since we can just pretend the user wrote `import x.*` which will give us both regular and given members in Scala 2 code and therefore match the semantics of Scala 3. --- src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 11 ++++++++++- test/files/pos/import-future.scala | 2 +- 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 0d6ab2c7209c..b43d4764f543 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -2700,7 +2700,16 @@ self => * }}} */ def importSelectors(): List[ImportSelector] = { - val selectors = inBracesOrNil(commaSeparated(importSelector())) + val selectors0 = inBracesOrNil(commaSeparated(importSelector())) + + // Treat an import of `*, given` or `given, *` as if it was an import of `*` + // since the former in Scala 3 has the same semantics as the latter in Scala 2. + val selectors = + if (currentRun.isScala3 && selectors0.exists(_.isWildcard)) + selectors0.filterNot(sel => sel.name == nme.`given` && sel.rename == sel.name) + else + selectors0 + for (t <- selectors.init if t.isWildcard) syntaxError(t.namePos, "Wildcard import must be in last position") selectors } diff --git a/test/files/pos/import-future.scala b/test/files/pos/import-future.scala index 1c0c3410f36a..5b215d907a58 100644 --- a/test/files/pos/import-future.scala +++ b/test/files/pos/import-future.scala @@ -26,7 +26,7 @@ class C { object starring { - import scala.concurrent.*, duration.{Duration as D, *}, ExecutionContext.Implicits.* + import scala.concurrent.{*, given}, duration.{given, Duration as D, *}, ExecutionContext.Implicits.* val f = Future(42) val r = Await.result(f, D.Inf) From c16fc01d385dafea8ba9f1c80666c42218d214f7 Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Tue, 10 Aug 2021 15:15:42 +0200 Subject: [PATCH 357/769] Allow `import x.{*, given}` under -Xsource:3 Imagine a Scala 3 library containing: ``` object A { val a: Int = 1 given b: Int = 2 } ``` To import all members of `A` from Scala 2, we write `import A.*`, but to do the same from Scala 3, we need to write `import A.{*, given}` instead. 
This complicates cross-compilation for projects which depend on Scala 3 libraries (unless these libraries exclusively use `implicit` which is not something we want to encourage). This commit remedies this by allowing `import x.{*, given}` (and `import x.{given, *}`), this is easy to do since we can just pretend the user wrote `import x.*` which will give us both regular and given members in Scala 2 code and therefore match the semantics of Scala 3. --- src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 11 ++++++++++- src/reflect/scala/reflect/internal/StdNames.scala | 3 +++ test/files/pos/import-future.scala | 8 ++++++++ 3 files changed, 21 insertions(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 012ee9cacb31..2e12f43c547e 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -2618,7 +2618,16 @@ self => * }}} */ def importSelectors(): List[ImportSelector] = { - val selectors = inBracesOrNil(commaSeparated(importSelector())) + val selectors0 = inBracesOrNil(commaSeparated(importSelector())) + + // Treat an import of `*, given` or `given, *` as if it was an import of `*` + // since the former in Scala 3 has the same semantics as the latter in Scala 2. + val selectors = + if (currentRun.isScala3 && selectors0.exists(_.name eq nme.WILDCARD)) + selectors0.filterNot(sel => sel.name == nme.`given` && sel.rename == sel.name) + else + selectors0 + selectors.init foreach { case ImportSelector(nme.WILDCARD, pos, _, _) => syntaxError(pos, "Wildcard import must be in last position") case _ => () diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala index 84d42b562f5e..18adb6bad639 100644 --- a/src/reflect/scala/reflect/internal/StdNames.scala +++ b/src/reflect/scala/reflect/internal/StdNames.scala @@ -640,6 +640,9 @@ trait StdNames { val infix: NameType = "infix" val open: NameType = "open" + // Scala 3 hard keywords + val `given`: NameType = "given" + // Compiler utilized names val AnnotatedType: NameType = "AnnotatedType" diff --git a/test/files/pos/import-future.scala b/test/files/pos/import-future.scala index cfaff804af02..5b215d907a58 100644 --- a/test/files/pos/import-future.scala +++ b/test/files/pos/import-future.scala @@ -23,3 +23,11 @@ class C { import mut.* val ab = ArrayBuffer(1) } + +object starring { + + import scala.concurrent.{*, given}, duration.{given, Duration as D, *}, ExecutionContext.Implicits.* + + val f = Future(42) + val r = Await.result(f, D.Inf) +} From cd3693d3ac09634442598202089fbf1302d61842 Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Tue, 24 Aug 2021 18:28:04 +0200 Subject: [PATCH 358/769] Require backticks when defining a type called `?` Upgrade the deprecation warning from 2.13.6 into an error (but only at definition site and not use site for now), the ultimate goal would be to allow and encourage `?` as a wildcard in Scala 2 by default so we can repurpose `_` in Scala 3 without causing too much disruption. 
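As a rough sketch of the behaviour described above (illustrative snippets, not taken from the patch; the exact error wording is the one in the check file below):

```scala
// Now an error (was a deprecation warning in 2.13.6):
//   class Foo[?]      // using `?` as a type name requires backticks.
//   type ? = Int      // using `?` as a type name requires backticks.

// Still fine: the name is written with backticks and keeps its Scala 2 meaning.
object Ok {
  type `?` = Int
  val xs: List[`?`] = List(1)
}

// Use sites such as `List[?]` are unchanged by this commit: they still only
// get the 2.13.6 deprecation warning, or act as a wildcard under -Xsource:3.
```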
--- .../scala/tools/nsc/ast/parser/Parsers.scala | 3 +- test/files/neg/qmark-deprecated.check | 39 +++++++++---------- test/files/pos/wildcards-future.scala | 3 +- 3 files changed, 21 insertions(+), 24 deletions(-) diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 0d6ab2c7209c..61234bfae9fc 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -712,8 +712,7 @@ self => "`?` in a type will be interpreted as a wildcard in the future, wrap it in backticks to keep the current meaning.", "2.13.6") def checkQMarkDefinition() = if (isRawIdent && in.name == raw.QMARK) - deprecationWarning(in.offset, - "using `?` as a type name will require backticks in the future.", "2.13.6") + syntaxError(in.offset, "using `?` as a type name requires backticks.") def checkKeywordDefinition() = if (isRawIdent && scala3Keywords.contains(in.name)) deprecationWarning(in.offset, diff --git a/test/files/neg/qmark-deprecated.check b/test/files/neg/qmark-deprecated.check index f1b7f333478a..d28da0369e3e 100644 --- a/test/files/neg/qmark-deprecated.check +++ b/test/files/neg/qmark-deprecated.check @@ -1,21 +1,30 @@ -qmark-deprecated.scala:4: warning: using `?` as a type name will require backticks in the future. +qmark-deprecated.scala:4: error: using `?` as a type name requires backticks. class Foo[?] // error ^ -qmark-deprecated.scala:6: warning: using `?` as a type name will require backticks in the future. +qmark-deprecated.scala:6: error: using `?` as a type name requires backticks. class Bar[M[?] <: List[?]] // errors ^ -qmark-deprecated.scala:6: warning: `?` in a type will be interpreted as a wildcard in the future, wrap it in backticks to keep the current meaning. -class Bar[M[?] <: List[?]] // errors - ^ -qmark-deprecated.scala:10: warning: using `?` as a type name will require backticks in the future. +qmark-deprecated.scala:10: error: using `?` as a type name requires backticks. class ? { val x = 1 } // error ^ -qmark-deprecated.scala:16: warning: using `?` as a type name will require backticks in the future. +qmark-deprecated.scala:16: error: using `?` as a type name requires backticks. trait ? // error ^ -qmark-deprecated.scala:22: warning: using `?` as a type name will require backticks in the future. +qmark-deprecated.scala:22: error: using `?` as a type name requires backticks. type ? = Int // error ^ +qmark-deprecated.scala:33: error: using `?` as a type name requires backticks. + def bar1[?] = {} // error + ^ +qmark-deprecated.scala:35: error: using `?` as a type name requires backticks. + def bar3[M[?]] = {} // error + ^ +qmark-deprecated.scala:38: error: using `?` as a type name requires backticks. + type A[?] = Int // error + ^ +qmark-deprecated.scala:6: warning: `?` in a type will be interpreted as a wildcard in the future, wrap it in backticks to keep the current meaning. +class Bar[M[?] <: List[?]] // errors + ^ qmark-deprecated.scala:27: warning: `?` in a type will be interpreted as a wildcard in the future, wrap it in backticks to keep the current meaning. val x: Array[?] = new Array[?](0) // errors ^ @@ -28,15 +37,5 @@ qmark-deprecated.scala:30: warning: `?` in a type will be interpreted as a wildc qmark-deprecated.scala:30: warning: `?` in a type will be interpreted as a wildcard in the future, wrap it in backticks to keep the current meaning. def foo1[T <: Array[?]](x: T): Array[?] 
= x // errors ^ -qmark-deprecated.scala:33: warning: using `?` as a type name will require backticks in the future. - def bar1[?] = {} // error - ^ -qmark-deprecated.scala:35: warning: using `?` as a type name will require backticks in the future. - def bar3[M[?]] = {} // error - ^ -qmark-deprecated.scala:38: warning: using `?` as a type name will require backticks in the future. - type A[?] = Int // error - ^ -error: No warnings can be incurred under -Werror. -13 warnings -1 error +5 warnings +8 errors diff --git a/test/files/pos/wildcards-future.scala b/test/files/pos/wildcards-future.scala index 928cab3648b0..c9afdea17bab 100644 --- a/test/files/pos/wildcards-future.scala +++ b/test/files/pos/wildcards-future.scala @@ -9,8 +9,7 @@ object Test { case _ => x } - // Only allowed in Scala 3 under -source 3.0-migration - type ? = Int + type `?` = Int val xs2: List[`?`] = List(1) val xs3: List[Int] = xs2 From f2f9ae935473373e96e8895457a77e5ad578b07c Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Tue, 24 Aug 2021 18:34:11 +0200 Subject: [PATCH 359/769] Advertise that -Xsource:3 can be used to enable ? as a wildcard --- src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 2 +- test/files/neg/qmark-deprecated.check | 10 +++++----- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 61234bfae9fc..b70398c5618f 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -709,7 +709,7 @@ self => def checkQMarkUsage() = if (!settings.isScala3 && isRawIdent && in.name == raw.QMARK) deprecationWarning(in.offset, - "`?` in a type will be interpreted as a wildcard in the future, wrap it in backticks to keep the current meaning.", "2.13.6") + "Wrap `?` in backticks to continue to use it as an identifier, or use `-Xsource:3` to use it as a wildcard like in Scala 3.", "2.13.6") def checkQMarkDefinition() = if (isRawIdent && in.name == raw.QMARK) syntaxError(in.offset, "using `?` as a type name requires backticks.") diff --git a/test/files/neg/qmark-deprecated.check b/test/files/neg/qmark-deprecated.check index d28da0369e3e..2d96f788ab9d 100644 --- a/test/files/neg/qmark-deprecated.check +++ b/test/files/neg/qmark-deprecated.check @@ -22,19 +22,19 @@ qmark-deprecated.scala:35: error: using `?` as a type name requires backticks. qmark-deprecated.scala:38: error: using `?` as a type name requires backticks. type A[?] = Int // error ^ -qmark-deprecated.scala:6: warning: `?` in a type will be interpreted as a wildcard in the future, wrap it in backticks to keep the current meaning. +qmark-deprecated.scala:6: warning: Wrap `?` in backticks to continue to use it as an identifier, or use `-Xsource:3` to use it as a wildcard like in Scala 3. class Bar[M[?] <: List[?]] // errors ^ -qmark-deprecated.scala:27: warning: `?` in a type will be interpreted as a wildcard in the future, wrap it in backticks to keep the current meaning. +qmark-deprecated.scala:27: warning: Wrap `?` in backticks to continue to use it as an identifier, or use `-Xsource:3` to use it as a wildcard like in Scala 3. val x: Array[?] = new Array[?](0) // errors ^ -qmark-deprecated.scala:27: warning: `?` in a type will be interpreted as a wildcard in the future, wrap it in backticks to keep the current meaning. 
+qmark-deprecated.scala:27: warning: Wrap `?` in backticks to continue to use it as an identifier, or use `-Xsource:3` to use it as a wildcard like in Scala 3. val x: Array[?] = new Array[?](0) // errors ^ -qmark-deprecated.scala:30: warning: `?` in a type will be interpreted as a wildcard in the future, wrap it in backticks to keep the current meaning. +qmark-deprecated.scala:30: warning: Wrap `?` in backticks to continue to use it as an identifier, or use `-Xsource:3` to use it as a wildcard like in Scala 3. def foo1[T <: Array[?]](x: T): Array[?] = x // errors ^ -qmark-deprecated.scala:30: warning: `?` in a type will be interpreted as a wildcard in the future, wrap it in backticks to keep the current meaning. +qmark-deprecated.scala:30: warning: Wrap `?` in backticks to continue to use it as an identifier, or use `-Xsource:3` to use it as a wildcard like in Scala 3. def foo1[T <: Array[?]](x: T): Array[?] = x // errors ^ 5 warnings From 4bfd6ae43bb1ecaecb8efc1c70f2e5cf61315e72 Mon Sep 17 00:00:00 2001 From: NthPortal Date: Thu, 26 Aug 2021 15:55:18 -0400 Subject: [PATCH 360/769] Fix inverted condition in s.c.c.Map#filterInPlaceImpl --- src/library/scala/collection/concurrent/Map.scala | 2 +- .../collection/concurrent/ConcurrentMapTestHelper.scala | 9 +++++---- .../collection/concurrent/ConcurrentMapTester.scala | 6 ++++++ 3 files changed, 12 insertions(+), 5 deletions(-) diff --git a/src/library/scala/collection/concurrent/Map.scala b/src/library/scala/collection/concurrent/Map.scala index 07d571e73d2b..ed9e6f3f3e43 100644 --- a/src/library/scala/collection/concurrent/Map.scala +++ b/src/library/scala/collection/concurrent/Map.scala @@ -136,7 +136,7 @@ trait Map[K, V] extends scala.collection.mutable.Map[K, V] { val it = iterator while (it.hasNext) { val (k, v) = it.next() - if (p(k, v)) remove(k, v) + if (!p(k, v)) remove(k, v) } this } diff --git a/test/junit/scala/collection/concurrent/ConcurrentMapTestHelper.scala b/test/junit/scala/collection/concurrent/ConcurrentMapTestHelper.scala index c9a0ef77f500..717f60a8329b 100644 --- a/test/junit/scala/collection/concurrent/ConcurrentMapTestHelper.scala +++ b/test/junit/scala/collection/concurrent/ConcurrentMapTestHelper.scala @@ -16,7 +16,7 @@ import scala.concurrent.duration.SECONDS object ConcurrentMapTestHelper { def genericTest_filterInPlace(newMap: => Map[String, Int]): Unit = { - val tester = new ConcurrentMapTester(newMap += "k" -> 0) + val tester = new ConcurrentMapTester(newMap += "k1" -> 0 += "k2" -> 0) tester.runTasks(5, SECONDS)( _.filterInPlace((_, v) => { @@ -25,11 +25,12 @@ object ConcurrentMapTestHelper { }), map => { SECONDS.sleep(1) - map("k") = 1 + map("k1") = 1 }, ) - tester.assertContainsEntry("k", 1) // can get `0` if incorrectly implemented + tester.assertContainsEntry("k1", 1) // can get `0` if racy implementation + tester.assertDoesNotContain("k2") } def genericTest_mapValuesInPlace(newMap: => Map[String, Int]): Unit = { @@ -45,6 +46,6 @@ object ConcurrentMapTestHelper { }, ) - tester.assertExistsEntry("k", x => x == 1 || x == 6) // can get `5` if incorrectly implemented + tester.assertExistsEntry("k", x => x == 1 || x == 6) // can get `5` if racy implementation } } diff --git a/test/junit/scala/collection/concurrent/ConcurrentMapTester.scala b/test/junit/scala/collection/concurrent/ConcurrentMapTester.scala index baea8d2f7fad..f88c51a3a1c0 100644 --- a/test/junit/scala/collection/concurrent/ConcurrentMapTester.scala +++ b/test/junit/scala/collection/concurrent/ConcurrentMapTester.scala @@ -24,4 +24,10 @@ class 
ConcurrentMapTester[K, V](map: Map[K, V]) { assert(value.isDefined, s"map does not contain key '$k'") assert(value.exists(p), s"key '$k' is mapped to '${value.get}', which does not match the predicate") } + + @throws[AssertionError] + def assertDoesNotContain(k: K): Unit = { + val value = map.get(k) + assert(value.isEmpty, s"key '$k' is not empty and is mapped to '${value.get}'") + } } From aeda5d14b8743437180bc78b3527e4f6836207e0 Mon Sep 17 00:00:00 2001 From: nwk37011 Date: Fri, 27 Aug 2021 16:31:52 +0900 Subject: [PATCH 361/769] VM.RELEASE_FENCE catches NoSuchMethodException for java.lang.invoke.VarHandle.releaseFence invocation --- src/library/scala/runtime/Statics.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/library/scala/runtime/Statics.java b/src/library/scala/runtime/Statics.java index 09288e09fbfd..886d000592ef 100644 --- a/src/library/scala/runtime/Statics.java +++ b/src/library/scala/runtime/Statics.java @@ -159,7 +159,7 @@ private static MethodHandle mkHandle() { MethodHandles.Lookup lookup = MethodHandles.lookup(); try { return lookup.findStatic(Class.forName("java.lang.invoke.VarHandle"), "releaseFence", MethodType.methodType(Void.TYPE)); - } catch (ClassNotFoundException e) { + } catch (NoSuchMethodException | ClassNotFoundException e) { try { Class unsafeClass = Class.forName("sun.misc.Unsafe"); return lookup.findVirtual(unsafeClass, "storeFence", MethodType.methodType(void.class)).bindTo(findUnsafe(unsafeClass)); @@ -168,7 +168,7 @@ private static MethodHandle mkHandle() { error.addSuppressed(e); throw error; } - } catch (NoSuchMethodException | IllegalAccessException e) { + } catch (IllegalAccessException e) { throw new ExceptionInInitializerError(e); } } From 9be10ed31ac3b1fac12a5776f7e1f0f3df745c3c Mon Sep 17 00:00:00 2001 From: Guillaume Martres Date: Mon, 9 Aug 2021 14:17:42 +0200 Subject: [PATCH 362/769] Allow `case` in pattern bindings even without -Xsource:3 In #9558 (which shipped with 2.13.6) we added support for `case` bindings under -Xsource:3. Since this parser change does not break any existing code and since IntelliJ and scalameta/metals now understand this syntax in Scala 2 code, it should be safe to enable it by default to further ease cross-compilation between Scala 2 and 3. --- spec/06-expressions.md | 16 +++++++++---- spec/13-syntax-summary.md | 2 +- .../scala/tools/nsc/ast/parser/Parsers.scala | 6 ++--- .../neg/for-comprehension-case-future.check | 7 ------ .../neg/for-comprehension-case-future.scala | 24 ------------------- test/files/neg/for-comprehension-case.check | 14 ++++------- test/files/neg/for-comprehension-case.scala | 16 +++++++++---- 7 files changed, 30 insertions(+), 55 deletions(-) delete mode 100644 test/files/neg/for-comprehension-case-future.check delete mode 100644 test/files/neg/for-comprehension-case-future.scala diff --git a/spec/06-expressions.md b/spec/06-expressions.md index d88c7324f1de..6a3408a75f81 100644 --- a/spec/06-expressions.md +++ b/spec/06-expressions.md @@ -912,7 +912,7 @@ A semicolon preceding the `while` symbol of a do loop expression is ignored. 
Expr1 ::= ‘for’ (‘(’ Enumerators ‘)’ | ‘{’ Enumerators ‘}’) {nl} [‘yield’] Expr Enumerators ::= Generator {semi Generator} -Generator ::= Pattern1 ‘<-’ Expr {[semi] Guard | semi Pattern1 ‘=’ Expr} +Generator ::= [‘case’] Pattern1 ‘<-’ Expr {[semi] Guard | semi Pattern1 ‘=’ Expr} Guard ::= ‘if’ PostfixExpr ``` @@ -922,9 +922,15 @@ A _for comprehension_ `for ($\mathit{enums}\,$) yield $e$` evaluates expression $e$ for each binding generated by the enumerators $\mathit{enums}$ and collects the results. An enumerator sequence always starts with a generator; this can be followed by further generators, value -definitions, or guards. A _generator_ `$p$ <- $e$` -produces bindings from an expression $e$ which is matched in some way -against pattern $p$. A _value definition_ `$p$ = $e$` +definitions, or guards. + +A _generator_ `$p$ <- $e$` produces bindings from an expression $e$ which is +matched in some way against pattern $p$. Optionally, `case` can appear in front +of a generator pattern, this has no meaning in Scala 2 but will be [required in +Scala 3 if `p` is not +irrefutable](https://docs.scala-lang.org/scala3/reference/changed-features/pattern-bindings.html). + +A _value definition_ `$p$ = $e$` binds the value name $p$ (or several names in a pattern $p$) to the result of evaluating the expression $e$. A _guard_ `if $e$` contains a boolean expression which restricts @@ -1762,4 +1768,4 @@ Finally: * `e.m(x) = y` becomes `e.selectDynamic("m").update(x, y)` -None of these methods are actually defined in the `scala.Dynamic`, so that users are free to define them with or without type parameters, or implicit arguments. \ No newline at end of file +None of these methods are actually defined in the `scala.Dynamic`, so that users are free to define them with or without type parameters, or implicit arguments. 
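To make the generator change above concrete, a rough sketch of what the parser now accepts by default (illustrative code, not from the patch); as the spec text notes, the `case` marker has no effect on Scala 2 semantics:

```scala
val opts = List(Some(1), None, Some(3))

// Accepted without -Xsource:3 after this change; non-matching elements are
// still filtered out, exactly as for a plain `Some(x) <- opts` generator.
val xs = for { case Some(x) <- opts } yield x   // List(1, 3)

// Still rejected: `case` in front of a value definition in the enumerators,
// e.g. `for { case Some(x) <- opts; case y = x + 1 } yield x + y`
// fails with "'<-' expected but '=' found."
```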
diff --git a/spec/13-syntax-summary.md b/spec/13-syntax-summary.md index be5cc1324ecd..0e844bf2af2e 100644 --- a/spec/13-syntax-summary.md +++ b/spec/13-syntax-summary.md @@ -172,7 +172,7 @@ grammar: | (Bindings | ([‘implicit’] id | ‘_’) ‘:’ CompoundType) ‘=>’ Block Enumerators ::= Generator {semi Generator} - Generator ::= Pattern1 ‘<-’ Expr {[semi] Guard | semi Pattern1 ‘=’ Expr} + Generator ::= [‘case’] Pattern1 ‘<-’ Expr {[semi] Guard | semi Pattern1 ‘=’ Expr} CaseClauses ::= CaseClause { CaseClause } CaseClause ::= ‘case’ Pattern [Guard] ‘=>’ Block diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 2e12f43c547e..cc2330eef1ec 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -1915,16 +1915,14 @@ self => else generator(!isFirst, allowNestedIf) /** {{{ - * Generator ::= Pattern1 (`<-' | `=') Expr [Guard] + * Generator ::= [`case'] Pattern1 (`<-' | `=') Expr [Guard] * }}} */ def generator(eqOK: Boolean, allowNestedIf: Boolean = true): List[Tree] = { val start = in.offset val hasCase = in.token == CASE - if (hasCase) { - if (!currentRun.isScala3) syntaxError(in.offset, s"`case` keyword in for comprehension requires the -Xsource:3 flag.") + if (hasCase) in.skipCASE() - } val hasVal = in.token == VAL if (hasVal) diff --git a/test/files/neg/for-comprehension-case-future.check b/test/files/neg/for-comprehension-case-future.check deleted file mode 100644 index 02dab922e0d4..000000000000 --- a/test/files/neg/for-comprehension-case-future.check +++ /dev/null @@ -1,7 +0,0 @@ -for-comprehension-case-future.scala:22: error: '<-' expected but '=' found. - case y = x + 1 - ^ -for-comprehension-case-future.scala:23: error: illegal start of simple expression - } yield x + y - ^ -two errors found diff --git a/test/files/neg/for-comprehension-case-future.scala b/test/files/neg/for-comprehension-case-future.scala deleted file mode 100644 index 05602e537759..000000000000 --- a/test/files/neg/for-comprehension-case-future.scala +++ /dev/null @@ -1,24 +0,0 @@ -// scalac: -Xsource:3 -// -class A { - // ok - val a = - for { - case Some(x) <- List(Some(1), None) - y = x + 1 - } yield x + y - - // ok - val b = - for { - Some(x) <- List(Some(1), None) - Some(y) <- List(None, Some(2)) - } yield x+y - - // fail - val c = - for { - case Some(x) <- List(Some(1), None) - case y = x + 1 - } yield x + y -} diff --git a/test/files/neg/for-comprehension-case.check b/test/files/neg/for-comprehension-case.check index b1f2eb0849c5..61b8de0dd30b 100644 --- a/test/files/neg/for-comprehension-case.check +++ b/test/files/neg/for-comprehension-case.check @@ -1,13 +1,7 @@ -for-comprehension-case.scala:5: error: `case` keyword in for comprehension requires the -Xsource:3 flag. - case Some(x) <- List(Some(1), None) - ^ -for-comprehension-case.scala:12: error: `case` keyword in for comprehension requires the -Xsource:3 flag. - case y = x + 1 - ^ -for-comprehension-case.scala:12: error: '<-' expected but '=' found. +for-comprehension-case.scala:20: error: '<-' expected but '=' found. 
case y = x + 1 ^ -for-comprehension-case.scala:13: error: illegal start of simple expression - } yield x+y +for-comprehension-case.scala:21: error: illegal start of simple expression + } yield x + y ^ -four errors found +two errors found diff --git a/test/files/neg/for-comprehension-case.scala b/test/files/neg/for-comprehension-case.scala index 55e8d44a40e3..d6b14eb91a90 100644 --- a/test/files/neg/for-comprehension-case.scala +++ b/test/files/neg/for-comprehension-case.scala @@ -1,14 +1,22 @@ class A { - // fail + // ok val a = for { case Some(x) <- List(Some(1), None) - } yield x + y = x + 1 + } yield x + y - // fail + // ok val b = for { Some(x) <- List(Some(1), None) - case y = x + 1 + Some(y) <- List(None, Some(2)) } yield x+y + + // fail + val c = + for { + case Some(x) <- List(Some(1), None) + case y = x + 1 + } yield x + y } From 791870b9079857ab476319ebeaea4ca8c8a7db84 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Thu, 26 Aug 2021 23:19:58 -0700 Subject: [PATCH 363/769] Extra help for stable ident pattern not found --- .../tools/nsc/typechecker/ContextErrors.scala | 5 ++-- .../scala/tools/nsc/typechecker/Typers.scala | 2 +- test/files/neg/not-found.check | 30 +++++++++++++++++++ test/files/neg/not-found.scala | 22 ++++++++++++++ test/files/neg/t11374b.check | 2 ++ 5 files changed, 58 insertions(+), 3 deletions(-) create mode 100644 test/files/neg/not-found.check create mode 100644 test/files/neg/not-found.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index cb5e3889b190..b105b821ccee 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -295,8 +295,9 @@ trait ContextErrors extends splain.SplainErrors { def AmbiguousIdentError(tree: Tree, name: Name, msg: String) = NormalTypeError(tree, "reference to " + name + " is ambiguous;\n" + msg) - def SymbolNotFoundError(tree: Tree, name: Name, owner: Symbol, startingIdentCx: Context) = { - NormalTypeError(tree, "not found: "+decodeWithKind(name, owner)) + def SymbolNotFoundError(tree: Tree, name: Name, owner: Symbol, startingIdentCx: Context, inPattern: Boolean) = { + def help = if (inPattern && name.isTermName) s"\nIdentifiers ${if (name.charAt(0).isUpper) "that begin with uppercase" else "enclosed in backticks"} are not pattern variables but match the value in scope." 
else "" + NormalTypeError(tree, s"not found: ${decodeWithKind(name, owner)}$help") } // typedAppliedTypeTree diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 3efe38df1519..58cb1a525d64 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -5476,7 +5476,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper case LookupInaccessible(sym, msg) => issue(AccessError(tree, sym, context, msg)) case LookupNotFound => asTypeName orElse inEmptyPackage orElse lookupInRoot(name) match { - case NoSymbol => issue(SymbolNotFoundError(tree, name, context.owner, startContext)) + case NoSymbol => issue(SymbolNotFoundError(tree, name, context.owner, startContext, mode.in(all = PATTERNmode, none = APPSELmode | TYPEPATmode))) case sym => typed1(tree setSymbol sym, mode, pt) } case LookupSucceeded(qual, sym) => diff --git a/test/files/neg/not-found.check b/test/files/neg/not-found.check new file mode 100644 index 000000000000..da64a6cfe1fe --- /dev/null +++ b/test/files/neg/not-found.check @@ -0,0 +1,30 @@ +not-found.scala:10: error: not found: value Simple +Identifiers that begin with uppercase are not pattern variables but match the value in scope. + case Simple => 2 + ^ +not-found.scala:11: error: not found: value Simple + case Simple.member => 3 + ^ +not-found.scala:12: error: not found: value sample +Identifiers enclosed in backticks are not pattern variables but match the value in scope. + case `sample` => 4 + ^ +not-found.scala:13: error: not found: type Simple + case _: Simple => 5 + ^ +not-found.scala:14: error: not found: value Simple + case Simple(_) => 6 + ^ +not-found.scala:17: error: object Simple is not a member of package p +did you mean Sample? + def g = p.Simple + ^ +not-found.scala:21: error: not found: value X +Identifiers that begin with uppercase are not pattern variables but match the value in scope. + val X :: Nil = List(42) + ^ +not-found.scala:21: warning: Pattern definition introduces Unit-valued member of T; consider wrapping it in `locally { ... }`. + val X :: Nil = List(42) + ^ +1 warning +7 errors diff --git a/test/files/neg/not-found.scala b/test/files/neg/not-found.scala new file mode 100644 index 000000000000..239548e58bbe --- /dev/null +++ b/test/files/neg/not-found.scala @@ -0,0 +1,22 @@ + +package p + +object Sample + +trait T { + def f(x: Any) = + x match { + case Sample => 1 + case Simple => 2 + case Simple.member => 3 + case `sample` => 4 + case _: Simple => 5 + case Simple(_) => 6 + case _ => 7 + } + def g = p.Simple + + val x :: Nil = List(42) + + val X :: Nil = List(42) +} diff --git a/test/files/neg/t11374b.check b/test/files/neg/t11374b.check index 4867de39c3a2..f7ec70d4c1d8 100644 --- a/test/files/neg/t11374b.check +++ b/test/files/neg/t11374b.check @@ -1,7 +1,9 @@ t11374b.scala:3: error: not found: value _ +Identifiers enclosed in backticks are not pattern variables but match the value in scope. val Some(`_`) = Option(42) // was crashola ^ t11374b.scala:6: error: not found: value _ +Identifiers enclosed in backticks are not pattern variables but match the value in scope. val Some(`_`) = Option(42) // was crashola ^ t11374b.scala:3: warning: Pattern definition introduces Unit-valued member of C; consider wrapping it in `locally { ... }`. 
From 98cdb386aadba87e217a24390772419de8e9eaf9 Mon Sep 17 00:00:00 2001 From: Georgi Krastev Date: Sat, 28 Aug 2021 09:53:54 +0300 Subject: [PATCH 364/769] Fix scala/bug#8493 - add regression test --- test/files/pos/t8493.scala | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) create mode 100644 test/files/pos/t8493.scala diff --git a/test/files/pos/t8493.scala b/test/files/pos/t8493.scala new file mode 100644 index 000000000000..a5e63a1bede4 --- /dev/null +++ b/test/files/pos/t8493.scala @@ -0,0 +1,25 @@ +object Test { + trait Foo { + def foo: this.type + } + + case class Impl() extends Foo { + def foo = ??? + def bar: Unit = () + } + + object Foo { + def foo(f: Foo): f.type = f.foo + } + + def work(f: Impl): Unit = + Foo.foo(f).bar + + def bug(f: Int => Impl): Unit = + Foo.foo(f(1)).bar + + def workaround(f: Int => Impl): Unit = { + val tmp = f(1) + Foo.foo(tmp).bar + } +} From 72e46c8939ba5d64c9192d3c66793f0bdbc379a8 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Thu, 19 Aug 2021 16:45:59 +0200 Subject: [PATCH 365/769] fix scala/bug#12449: keep ThisType prefix in more places --- .../tools/nsc/tasty/bridge/ContextOps.scala | 9 +++-- .../tools/nsc/tasty/bridge/TypeOps.scala | 28 +++++++++++----- test/tasty/neg/src-2/TestThisTypes.check | 6 ++++ test/tasty/neg/src-2/TestThisTypes_fail.scala | 15 +++++++++ test/tasty/neg/src-3/ThisTypes.scala | 15 +++++++++ .../run/src-2/tastytest/TestAsyncSuite.scala | 7 ++++ .../run/src-2/tastytest/TestThisTypes.scala | 10 ++++++ .../tasty/run/src-3/tastytest/ThisTypes.scala | 33 +++++++++++++++++++ .../tastytest/testsuite/testsuites.scala | 22 +++++++++++++ 9 files changed, 133 insertions(+), 12 deletions(-) create mode 100644 test/tasty/neg/src-2/TestThisTypes.check create mode 100644 test/tasty/neg/src-2/TestThisTypes_fail.scala create mode 100644 test/tasty/neg/src-3/ThisTypes.scala create mode 100644 test/tasty/run/src-2/tastytest/TestAsyncSuite.scala create mode 100644 test/tasty/run/src-2/tastytest/TestThisTypes.scala create mode 100644 test/tasty/run/src-3/tastytest/ThisTypes.scala create mode 100644 test/tasty/run/src-3/tastytest/testsuite/testsuites.scala diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala index a9a263cee470..ce414b67a0f0 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala @@ -64,9 +64,12 @@ trait ContextOps { self: TastyUniverse => } final def location(owner: Symbol): String = { - if (!isSymbol(owner)) "" - else if (owner.isClass) s"${owner.kindString} ${owner.fullNameString}" - else s"${describeOwner(owner)} in ${location(owner.owner)}" + if (!isSymbol(owner)) + "" + else if (owner.isClass || owner.isPackageClass || owner.isPackageObjectOrClass) + s"${owner.kindString} ${owner.fullNameString}" + else + s"${describeOwner(owner)} in ${location(owner.owner)}" } @inline final def typeError[T](msg: String): T = throw new u.TypeError(msg) diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala index a6145a026ccf..323686b52499 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala @@ -82,7 +82,13 @@ trait TypeOps { self: TastyUniverse => } def preStr(pre: Type): String = { val preSym = symOfType(pre) - if (isSymbol(preSym)) s"${preSym.fullName}." 
else "" + val thisStr = { + if (pre.isInstanceOf[u.ThisType] && !pre.typeSymbol.isPackageClass && !pre.typeSymbol.isModuleClass) + ".this" + else + "" + } + if (isSymbol(preSym)) s"${preSym.fullName}$thisStr." else "" } tpe match { case tpe: u.ClassInfoType => cls(Nil, tpe) @@ -92,19 +98,24 @@ trait TypeOps { self: TastyUniverse => case tpe: u.ThisType => prefixed("path") { s"${tpe.sym.fullName}.this" } case tpe: u.SingleType => - prefixed("path") { s"${preStr(tpe.prefix)}${tpe.sym.name}.type" } + prefixed("path") { + if (tpe.sym.isModule) tpe.sym.fullName + ".type" + else s"${preStr(tpe.pre)}${tpe.sym.name}.type" + } case tpe: u.TypeRef => - val pre = preStr(tpe.prefix) if (tpe.sym.is(Object)) prefixed("path") { - s"$pre${tpe.sym.name}.type" + s"${tpe.sym.fullName}.type" } else prefixed("tpelazy") { + val pre = preStr(tpe.pre) val argsStrs = tpe.args.map(showType(_, wrap = false)) val argsStr = if (argsStrs.nonEmpty) argsStrs.mkString("[", ", ", "]") else "" s"$pre${tpe.sym.name}$argsStr" } + case tpe: u.TypeBounds => prefixed("tpebounds") { s"$tpe"} + case tpe => prefixed("tpe") { s"$tpe" } } } @@ -553,9 +564,9 @@ trait TypeOps { self: TastyUniverse => def prefixedRef(prefix: Type, sym: Symbol): Type = { if (sym.isType) { prefix match { - case tp: u.ThisType if tp.sym.isRefinementClass => sym.preciseRef(prefix) - case _:u.SingleType | _:u.RefinedType => sym.preciseRef(prefix) - case _ => sym.ref + case tp: u.ThisType if !sym.isTypeParameter => sym.preciseRef(prefix) + case _:u.SingleType | _:u.RefinedType => sym.preciseRef(prefix) + case _ => sym.ref } } else if (sym.isConstructor) { @@ -576,9 +587,8 @@ trait TypeOps { self: TastyUniverse => def namedMemberOfPrefix(pre: Type, name: TastyName)(implicit ctx: Context): Type = namedMemberOfTypeWithPrefix(pre, pre, name) - def namedMemberOfTypeWithPrefix(pre: Type, space: Type, tname: TastyName)(implicit ctx: Context): Type = { + def namedMemberOfTypeWithPrefix(pre: Type, space: Type, tname: TastyName)(implicit ctx: Context): Type = prefixedRef(pre, namedMemberOfType(space, tname)) - } def lambdaResultType(resType: Type): Type = resType match { case res: LambdaPolyType => res.toNested diff --git a/test/tasty/neg/src-2/TestThisTypes.check b/test/tasty/neg/src-2/TestThisTypes.check new file mode 100644 index 000000000000..a9025c7b3fa3 --- /dev/null +++ b/test/tasty/neg/src-2/TestThisTypes.check @@ -0,0 +1,6 @@ +TestThisTypes_fail.scala:12: error: type mismatch; + found : b.Base + required: a.Base + aBase = b.doTest.get // error + ^ +1 error diff --git a/test/tasty/neg/src-2/TestThisTypes_fail.scala b/test/tasty/neg/src-2/TestThisTypes_fail.scala new file mode 100644 index 000000000000..b0c82d808096 --- /dev/null +++ b/test/tasty/neg/src-2/TestThisTypes_fail.scala @@ -0,0 +1,15 @@ +package tastytest + +import ThisTypes._ + +object TestThisTypes { + + def test = { + val a = new Sub3() + val b = new Sub3() + + var aBase = a.doTest.get + aBase = b.doTest.get // error + } + +} diff --git a/test/tasty/neg/src-3/ThisTypes.scala b/test/tasty/neg/src-3/ThisTypes.scala new file mode 100644 index 000000000000..bf958993c0d8 --- /dev/null +++ b/test/tasty/neg/src-3/ThisTypes.scala @@ -0,0 +1,15 @@ +package tastytest + +object ThisTypes { + + abstract class Wrap3 { + class Base + final type Res = Option[Base] + def doTest: Res + } + + class Sub3 extends Wrap3 { + def doTest: Res = Some(new Base()) + } + +} diff --git a/test/tasty/run/src-2/tastytest/TestAsyncSuite.scala b/test/tasty/run/src-2/tastytest/TestAsyncSuite.scala new file mode 100644 index 
000000000000..ea101a9f3ec8 --- /dev/null +++ b/test/tasty/run/src-2/tastytest/TestAsyncSuite.scala @@ -0,0 +1,7 @@ +package tastytest + +object TestAsyncSuite extends Suite("TestAsyncSuite") { + + class MySuite extends testsuite.AsyncSuite + +} diff --git a/test/tasty/run/src-2/tastytest/TestThisTypes.scala b/test/tasty/run/src-2/tastytest/TestThisTypes.scala new file mode 100644 index 000000000000..ae7f12fbaa54 --- /dev/null +++ b/test/tasty/run/src-2/tastytest/TestThisTypes.scala @@ -0,0 +1,10 @@ +package tastytest + +import ThisTypes._ + +object TestThisTypes extends Suite("TestThisTypes") { + + test(assert(new Sub().doTest.get.x === 23)) + test(assert(new Sub2().doTest.get.x === 23)) + +} diff --git a/test/tasty/run/src-3/tastytest/ThisTypes.scala b/test/tasty/run/src-3/tastytest/ThisTypes.scala new file mode 100644 index 000000000000..43936831ebe3 --- /dev/null +++ b/test/tasty/run/src-3/tastytest/ThisTypes.scala @@ -0,0 +1,33 @@ +package tastytest + +object ThisTypes { + + abstract class Wrap[T] { + type Base[A] <: { // if not resolved to Sub.this.Base then reflective calls will be needed + def x: A + } + final type Res = Option[Base[T]] + def doTest: Res + } + + class Sub extends Wrap[Int] { + class BaseImpl[A](a: A) { + def x: A = a + } + override type Base[A] = BaseImpl[A] + def doTest: Res = Some(new BaseImpl(23)) + } + + abstract class Wrap2[T] { + class Base[A](a: A) { + def x: A = a + } + final type Res = Option[Base[T]] + def doTest: Res + } + + class Sub2 extends Wrap2[Int] { + def doTest: Res = Some(new Base(23)) + } + +} diff --git a/test/tasty/run/src-3/tastytest/testsuite/testsuites.scala b/test/tasty/run/src-3/tastytest/testsuite/testsuites.scala new file mode 100644 index 000000000000..b1d6330fe215 --- /dev/null +++ b/test/tasty/run/src-3/tastytest/testsuite/testsuites.scala @@ -0,0 +1,22 @@ +package tastytest.testsuite + +import scala.concurrent.Future + +class AsyncSuite extends TestSuite { + final type TestBody = Future[Any] + + def testsuiteTests(): Seq[Test] = ??? 
+} + +abstract class TestSuite { + + type TestBody + final type Test = TestImpl[TestBody] + + def testsuiteTests(): Seq[Test] + +} + +class TestImpl[T] + +class MySuite extends AsyncSuite From 607c3c8d10da94eba3cc337a993ed79cd6383198 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Mon, 23 Aug 2021 17:42:57 +0200 Subject: [PATCH 366/769] refactor tasty sources --- .../scala/tools/nsc/tasty/TreeUnpickler.scala | 43 +++--- .../tools/nsc/tasty/bridge/SymbolOps.scala | 22 ++- .../tools/nsc/tasty/bridge/TreeOps.scala | 4 +- .../tools/nsc/tasty/bridge/TypeOps.scala | 142 ++++++++---------- 4 files changed, 94 insertions(+), 117 deletions(-) diff --git a/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala b/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala index bab7e789ddfa..89d42b9f48ba 100644 --- a/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala +++ b/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala @@ -380,18 +380,21 @@ class TreeUnpickler[Tasty <: TastyUniverse]( val result = (tag: @switch) match { - case TERMREFin => selectTerm(readTastyName(), readType(), readType()) - case TYPEREFin => selectType(readTastyName().toTypeName, readType(), readType()) + case TERMREFin => + defn.TermRefIn(name = readTastyName(), prefix = readType(), space = readType()) + case TYPEREFin => + defn.TypeRefIn(name = readTastyName().toTypeName, prefix = readType(), space = readType()) case REFINEDtype => var name = readTastyName() val parent = readType() if (nextUnsharedTag === TYPEBOUNDS) name = name.toTypeName - ctx.enterRefinement(parent)(refinedCtx => defn.RefinedType(parent, name, refinedCtx.owner, readType())) + ctx.enterRefinement(parent)(refinedCtx => + defn.RefinedType(parent, name, refinedCtx.owner, readType()) + ) case APPLIEDtype => defn.AppliedType(readType(), until(end)(readType())) case TYPEBOUNDS => val lo = readType() - if (nothingButMods(end)) - typeRef(readVariances(lo)) + if (nothingButMods(end)) readVariances(lo) else defn.TypeBounds(lo, readVariances(readType())) case ANNOTATEDtype => defn.AnnotatedType(readType(), readTerm()(ctx.addMode(ReadAnnotation))) case ANDtype => defn.IntersectionType(readType(), readType()) @@ -409,14 +412,14 @@ class TreeUnpickler[Tasty <: TastyUniverse]( def readSimpleType(): Type = { (tag: @switch) match { - case TYPEREFdirect => readSymRef().termRef - case TERMREFdirect => readSymRef().singleRef - case TYPEREFsymbol | TERMREFsymbol => readSymNameRef() - case TYPEREFpkg => readPackageRef().objectImplementation.ref - case TERMREFpkg => readPackageRef().termRef - case TYPEREF => selectType(readTastyName().toTypeName, readType()) - case TERMREF => selectTerm(readTastyName(), readType()) - case THIS => defn.ThisType(singletonLike(readType())) + case TYPEREFdirect => defn.NamedType(defn.NoPrefix, readSymRef()) + case TERMREFdirect => defn.NamedType(defn.NoPrefix, readSymRef()) + case TYPEREFsymbol | TERMREFsymbol => defn.NamedType(sym = readSymRef(), prefix = readType()) + case TYPEREFpkg => defn.NamedType(defn.NoPrefix, sym = readPackageRef().objectImplementation) + case TERMREFpkg => defn.NamedType(defn.NoPrefix, sym = readPackageRef()) + case TYPEREF => defn.TypeRef(name = readTastyName().toTypeName, prefix = readType()) + case TERMREF => defn.TermRef(name = readTastyName(), prefix = readType()) + case THIS => defn.ThisType(readType()) case RECtype => typeAtAddr.get(start) match { case Some(tp) => @@ -427,7 +430,7 @@ class TreeUnpickler[Tasty <: TastyUniverse]( registeringTypeWith(rt, readType()(ctx.withOwner(rt.refinementClass))) 
).tap(typeAtAddr(start) = _) } - case RECthis => recThis(readTypeRef()) + case RECthis => defn.RecThis(readTypeRef()) case SHAREDtype => val ref = readAddr() typeAtAddr.getOrElseUpdate(ref, forkAt(ref).readType()) @@ -440,12 +443,6 @@ class TreeUnpickler[Tasty <: TastyUniverse]( } } - private def readSymNameRef()(implicit ctx: Context): Type = { - val sym = readSymRef() - val prefix = readType() - prefixedRef(prefix, sym) - } - private def readPackageRef()(implicit ctx: Context): Symbol = { ctx.requiredPackage(readTastyName()) } @@ -839,7 +836,7 @@ class TreeUnpickler[Tasty <: TastyUniverse]( sym.addAnnotation(annot) } val valueParamss = normalizeIfConstructor(vparamss, isCtor) - val resType = effectiveResultType(sym, typeParams, tpt.tpe) + val resType = effectiveResultType(sym, tpt.tpe) ctx.setInfo(sym, defn.DefDefType(if (isCtor) Nil else typeParams, valueParamss, resType)) } @@ -850,7 +847,7 @@ class TreeUnpickler[Tasty <: TastyUniverse]( ctx.setInfo(sym, if (repr.tflags.is(FlagSets.SingletonEnum)) { ctx.completeEnumSingleton(sym, tpe) - prefixedRef(sym.owner.thisPrefix, sym.objectImplementation) + defn.NamedType(sym.owner.thisPrefix, sym.objectImplementation) } else if (sym.isFinal && isConstantType(tpe)) defn.InlineExprType(tpe) else if (sym.isMethod) defn.ExprType(tpe) @@ -1078,7 +1075,7 @@ class TreeUnpickler[Tasty <: TastyUniverse]( def readQualId(): (TastyName.TypeName, Type) = { val qual = readTerm() - (qual.typeIdent, defn.ThisType(symOfTypeRef(qual.tpe))) + (qual.typeIdent, defn.ThisType(qual.tpe)) } def completeSelectType(name: TastyName.TypeName)(implicit ctx: Context): Tree = completeSelect(name) diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/SymbolOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/SymbolOps.scala index 4384cc14a193..543bbb72d469 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/SymbolOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/SymbolOps.scala @@ -33,10 +33,10 @@ trait SymbolOps { self: TastyUniverse => final def declaringSymbolOf(sym: Symbol): Symbol = if (sym.isModuleClass) sym.sourceModule else sym - private final def deepComplete(tpe: Type)(implicit ctx: Context): Unit = { - symOfType(tpe) match { + private final def deepComplete(space: Type)(implicit ctx: Context): Unit = { + symOfType(space) match { case u.NoSymbol => - ctx.log(s"could not retrieve symbol from type ${showType(tpe)}") + ctx.log(s"could not retrieve symbol from type ${showType(space)}") case termSym if termSym.isTerm => if (termSym.is(Object)) { termSym.ensureCompleted(SpaceForce) @@ -113,11 +113,7 @@ trait SymbolOps { self: TastyUniverse => def objectImplementation: Symbol = sym.moduleClass def sourceObject: Symbol = sym.sourceModule - def ref(args: List[Type]): Type = u.appliedType(sym, args) - def ref: Type = sym.ref(Nil) - def singleRef: Type = u.singleType(u.NoPrefix, sym) - def termRef: Type = sym.preciseRef(u.NoPrefix) - def preciseRef(pre: Type): Type = u.typeRef(pre, sym, Nil) + def ref: Type = u.appliedType(sym, Nil) def safeOwner: Symbol = if (sym.owner eq sym) sym else sym.owner } @@ -129,15 +125,15 @@ trait SymbolOps { self: TastyUniverse => else termParamss - def namedMemberOfType(space: Type, tname: TastyName)(implicit ctx: Context): Symbol = { + private[bridge] def lookupSymbol(space: Type, tname: TastyName)(implicit ctx: Context): Symbol = { deepComplete(space) tname match { - case SignedName(qual, sig, target) => signedMemberOfSpace(space, qual, sig.map(_.encode), target) - case _ => memberOfSpace(space, tname) + case SignedName(qual, sig, 
target) => lookupSigned(space, qual, sig.map(_.encode), target) + case _ => lookupSimple(space, tname) } } - private def memberOfSpace(space: Type, tname: TastyName)(implicit ctx: Context): Symbol = { + private def lookupSimple(space: Type, tname: TastyName)(implicit ctx: Context): Symbol = { // TODO [tasty]: dotty uses accessibleDenot which asserts that `fetched.isAccessibleFrom(pre)`, // or else filters for non private. // There should be an investigation to see what code makes that false, and what is an equivalent check. @@ -189,7 +185,7 @@ trait SymbolOps { self: TastyUniverse => typeError(s"can't find $missing; perhaps it is missing from the classpath.") } - private def signedMemberOfSpace( + private def lookupSigned( space: Type, qual: TastyName, sig: MethodSignature[ErasedTypeRef], diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/TreeOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/TreeOps.scala index 57401cb81bce..7faac4e3c313 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/TreeOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/TreeOps.scala @@ -57,10 +57,10 @@ trait TreeOps { self: TastyUniverse => new TastyIdent(name).setType(tpe) @inline final def Select(qual: Tree, name: TastyName)(implicit ctx: Context): Tree = - selectImpl(qual, name)(implicit ctx => namedMemberOfPrefix(qual.tpe, name)) + selectImpl(qual, name)(implicit ctx => lookupTypeFrom(qual.tpe)(qual.tpe, name)) @inline final def Select(owner: Type)(qual: Tree, name: TastyName)(implicit ctx: Context): Tree = - selectImpl(qual, name)(implicit ctx => namedMemberOfTypeWithPrefix(qual.tpe, owner, name)) + selectImpl(qual, name)(implicit ctx => lookupTypeFrom(owner)(qual.tpe, name)) private def selectImpl(qual: Tree, name: TastyName)(lookup: Context => Type)(implicit ctx: Context): Tree = { diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala index 323686b52499..513a2bf01cee 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/TypeOps.scala @@ -155,6 +155,7 @@ trait TypeOps { self: TastyUniverse => } final val NoType: Type = u.NoType + final val NoPrefix: Type = u.NoPrefix def adjustParent(tp: Type): Type = { val tpe = tp.dealias @@ -213,7 +214,7 @@ trait TypeOps { self: TastyUniverse => def PolyType(params: List[Symbol], res: Type): Type = u.PolyType(params, res) def ClassInfoType(parents: List[Type], clazz: Symbol): Type = u.ClassInfoType(parents, clazz.rawInfo.decls, clazz.asType) def ClassInfoType(parents: List[Type], decls: List[Symbol], clazz: Symbol): Type = u.ClassInfoType(parents, u.newScopeWith(decls:_*), clazz.asType) - def ThisType(sym: Symbol): Type = u.ThisType(sym) + def ThisType(tpe: Type): Type = u.ThisType(symOfType(tpe)) def ConstantType(c: Constant): Type = u.ConstantType(c) def IntersectionType(tps: Type*): Type = u.intersectionType(tps.toList) def IntersectionType(tps: List[Type]): Type = u.intersectionType(tps) @@ -226,6 +227,7 @@ trait TypeOps { self: TastyUniverse => def SuperType(thisTpe: Type, superTpe: Type): Type = u.SuperType(thisTpe, superTpe) def LambdaFromParams(typeParams: List[Symbol], ret: Type): Type = u.PolyType(typeParams, lambdaResultType(ret)) def RecType(run: RecType => Type)(implicit ctx: Context): Type = new RecType(run).parent + def RecThis(tpe: Type): Type = tpe.asInstanceOf[RecType].recThis /** The method type corresponding to given parameters and result type */ def DefDefType(typeParams: List[Symbol], valueParamss: 
List[List[Symbol]], resultType: Type): Type = { @@ -295,6 +297,60 @@ trait TypeOps { self: TastyUniverse => def ParamRef(binder: Type, idx: Int): Type = binder.asInstanceOf[LambdaType].lambdaParams(idx).ref + def NamedType(prefix: Type, sym: Symbol): Type = { + if (sym.isType) { + prefix match { + case tp: u.ThisType if !sym.isTypeParameter => u.typeRef(prefix, sym, Nil) + case _:u.SingleType | _:u.RefinedType => u.typeRef(prefix, sym, Nil) + case _ => u.appliedType(sym, Nil) + } + } + else { // is a term + if (sym.hasAllFlags(Flags.PackageFlags)) { + u.typeRef(u.NoPrefix, sym, Nil) + } else { + u.singleType(prefix, sym) + } + } + } + + def TypeRef(prefix: Type, name: TastyName.TypeName)(implicit ctx: Context): Type = + TypeRefIn(prefix, prefix, name) + + def TypeRefIn(prefix: Type, space: Type, name: TastyName.TypeName)(implicit ctx: Context): Type = { + import scala.tools.tasty.TastyName._ + + def doLookup = lookupTypeFrom(space)(prefix, name) + + // we escape some types in the scala package especially + if (prefix.typeSymbol === u.definitions.ScalaPackage) { + name match { + case TypeName(SimpleName(raw @ SyntheticScala3Type())) => raw match { + case tpnme.And => AndTpe + case tpnme.Or => unionIsUnsupported + case tpnme.ContextFunctionN(n) if (n.toInt > 0) => ContextFunctionType(n.toInt) + case tpnme.FunctionN(n) if (n.toInt > 22) => FunctionXXLType(n.toInt) + case tpnme.TupleCons => genTupleIsUnsupported("scala.*:") + case tpnme.Tuple if !ctx.mode.is(ReadParents) => genTupleIsUnsupported("scala.Tuple") + case tpnme.AnyKind => u.definitions.AnyTpe + case tpnme.Matchable => u.definitions.AnyTpe + case _ => doLookup + } + + case _ => doLookup + } + } + else { + doLookup + } + } + + def TermRef(prefix: Type, name: TastyName)(implicit ctx: Context): Type = + TermRefIn(prefix, prefix, name) + + def TermRefIn(prefix: Type, space: Type, name: TastyName)(implicit ctx: Context): Type = + lookupTypeFrom(space)(prefix, name.toTermName) + } private[bridge] def mkRefinedTypeWith(parents: List[Type], clazz: Symbol, decls: u.Scope): Type = @@ -335,7 +391,7 @@ trait TypeOps { self: TastyUniverse => /** This is a port from Dotty of transforming a Method type to an ErasedTypeRef */ - private[bridge] object NameErasure { + private object NameErasure { def isRepeatedParam(self: Type): Boolean = self.typeSymbol eq u.definitions.RepeatedParamClass @@ -356,7 +412,7 @@ trait TypeOps { self: TastyUniverse => } val arg = elemType(self) val arg1 = if (wildcardArg) u.TypeBounds.upper(arg) else arg - to.ref(arg1 :: Nil) + u.appliedType(to, arg1 :: Nil) } else self } @@ -441,47 +497,6 @@ trait TypeOps { self: TastyUniverse => private val SyntheticScala3Type = raw"^(?:&|\||AnyKind|(?:Context)?Function\d+|\*:|Tuple|Matchable)$$".r - def selectType(name: TastyName.TypeName, prefix: Type)(implicit ctx: Context): Type = selectType(name, prefix, prefix) - def selectType(name: TastyName.TypeName, prefix: Type, space: Type)(implicit ctx: Context): Type = { - import scala.tools.tasty.TastyName._ - - def lookupType = namedMemberOfTypeWithPrefix(prefix, space, name) - - // we escape some types in the scala package especially - if (prefix.typeSymbol === u.definitions.ScalaPackage) { - name match { - case TypeName(SimpleName(raw @ SyntheticScala3Type())) => raw match { - case tpnme.And => AndTpe - case tpnme.Or => unionIsUnsupported - case tpnme.ContextFunctionN(n) if (n.toInt > 0) => ContextFunctionType(n.toInt) - case tpnme.FunctionN(n) if (n.toInt > 22) => FunctionXXLType(n.toInt) - case tpnme.TupleCons => 
genTupleIsUnsupported("scala.*:") - case tpnme.Tuple if !ctx.mode.is(ReadParents) => genTupleIsUnsupported("scala.Tuple") - case tpnme.AnyKind => u.definitions.AnyTpe - case tpnme.Matchable => u.definitions.AnyTpe - case _ => lookupType - } - - case _ => lookupType - } - } - else { - lookupType - } - } - - def selectTerm(name: TastyName, prefix: Type)(implicit ctx: Context): Type = selectTerm(name, prefix, prefix) - def selectTerm(name: TastyName, prefix: Type, space: Type)(implicit ctx: Context): Type = - namedMemberOfTypeWithPrefix(prefix, space, name.toTermName) - - def singletonLike(tpe: Type): Symbol = tpe match { - case u.SingleType(_, sym) => sym - case u.TypeRef(_,sym,_) => sym - case x => throw new MatchError(x) - } - - private[TypeOps] val NoSymbolFn = (_: Context) => u.NoSymbol - sealed abstract trait TastyRepr extends u.Type { def tflags: TastyFlagSet final def unsupportedFlags: TastyFlagSet = tflags & FlagSets.TastyOnlyFlags @@ -561,36 +576,10 @@ trait TypeOps { self: TastyUniverse => def computeInfo(sym: Symbol)(implicit ctx: Context): Unit } - def prefixedRef(prefix: Type, sym: Symbol): Type = { - if (sym.isType) { - prefix match { - case tp: u.ThisType if !sym.isTypeParameter => sym.preciseRef(prefix) - case _:u.SingleType | _:u.RefinedType => sym.preciseRef(prefix) - case _ => sym.ref - } - } - else if (sym.isConstructor) { - normaliseConstructorRef(sym) - } - else { - u.singleType(prefix, sym) - } - } - - def normaliseConstructorRef(ctor: Symbol): Type = { - var tpe = ctor.tpe - val tParams = ctor.owner.typeParams - if (tParams.nonEmpty) tpe = u.PolyType(tParams, tpe) - tpe - } - - def namedMemberOfPrefix(pre: Type, name: TastyName)(implicit ctx: Context): Type = - namedMemberOfTypeWithPrefix(pre, pre, name) - - def namedMemberOfTypeWithPrefix(pre: Type, space: Type, tname: TastyName)(implicit ctx: Context): Type = - prefixedRef(pre, namedMemberOfType(space, tname)) + private[bridge] def lookupTypeFrom(owner: Type)(pre: Type, tname: TastyName)(implicit ctx: Context): Type = + defn.NamedType(pre, lookupSymbol(owner, tname)) - def lambdaResultType(resType: Type): Type = resType match { + private def lambdaResultType(resType: Type): Type = resType match { case res: LambdaPolyType => res.toNested case res => res } @@ -621,12 +610,10 @@ trait TypeOps { self: TastyUniverse => private[bridge] final class OpaqueTypeBounds(lo: Type, hi: Type, val alias: Type) extends u.TypeBounds(lo, hi) - def typeRef(tpe: Type): Type = u.appliedType(tpe, Nil) - /** The given type, unless `sym` is a constructor, in which case the * type of the constructed instance is returned */ - def effectiveResultType(sym: Symbol, typeParams: List[Symbol], givenTp: Type): Type = + def effectiveResultType(sym: Symbol, givenTp: Type): Type = if (sym.name == u.nme.CONSTRUCTOR) sym.owner.tpe else givenTp @@ -857,9 +844,6 @@ trait TypeOps { self: TastyUniverse => abstract class TermLambdaFactory extends LambdaFactory[TastyName] abstract class TypeLambdaFactory extends LambdaFactory[TastyName.TypeName] - def recThis(tpe: Type): Type = tpe.asInstanceOf[RecType].recThis - def symOfTypeRef(tpe: Type): Symbol = tpe.asInstanceOf[u.TypeRef].sym - private[TypeOps] final class RecType(run: RecType => Type)(implicit ctx: Context) extends Type with Product { override val productPrefix = "RecType" From e5529f643908acf639294a4563331e838c91927d Mon Sep 17 00:00:00 2001 From: NthPortal Date: Fri, 27 Aug 2021 12:34:38 -0400 Subject: [PATCH 367/769] Add specialized builder for s.c.i.SeqMap --- project/MimaFilters.scala | 2 + 
.../scala/collection/immutable/SeqMap.scala | 55 ++++++++++++++++++- .../scala/collection/FactoriesTest.scala | 5 +- .../collection/immutable/SeqMapTest.scala | 42 ++++++++++++++ 4 files changed, 99 insertions(+), 5 deletions(-) create mode 100644 test/junit/scala/collection/immutable/SeqMapTest.scala diff --git a/project/MimaFilters.scala b/project/MimaFilters.scala index db6622643cdb..1674ddfcb751 100644 --- a/project/MimaFilters.scala +++ b/project/MimaFilters.scala @@ -48,6 +48,8 @@ object MimaFilters extends AutoPlugin { ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.convert.JavaCollectionWrappers#JConcurrentMapWrapper.filterInPlaceImpl"), ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.convert.JavaCollectionWrappers#JConcurrentMapWrapper.mapValuesInPlaceImpl"), + // #9741 + ProblemFilters.exclude[MissingClassProblem]("scala.collection.immutable.SeqMap$SeqMapBuilderImpl"), ) override val buildSettings = Seq( diff --git a/src/library/scala/collection/immutable/SeqMap.scala b/src/library/scala/collection/immutable/SeqMap.scala index 8a899ee535c7..013697d64cce 100644 --- a/src/library/scala/collection/immutable/SeqMap.scala +++ b/src/library/scala/collection/immutable/SeqMap.scala @@ -14,7 +14,7 @@ package scala package collection package immutable -import scala.collection.mutable.Builder +import scala.collection.mutable.{Builder, ReusableBuilder} /** * A generic trait for ordered immutable maps. Concrete classes have to provide @@ -48,7 +48,7 @@ object SeqMap extends MapFactory[SeqMap] { case _ => (newBuilder[K, V] ++= it).result() } - def newBuilder[K, V]: Builder[(K, V), SeqMap[K, V]] = VectorMap.newBuilder + def newBuilder[K, V]: Builder[(K, V), SeqMap[K, V]] = new SeqMapBuilderImpl @SerialVersionUID(3L) private object EmptySeqMap extends SeqMap[Any, Nothing] with Serializable { @@ -220,6 +220,55 @@ object SeqMap extends MapFactory[SeqMap] { f(key3, value3) f(key4, value4) } - hashCode + + private[SeqMap] def buildTo[V1 >: V](builder: Builder[(K, V1), SeqMap[K, V1]]): builder.type = + builder.addOne((key1, value1)).addOne((key2, value2)).addOne((key3, value3)).addOne((key4, value4)) + } + + private final class SeqMapBuilderImpl[K, V] extends ReusableBuilder[(K, V), SeqMap[K, V]] { + private[this] var elems: SeqMap[K, V] = SeqMap.empty + private[this] var switchedToVectorMapBuilder: Boolean = false + private[this] var vectorMapBuilder: VectorMapBuilder[K, V] = _ + + override def clear(): Unit = { + elems = SeqMap.empty + if (vectorMapBuilder != null) { + vectorMapBuilder.clear() + } + switchedToVectorMapBuilder = false + } + + override def result(): SeqMap[K, V] = + if (switchedToVectorMapBuilder) vectorMapBuilder.result() else elems + + def addOne(elem: (K, V)) = { + if (switchedToVectorMapBuilder) { + vectorMapBuilder.addOne(elem) + } else if (elems.size < 4) { + elems = elems + elem + } else { + // assert(elems.size == 4) + if (elems.contains(elem._1)) { + elems = elems + elem // will not increase the size of the map + } else { + switchedToVectorMapBuilder = true + if (vectorMapBuilder == null) { + vectorMapBuilder = new VectorMapBuilder + } + elems.asInstanceOf[SeqMap4[K, V]].buildTo(vectorMapBuilder) + vectorMapBuilder.addOne(elem) + } + } + + this + } + + override def addAll(xs: IterableOnce[(K, V)]): this.type = + if (switchedToVectorMapBuilder) { + vectorMapBuilder.addAll(xs) + this + } else { + super.addAll(xs) + } } } diff --git a/test/junit/scala/collection/FactoriesTest.scala b/test/junit/scala/collection/FactoriesTest.scala index 
6eb4ccd8779a..d08fac0759d7 100644 --- a/test/junit/scala/collection/FactoriesTest.scala +++ b/test/junit/scala/collection/FactoriesTest.scala @@ -277,8 +277,9 @@ class FactoriesTest { assert(Iterable().isInstanceOf[List[_]], "Iterable.apply should delegate to List.apply") assert(Iterable(1,2,3).isInstanceOf[List[_]], "Iterable.apply should delegate to List.apply") - assert(im.SeqMap().isInstanceOf[im.VectorMap[_, _]], "immutable.SeqMap.apply should delegate to VectorMap.apply") - assert(im.SeqMap(1 -> 2, 3 -> 4, 5 -> 6).isInstanceOf[im.VectorMap[_, _]], "immutable.SeqMap.apply should delegate to VectorMap.apply") + assert(im.SeqMap().getClass.getSimpleName == "EmptySeqMap$", "immutable.SeqMap.apply should use EmptySeqMap$") + assert(im.SeqMap(1 -> 2, 3 -> 4, 5 -> 6).getClass.getSimpleName == "SeqMap3", "immutable.SeqMap.apply should use SeqMap3") + assert(im.SeqMap(1 -> 2, 3 -> 4, 5 -> 6, 7 -> 8, 9 -> 10).isInstanceOf[im.VectorMap[_, _]], "immutable.SeqMap.apply should delegate to VectorMap.apply") assert(Map().isInstanceOf[im.Map[_, _]], "Map.apply should delegate to immutable.Map.apply") assert(Map(1 -> 2, 3 -> 4, 5 -> 6).isInstanceOf[im.Map[_, _]], "Map.apply should delegate to immutable.Map.apply") diff --git a/test/junit/scala/collection/immutable/SeqMapTest.scala b/test/junit/scala/collection/immutable/SeqMapTest.scala new file mode 100644 index 000000000000..3d8cce9a5fb0 --- /dev/null +++ b/test/junit/scala/collection/immutable/SeqMapTest.scala @@ -0,0 +1,42 @@ +package scala.collection.immutable + +import org.junit.Test +import org.junit.Assert.assertEquals + +import scala.collection.mutable + +class SeqMapTest { + private def checkClass(map: SeqMap[_, _], simpleName: String): Unit = { + assertEquals(simpleName, map.getClass.getSimpleName.stripSuffix("$")) + } + + @Test + def applyFromSmallSizeSpecialization(): Unit = { + checkClass(SeqMap(), "EmptySeqMap") + checkClass(SeqMap(1 -> 1), "SeqMap1") + checkClass(SeqMap(1 -> 1, 2 -> 2), "SeqMap2") + checkClass(SeqMap(1 -> 1, 2 -> 2, 3 -> 3), "SeqMap3") + checkClass(SeqMap(1 -> 1, 2 -> 2, 3 -> 3, 4 -> 4), "SeqMap4") + checkClass(SeqMap(1 -> 1, 2 -> 2, 3 -> 3, 4 -> 4, 5 -> 5), "VectorMap") + + // no knownSize + checkClass(SeqMap.from(List(1 -> 1)), "SeqMap1") + } + + @Test + def newBuilderSmallSizeSpecialization(): Unit = { + type Builder = mutable.Builder[(Int, Int), SeqMap[Int, Int]] + def build(op: Builder => Builder): SeqMap[Int, Int] = + op(SeqMap.newBuilder[Int, Int]).result() + + checkClass(build(identity), "EmptySeqMap") + checkClass(build(_ += 1 -> 1), "SeqMap1") + checkClass(build(_ += 1 -> 1 += 2 -> 2), "SeqMap2") + checkClass(build(_ += 1 -> 1 += 2 -> 2 += 3 -> 3), "SeqMap3") + checkClass(build(_ += 1 -> 1 += 2 -> 2 += 3 -> 3 += 4 -> 4), "SeqMap4") + checkClass(build(_ += 1 -> 1 += 2 -> 2 += 3 -> 3 += 4 -> 4 += 5 -> 5), "VectorMap") + + // `addAll` + checkClass(build(_ ++= List(1 -> 1)), "SeqMap1") + } +} From 8d4e2337a02cda996bd7116fe54f4def85d99de3 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Thu, 2 Sep 2021 11:10:25 +0200 Subject: [PATCH 368/769] skip a few projects in bsp export --- build.sbt | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/build.sbt b/build.sbt index b82d4b4d83a3..8d8d911deb4c 100644 --- a/build.sbt +++ b/build.sbt @@ -354,7 +354,7 @@ def setForkedWorkingDirectory: Seq[Setting[_]] = { } // This project provides the STARR scalaInstance for bootstrapping -lazy val bootstrap = project in file("target/bootstrap") +lazy val bootstrap = 
project.in(file("target/bootstrap")).settings(bspEnabled := false) lazy val library = configureAsSubproject(project) .settings(generatePropertiesFileSettings) @@ -634,8 +634,9 @@ lazy val specLib = project.in(file("test") / "instrumented") .settings(commonSettings) .settings(disableDocs) .settings(fatalWarningsSettings) - .settings(publish / skip := true) .settings( + publish / skip := true, + bspEnabled := false, Compile / sourceGenerators += Def.task { import scala.collection.JavaConverters._ val srcBase = (library / Compile / sourceDirectories).value.head / "scala/runtime" @@ -679,7 +680,9 @@ lazy val bench = project.in(file("test") / "benchmarks") else "org.scala-lang" % "scala-compiler" % benchmarkScalaVersion :: Nil }, scalacOptions ++= Seq("-feature", "-opt:l:inline", "-opt-inline-from:scala/**", "-opt-warnings"), - Jmh / bspEnabled := false // Skips JMH source generators during IDE import to avoid needing to compile scala-library during the import + // Skips JMH source generators during IDE import to avoid needing to compile scala-library during the import + // should not be needed once sbt-jmh 0.4.3 is out (https://github.com/sbt/sbt-jmh/pull/207) + Jmh / bspEnabled := false ).settings(inConfig(JmhPlugin.JmhKeys.Jmh)(scalabuild.JitWatchFilePlugin.jitwatchSettings)) @@ -804,8 +807,9 @@ def osgiTestProject(p: Project, framework: ModuleID) = p .dependsOn(library, reflect, compiler) .settings(commonSettings) .settings(disableDocs) - .settings(publish / skip := true) .settings( + publish / skip := true, + bspEnabled := false, Test / fork := true, Test / parallelExecution := false, libraryDependencies ++= { @@ -930,6 +934,7 @@ lazy val scalaDist = Project("scalaDist", file(".") / "target" / "scala-dist-dis .settings(commonSettings) .settings(disableDocs) .settings( + bspEnabled := false, name := "scala-dist", Compile / packageBin / mappings ++= { val binBaseDir = buildDirectory.value / "pack" @@ -981,9 +986,10 @@ def partestDesc(in: String): Def.Initialize[Task[(Result[Unit], String)]] = lazy val root: Project = (project in file(".")) .settings(disableDocs) - .settings(publish / skip := true) .settings(generateBuildCharacterFileSettings) .settings( + publish / skip := true, + bspEnabled := false, commands ++= ScriptCommands.all, extractBuildCharacterPropertiesFile := { val jar = (bootstrap / scalaInstance).value.allJars.find(_.getName contains "-compiler").get @@ -1122,6 +1128,7 @@ lazy val distDependencies = Seq(replFrontend, compiler, library, reflect, scalap lazy val dist = (project in file("dist")) .settings(commonSettings) .settings( + bspEnabled := false, libraryDependencies ++= jlineDeps, mkBin := mkBinImpl.value, mkQuick := Def.task { From 3494a13eb92c482ee4223c24ce4dd6f6ea92abb6 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Thu, 2 Sep 2021 21:30:49 +0200 Subject: [PATCH 369/769] Prevent compilation of library/reflect/compiler on bsp import Importing / refreshing the BSP triggers compilation of the compiler (and library, reflect). It's rooted in the `buildTarget/resources` bsp call, which runs `bspBuildTargetResources file:/Users/luc/scala/scala13/#scaladoc/Compile`. The issue can be reproduced by just calling `scaladoc/resources` in sbt. The fix is to only look at the `externalDependencyClasspath` (not the `dependencyClasspath`) when searching for webjars as scaladoc resources. 
--- project/ScaladocSettings.scala | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/project/ScaladocSettings.scala b/project/ScaladocSettings.scala index ed4b8a188f38..ead5d1680a97 100644 --- a/project/ScaladocSettings.scala +++ b/project/ScaladocSettings.scala @@ -1,7 +1,7 @@ package scala.build import sbt._ -import sbt.Keys.{ artifact, dependencyClasspath, moduleID, resourceManaged } +import sbt.Keys.{ artifact, externalDependencyClasspath, moduleID, resourceManaged } object ScaladocSettings { @@ -15,7 +15,9 @@ object ScaladocSettings { s.get(artifact.key).isDefined && s.get(moduleID.key).exists(_.organization == "org.webjars") val dest = (resourceManaged.value / "webjars").getAbsoluteFile IO.createDirectory(dest) - val classpathes = (Compile / dependencyClasspath).value + // externalDependencyClasspath (not dependencyClasspath) to avoid compiling + // upstream projects (library, reflect, compiler) on bsp `buildTarget/resources` + val classpathes = (Compile / externalDependencyClasspath).value val files: Seq[File] = classpathes.filter(isWebjar).flatMap { classpathEntry => val jarFile = classpathEntry.data IO.unzip(jarFile, dest) From d6fcac36d317d4da3495be0ea866ca1a1895cdbd Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Fri, 3 Sep 2021 10:31:14 +0200 Subject: [PATCH 370/769] Small build cleanup Align folders in src/ and build/quick Fixes invalid `replFrontend/resourceDirectories` was: /Users/luc/scala/scala13/src/replFrontend now: /Users/luc/scala/scala13/src/repl-frontend Also for partestJavaAgent. --- build.sbt | 22 ++++++++++++---------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/build.sbt b/build.sbt index e887476e5d5f..8178442d8b52 100644 --- a/build.sbt +++ b/build.sbt @@ -46,6 +46,8 @@ val jlineDeps = Seq(jlineDep, jnaDep) val testInterfaceDep = "org.scala-sbt" % "test-interface" % "1.0" val diffUtilsDep = "com.googlecode.java-diff-utils" % "diffutils" % "1.3.0" +val projectFolder = settingKey[String]("subfolder in src when using configureAsSubproject, else the project name") + // `set Global / fatalWarnings := true` to enable -Werror for the certain modules // currently, many modules cannot support -Werror; ideally this setting will eventually // enable -Werror for all modules @@ -131,20 +133,21 @@ lazy val commonSettings = instanceSettings ++ clearSourceAndResourceDirectories // we always assume that Java classes are standalone and do not have any dependency // on Scala classes compileOrder := CompileOrder.JavaThenScala, + projectFolder := thisProject.value.id, // overridden in configureAsSubproject Compile / javacOptions ++= Seq("-g", "-source", "1.8", "-target", "1.8", "-Xlint:unchecked"), Compile / unmanagedJars := Seq.empty, // no JARs in version control! 
Compile / sourceDirectory := baseDirectory.value, Compile / unmanagedSourceDirectories := List(baseDirectory.value), - Compile / unmanagedResourceDirectories += (ThisBuild / baseDirectory).value / "src" / thisProject.value.id, + Compile / unmanagedResourceDirectories += (ThisBuild / baseDirectory).value / "src" / projectFolder.value, sourcesInBase := false, Compile / scalaSource := (Compile / sourceDirectory).value, // for some reason sbt 1.4 issues unused-settings warnings for this, it seems to me incorrectly Global / excludeLintKeys ++= Set(scalaSource), // each subproject has to ask specifically for files they want to include Compile / unmanagedResources / includeFilter := NothingFilter, - target := (ThisBuild / target).value / thisProject.value.id, - Compile / classDirectory := buildDirectory.value / "quick/classes" / thisProject.value.id, - Compile / doc / target := buildDirectory.value / "scaladoc" / thisProject.value.id, + target := (ThisBuild / target).value / projectFolder.value, + Compile / classDirectory := buildDirectory.value / "quick/classes" / projectFolder.value, + Compile / doc / target := buildDirectory.value / "scaladoc" / projectFolder.value, // given that classDirectory and doc target are overridden to be _outside_ of target directory, we have // to make sure they are being cleaned properly cleanFiles += (Compile / classDirectory).value, @@ -425,7 +428,7 @@ lazy val reflect = configureAsSubproject(project) "/project/description" -> Compiler for the Scala Programming Language, "/project/packaging" -> jar ), - apiURL := Some(url(s"https://www.scala-lang.org/api/${versionProperties.value.mavenVersion}/scala-${thisProject.value.id}/")), + apiURL := Some(url(s"https://www.scala-lang.org/api/${versionProperties.value.mavenVersion}/scala-${projectFolder.value}/")), MimaFilters.mimaSettings, ) .dependsOn(library) @@ -509,7 +512,7 @@ lazy val compiler = configureAsSubproject(project) "/project/description" -> Compiler for the Scala Programming Language, "/project/packaging" -> jar ), - apiURL := Some(url(s"https://www.scala-lang.org/api/${versionProperties.value.mavenVersion}/scala-${thisProject.value.id}/")), + apiURL := Some(url(s"https://www.scala-lang.org/api/${versionProperties.value.mavenVersion}/scala-${projectFolder.value}/")), pomDependencyExclusions += (("org.scala-lang.modules", "scala-asm")) ) .dependsOn(library, reflect) @@ -847,10 +850,8 @@ def osgiTestProject(p: Project, framework: ModuleID) = p cleanFiles += (ThisBuild / buildDirectory).value / "osgi" ) -lazy val partestJavaAgent = Project("partestJavaAgent", file(".") / "src" / "partest-javaagent") - .settings(commonSettings) +lazy val partestJavaAgent = configureAsSubproject(project, srcdir = Some("partest-javaagent")) .settings(fatalWarningsSettings) - .settings(generatePropertiesFileSettings) .settings(disableDocs) .settings( libraryDependencies += asmDep, @@ -1140,7 +1141,7 @@ lazy val dist = (project in file("dist")) (ThisBuild / buildDirectory).value / "quick" }.dependsOn((distDependencies.map(_ / Runtime / products) :+ mkBin): _*).value, mkPack := Def.task { (ThisBuild / buildDirectory).value / "pack" }.dependsOn(Compile / packageBin / packagedArtifact, mkBin).value, - target := (ThisBuild / target).value / thisProject.value.id, + target := (ThisBuild / target).value / projectFolder.value, Compile / packageBin := { val targetDir = (ThisBuild / buildDirectory).value / "pack" / "lib" val jlineJAR = findJar((Compile / dependencyClasspath).value, jlineDep).get.data @@ -1177,6 +1178,7 @@ def 
configureAsSubproject(project: Project, srcdir: Option[String] = None): Proj (project in base) .settings(scalaSubprojectSettings) .settings(generatePropertiesFileSettings) + .settings(projectFolder := srcdir.getOrElse(project.id)) } lazy val mkBin = taskKey[Seq[File]]("Generate shell script (bash or Windows batch).") From d14559c82076d9cf9be35ce9f7bf2502f3dbac30 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Fri, 3 Sep 2021 11:45:30 +0200 Subject: [PATCH 371/769] re-enable bsp in root project to restore build.sbt support in IntelliJ --- build.sbt | 1 - 1 file changed, 1 deletion(-) diff --git a/build.sbt b/build.sbt index 8178442d8b52..37445b02f6c5 100644 --- a/build.sbt +++ b/build.sbt @@ -990,7 +990,6 @@ lazy val root: Project = (project in file(".")) .settings(generateBuildCharacterFileSettings) .settings( publish / skip := true, - bspEnabled := false, commands ++= ScriptCommands.all, extractBuildCharacterPropertiesFile := { val jar = (bootstrap / scalaInstance).value.allJars.find(_.getName contains "-compiler").get From 3b4dafff1585bc96255cecf944765c31cc702e7e Mon Sep 17 00:00:00 2001 From: dengziming Date: Thu, 2 Sep 2021 20:10:44 +0800 Subject: [PATCH 372/769] ISSUE-12393: Parse concrete private interface methods in Java --- src/compiler/scala/tools/nsc/javac/JavaParsers.scala | 2 +- test/files/pos/t12393/R1.java | 7 +++++++ 2 files changed, 8 insertions(+), 1 deletion(-) create mode 100644 test/files/pos/t12393/R1.java diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala index fdd81da701da..2049693a81f3 100644 --- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala +++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala @@ -613,7 +613,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { val vparams = formalParams() if (!isVoid) rtpt = optArrayBrackets(rtpt) optThrows() - val isConcreteInterfaceMethod = !inInterface || (mods hasFlag Flags.JAVA_DEFAULTMETHOD) || (mods hasFlag Flags.STATIC) + val isConcreteInterfaceMethod = !inInterface || (mods hasFlag Flags.JAVA_DEFAULTMETHOD) || (mods hasFlag Flags.STATIC) || (mods hasFlag Flags.PRIVATE) val bodyOk = !(mods1 hasFlag Flags.DEFERRED) && isConcreteInterfaceMethod val body = if (bodyOk && in.token == LBRACE) { diff --git a/test/files/pos/t12393/R1.java b/test/files/pos/t12393/R1.java new file mode 100644 index 000000000000..08c764ceb4ba --- /dev/null +++ b/test/files/pos/t12393/R1.java @@ -0,0 +1,7 @@ +// javaVersion: 9+ +public interface R1 { + + private void foo() { + return; + } +} From 42bfebf8c2cd754aa2fcaac730d2f85eb2be4002 Mon Sep 17 00:00:00 2001 From: Daniel Le Date: Sun, 5 Sep 2021 12:40:54 +0800 Subject: [PATCH 373/769] Fix BTypes.LONG.maxValueType(BTypes.FLOAT) The result must be BTypes.FLOAT instead of BTypes.DOUBLE. This implementation incorporates the suggestion at https://github.com/scala/scala/pull/7435#pullrequestreview-176328267. It also adds unit tests for BTypes.LONG receivers of this method. 
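For context, an illustration that is not part of the patch: `maxValueType` mirrors the JVM's
binary numeric promotion, where an operation mixing `long` and `float` operands is performed
in `float`; `double` only enters the picture if one operand is already a `double`. The same
rule is visible at the source level through the standard overload `Long.+(Float): Float`:

    // illustration only: promoting a Long/Float pair picks Float, not Double
    object LongFloatPromotion {
      val l: Long  = 3L
      val f: Float = 1.5f
      val sum = l + f   // inferred as Float, matching the expected LONG.maxValueType(FLOAT) == FLOAT
    }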
--- .../scala/tools/nsc/backend/jvm/BTypes.scala | 2 +- .../tools/nsc/backend/jvm/BTypesTest.scala | 29 ++++++++++++++++++- 2 files changed, 29 insertions(+), 2 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala index f4c5bb3e9adb..23eacc7e5c27 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala @@ -297,7 +297,7 @@ abstract class BTypes { case LONG => if (other.isIntegralType) LONG - else if (other.isRealType) DOUBLE + else if (other.isRealType) other else uncomparable case FLOAT => diff --git a/test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala b/test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala index 65f7af64ec32..76ddfe064453 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala @@ -19,7 +19,8 @@ class BTypesTest extends BytecodeTesting { } import global.genBCode.bTypes._ - def classBTFS(sym: global.Symbol) = global.exitingDelambdafy(classBTypeFromSymbol(sym)) + def duringBackend[T](f: => T) = global.exitingDelambdafy(f) + def classBTFS(sym: global.Symbol) = duringBackend { classBTypeFromSymbol(sym) } def jlo = global.definitions.ObjectClass def jls = global.definitions.StringClass @@ -221,4 +222,30 @@ class BTypesTest extends BytecodeTesting { def maxTypeTest() { } + + @Test + def maxValueTypeATest(): Unit = duringBackend { + assertEquals(LONG, LONG.maxValueType(BYTE)) + assertEquals(LONG, LONG.maxValueType(SHORT)) + assertEquals(LONG, LONG.maxValueType(CHAR)) + assertEquals(LONG, LONG.maxValueType(INT)) + assertEquals(LONG, LONG.maxValueType(LONG)) + assertEquals(FLOAT, LONG.maxValueType(FLOAT)) + assertEquals(DOUBLE, LONG.maxValueType(DOUBLE)) + + assertUncomparable(LONG, UNIT) + assertUncomparable(LONG, BOOL) + assertUncomparable(LONG, o) + assertUncomparable(LONG, s) + assertUncomparable(LONG, oArr) + assertUncomparable(LONG, method) + + def assertUncomparable(t1: PrimitiveBType, t2: BType): Unit = { + try { + t1.maxValueType(t2) + } catch { + case e: AssertionError => assertEquals(s"Cannot compute maxValueType: $t1, $t2", e.getMessage) + } + } + } } From 81de907b717d3684dd8642fdf60aee2395bc756a Mon Sep 17 00:00:00 2001 From: Daniel Le Date: Sun, 5 Sep 2021 12:49:03 +0800 Subject: [PATCH 374/769] Refactor BTypesTest#typedOpcodes --- test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala b/test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala index 76ddfe064453..40c96db7c23e 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala @@ -51,7 +51,7 @@ class BTypesTest extends BytecodeTesting { assert(FLOAT.typedOpcode(Opcodes.IALOAD) == Opcodes.FALOAD) assert(LONG.typedOpcode(Opcodes.IALOAD) == Opcodes.LALOAD) assert(DOUBLE.typedOpcode(Opcodes.IALOAD) == Opcodes.DALOAD) - assert(classBTFS(jls).typedOpcode(Opcodes.IALOAD) == Opcodes.AALOAD) + assert(s.typedOpcode(Opcodes.IALOAD) == Opcodes.AALOAD) assert(UNIT.typedOpcode(Opcodes.IRETURN) == Opcodes.RETURN) assert(BOOL.typedOpcode(Opcodes.IRETURN) == Opcodes.IRETURN) @@ -62,7 +62,7 @@ class BTypesTest extends BytecodeTesting { assert(FLOAT.typedOpcode(Opcodes.IRETURN) == Opcodes.FRETURN) assert(LONG.typedOpcode(Opcodes.IRETURN) == Opcodes.LRETURN) assert(DOUBLE.typedOpcode(Opcodes.IRETURN) == 
Opcodes.DRETURN) - assert(classBTFS(jls).typedOpcode(Opcodes.IRETURN) == Opcodes.ARETURN) + assert(s.typedOpcode(Opcodes.IRETURN) == Opcodes.ARETURN) } @Test From 8f9416d3e3534efcefa87d9d4e3fe4c74bfefacb Mon Sep 17 00:00:00 2001 From: Daniel Le Date: Mon, 6 Sep 2021 16:02:33 +0800 Subject: [PATCH 375/769] Use AssertUtil.assertThrows instead of try-catch --- .../junit/scala/tools/nsc/backend/jvm/BTypesTest.scala | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala b/test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala index 40c96db7c23e..7e8aec192a3f 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala @@ -8,6 +8,7 @@ import org.junit.runners.JUnit4 import scala.collection.mutable import scala.tools.asm.Opcodes +import scala.tools.testing.AssertUtil.assertThrows import scala.tools.testing.BytecodeTesting @RunWith(classOf[JUnit4]) @@ -241,11 +242,10 @@ class BTypesTest extends BytecodeTesting { assertUncomparable(LONG, method) def assertUncomparable(t1: PrimitiveBType, t2: BType): Unit = { - try { - t1.maxValueType(t2) - } catch { - case e: AssertionError => assertEquals(s"Cannot compute maxValueType: $t1, $t2", e.getMessage) - } + assertThrows[AssertionError]( + t1.maxValueType(t2), + _.equals(s"Cannot compute maxValueType: $t1, $t2") + ) } } } From b2b00410f297ebfb9e66d93aa0305e42b1c26214 Mon Sep 17 00:00:00 2001 From: NthPortal Date: Tue, 24 Aug 2021 08:39:28 -0400 Subject: [PATCH 376/769] Optimise TrieMap methods by using eq Optimise `filterInPlaceImpl`, `mapValuesInPlaceImpl` and `updateWith` methods on `TrieMap` by using reference equality instead of full equality. --- project/MimaFilters.scala | 18 ++-- .../scala/collection/concurrent/Map.scala | 59 +++++++++++-- .../scala/collection/concurrent/TrieMap.scala | 83 ++++++++++++------- 3 files changed, 117 insertions(+), 43 deletions(-) diff --git a/project/MimaFilters.scala b/project/MimaFilters.scala index 2324946c32ee..d3db0f35967f 100644 --- a/project/MimaFilters.scala +++ b/project/MimaFilters.scala @@ -66,13 +66,21 @@ object MimaFilters extends AutoPlugin { ProblemFilters.exclude[MissingClassProblem]("scala.reflect.io.FileZipArchive$zipFilePool$"), // #9727 - ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.concurrent.TrieMap.filterInPlaceImpl"), - ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.concurrent.TrieMap.mapValuesInPlaceImpl"), - ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.convert.JavaCollectionWrappers#JConcurrentMapWrapper.filterInPlaceImpl"), - ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.convert.JavaCollectionWrappers#JConcurrentMapWrapper.mapValuesInPlaceImpl"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.concurrent.TrieMap.filterInPlaceImpl"), // private[collection] + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.concurrent.TrieMap.mapValuesInPlaceImpl"), // private[collection] + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.convert.JavaCollectionWrappers#JConcurrentMapWrapper.filterInPlaceImpl"), // private[collection] + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.convert.JavaCollectionWrappers#JConcurrentMapWrapper.mapValuesInPlaceImpl"), // private[collection] + + // #9733 + 
ProblemFilters.exclude[MissingClassProblem]("scala.collection.concurrent.TrieMap$RemovalPolicy$"), // private[concurrent] + // is this a MiMa bug? we really should need these two filters + //ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.concurrent.TrieMap.removeRefEq"), // private[concurrent] + //ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.concurrent.TrieMap.replaceRefEq"), // private[concurrent] + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.convert.JavaCollectionWrappers#JConcurrentMapWrapper.removeRefEq"), // private[concurrent] + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.convert.JavaCollectionWrappers#JConcurrentMapWrapper.replaceRefEq"), // private[concurrent] // #9741 - ProblemFilters.exclude[MissingClassProblem]("scala.collection.immutable.SeqMap$SeqMapBuilderImpl"), + ProblemFilters.exclude[MissingClassProblem]("scala.collection.immutable.SeqMap$SeqMapBuilderImpl"), // private[SeqMap] ) override val buildSettings = Seq( diff --git a/src/library/scala/collection/concurrent/Map.scala b/src/library/scala/collection/concurrent/Map.scala index ed9e6f3f3e43..6343e443eda2 100644 --- a/src/library/scala/collection/concurrent/Map.scala +++ b/src/library/scala/collection/concurrent/Map.scala @@ -103,6 +103,43 @@ trait Map[K, V] extends scala.collection.mutable.Map[K, V] { } } + /** + * Removes the entry for the specified key if it's currently mapped to the + * specified value. Comparison to the specified value is done using reference + * equality. + * + * Not all map implementations can support removal based on reference + * equality, and for those implementations, object equality is used instead. + * + * $atomicop + * + * @param k key for which the entry should be removed + * @param v value expected to be associated with the specified key if + * the removal is to take place + * @return `true` if the removal took place, `false` otherwise + */ + // TODO: make part of the API in a future version + private[concurrent] def removeRefEq(k: K, v: V): Boolean = remove(k, v) + + /** + * Replaces the entry for the given key only if it was previously mapped to + * a given value. Comparison to the specified value is done using reference + * equality. + * + * Not all map implementations can support replacement based on reference + * equality, and for those implementations, object equality is used instead. + * + * $atomicop + * + * @param k key for which the entry should be replaced + * @param oldValue value expected to be associated with the specified key + * if replacing is to happen + * @param newValue value to be associated with the specified key + * @return `true` if the entry was replaced, `false` otherwise + */ + // TODO: make part of the API in a future version + private[concurrent] def replaceRefEq(k: K, oldValue: V, newValue: V): Boolean = replace(k, oldValue, newValue) + /** * Update a mapping for the specified key and its current optionally-mapped value * (`Some` if there is current mapping, `None` if not). 
@@ -121,22 +158,26 @@ trait Map[K, V] extends scala.collection.mutable.Map[K, V] { @tailrec private def updateWithAux(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = { - val previousValue = this.get(key) + val previousValue = get(key) val nextValue = remappingFunction(previousValue) - (previousValue, nextValue) match { - case (None, None) => None - case (None, Some(next)) if this.putIfAbsent(key, next).isEmpty => nextValue - case (Some(prev), None) if this.remove(key, prev) => None - case (Some(prev), Some(next)) if this.replace(key, prev, next) => nextValue - case _ => this.updateWithAux(key)(remappingFunction) + previousValue match { + case Some(prev) => nextValue match { + case Some(next) => if (replaceRefEq(key, prev, next)) return nextValue + case _ => if (removeRefEq(key, prev)) return None + } + case _ => nextValue match { + case Some(next) => if (putIfAbsent(key, next).isEmpty) return nextValue + case _ => return None + } } + updateWithAux(key)(remappingFunction) } private[collection] def filterInPlaceImpl(p: (K, V) => Boolean): this.type = { val it = iterator while (it.hasNext) { val (k, v) = it.next() - if (!p(k, v)) remove(k, v) + if (!p(k, v)) removeRefEq(k, v) } this } @@ -145,7 +186,7 @@ trait Map[K, V] extends scala.collection.mutable.Map[K, V] { val it = iterator while (it.hasNext) { val (k, v) = it.next() - replace(k, v, f(k, v)) + replaceRefEq(k, v, f(k, v)) } this } diff --git a/src/library/scala/collection/concurrent/TrieMap.scala b/src/library/scala/collection/concurrent/TrieMap.scala index ca7681b115c1..1ce2ec05e538 100644 --- a/src/library/scala/collection/concurrent/TrieMap.scala +++ b/src/library/scala/collection/concurrent/TrieMap.scala @@ -15,9 +15,9 @@ package collection package concurrent import java.util.concurrent.atomic._ - import scala.{unchecked => uc} import scala.annotation.tailrec +import scala.collection.concurrent.TrieMap.RemovalPolicy import scala.collection.generic.DefaultSerializable import scala.collection.immutable.{List, Nil} import scala.collection.mutable.GrowableBuilder @@ -153,11 +153,12 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen, equiv: E * KEY_ABSENT - key wasn't there, insert only, do not overwrite * KEY_PRESENT - key was there, overwrite only, do not insert * other value `v` - only overwrite if the current value is this + * @param fullEquals whether to use reference or full equals when comparing `v` to the current value * @param hc the hashcode of `k` * * @return null if unsuccessful, Option[V] otherwise (indicating previous value bound to the key) */ - @tailrec def rec_insertif(k: K, v: V, hc: Int, cond: AnyRef, lev: Int, parent: INode[K, V], startgen: Gen, ct: TrieMap[K, V]): Option[V] = { + @tailrec def rec_insertif(k: K, v: V, hc: Int, cond: AnyRef, fullEquals: Boolean, lev: Int, parent: INode[K, V], startgen: Gen, ct: TrieMap[K, V]): Option[V] = { val m = GCAS_READ(ct) // use -Yinline! 
m match { @@ -171,9 +172,9 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen, equiv: E // 1a) insert below cn.array(pos) match { case in: INode[K, V] @uc => - if (startgen eq in.gen) in.rec_insertif(k, v, hc, cond, lev + 5, this, startgen, ct) + if (startgen eq in.gen) in.rec_insertif(k, v, hc, cond, fullEquals, lev + 5, this, startgen, ct) else { - if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_insertif(k, v, hc, cond, lev, parent, startgen, ct) + if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_insertif(k, v, hc, cond, fullEquals, lev, parent, startgen, ct) else null } case sn: SNode[K, V] @uc => cond match { @@ -199,7 +200,7 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen, equiv: E if (GCAS(cn, cn.updatedAt(pos, new SNode(k, v, hc), gen), ct)) Some(sn.v) else null } else None case otherv => - if (sn.hc == hc && equal(sn.k, k, ct) && sn.v == otherv) { + if (sn.hc == hc && equal(sn.k, k, ct) && (if (fullEquals) sn.v == otherv else sn.v.asInstanceOf[AnyRef] eq otherv)) { if (GCAS(cn, cn.updatedAt(pos, new SNode(k, v, hc), gen), ct)) Some(sn.v) else null } else None } @@ -237,7 +238,8 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen, equiv: E } case otherv => ln.get(k) match { - case Some(v0) if v0 == otherv => if (insertln()) Some(otherv.asInstanceOf[V]) else null + case Some(v0) if (if (fullEquals) v0 == otherv else v0.asInstanceOf[AnyRef] eq otherv) => + if (insertln()) Some(otherv.asInstanceOf[V]) else null case _ => None } } @@ -296,15 +298,15 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen, equiv: E * * @param hc the hashcode of `k` * - * @param removeAlways if true, then the value will be removed regardless of the value - * if false, then value will only be removed if it exactly matches v` + * @param removalPolicy policy deciding whether to remove `k` based on `v` and the + * current value associated with `k` (Always, FullEquals, or ReferenceEq) * * @return null if not successful, an Option[V] indicating the previous value otherwise */ def rec_remove( k: K, v: V, - removeAlways: Boolean, + removalPolicy: Int, hc: Int, lev: Int, parent: INode[K, V], @@ -324,13 +326,13 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen, equiv: E val sub = cn.array(pos) val res = sub match { case in: INode[K, V] @uc => - if (startgen eq in.gen) in.rec_remove(k, v, removeAlways, hc, lev + 5, this, startgen, ct) + if (startgen eq in.gen) in.rec_remove(k, v, removalPolicy, hc, lev + 5, this, startgen, ct) else { - if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_remove(k, v, removeAlways, hc, lev, parent, startgen, ct) + if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_remove(k, v, removalPolicy, hc, lev, parent, startgen, ct) else null } case sn: SNode[K, V] @uc => - if (sn.hc == hc && equal(sn.k, k, ct) && (removeAlways || sn.v == v)) { + if (sn.hc == hc && equal(sn.k, k, ct) && RemovalPolicy.shouldRemove(removalPolicy)(sn.v, v)) { val ncn = cn.removedAt(pos, flag, gen).toContracted(lev) if (GCAS(cn, ncn, ct)) Some(sn.v) else null } else None @@ -374,12 +376,12 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen, equiv: E clean(parent, ct, lev - 5) null case ln: LNode[K, V] => - if (removeAlways) { + if (removalPolicy == RemovalPolicy.Always) { val optv = ln.get(k) val nn = ln.removed(k, ct) if (GCAS(ln, nn, ct)) optv else null } else ln.get(k) match { - case optv @ Some(v0) if v0 == v => + case optv @ Some(v0) if RemovalPolicy.shouldRemove(removalPolicy)(v, v0) 
=> val nn = ln.removed(k, ct) if (GCAS(ln, nn, ct)) optv else null case _ => None @@ -796,11 +798,11 @@ final class TrieMap[K, V] private (r: AnyRef, rtupd: AtomicReferenceFieldUpdater if (!r.rec_insert(k, v, hc, 0, null, r.gen, this)) inserthc(k, hc, v) } - @tailrec private def insertifhc(k: K, hc: Int, v: V, cond: AnyRef): Option[V] = { + @tailrec private def insertifhc(k: K, hc: Int, v: V, cond: AnyRef, fullEquals: Boolean): Option[V] = { val r = RDCSS_READ_ROOT() - val ret = r.rec_insertif(k, v, hc, cond, 0, null, r.gen, this) - if (ret eq null) insertifhc(k, hc, v, cond) + val ret = r.rec_insertif(k, v, hc, cond, fullEquals, 0, null, r.gen, this) + if (ret eq null) insertifhc(k, hc, v, cond, fullEquals) else ret } @@ -822,15 +824,15 @@ final class TrieMap[K, V] private (r: AnyRef, rtupd: AtomicReferenceFieldUpdater * * @param k the key to remove * @param v the value compare with the value found associated with the key - * @param removeAlways if true, then `k` will be removed whether or not its value matches `v` - * if false, then `k` will ONLY be removed if its value matches `v` + * @param removalPolicy policy deciding whether to remove `k` based on `v` and the + * current value associated with `k` (Always, FullEquals, or ReferenceEq) * @return an Option[V] indicating the previous value */ - @tailrec private def removehc(k: K, v: V, removeAlways: Boolean, hc: Int): Option[V] = { + @tailrec private def removehc(k: K, v: V, removalPolicy: Int, hc: Int): Option[V] = { val r = RDCSS_READ_ROOT() - val res = r.rec_remove(k, v, removeAlways, hc, 0, null, r.gen, this) + val res = r.rec_remove(k, v, removalPolicy, hc, 0, null, r.gen, this) if (res ne null) res - else removehc(k, v, removeAlways, hc) + else removehc(k, v, removalPolicy, hc) } @@ -907,7 +909,7 @@ final class TrieMap[K, V] private (r: AnyRef, rtupd: AtomicReferenceFieldUpdater override def put(key: K, value: V): Option[V] = { val hc = computeHash(key) - insertifhc(key, hc, value, INode.KEY_PRESENT_OR_ABSENT) + insertifhc(key, hc, value, INode.KEY_PRESENT_OR_ABSENT, fullEquals = false /* unused */) } override def update(k: K, v: V): Unit = { @@ -922,7 +924,7 @@ final class TrieMap[K, V] private (r: AnyRef, rtupd: AtomicReferenceFieldUpdater override def remove(k: K): Option[V] = { val hc = computeHash(k) - removehc(k = k, v = null.asInstanceOf[V], removeAlways = true, hc = hc) + removehc(k = k, v = null.asInstanceOf[V], RemovalPolicy.Always, hc = hc) } def subtractOne(k: K) = { @@ -932,7 +934,7 @@ final class TrieMap[K, V] private (r: AnyRef, rtupd: AtomicReferenceFieldUpdater def putIfAbsent(k: K, v: V): Option[V] = { val hc = computeHash(k) - insertifhc(k, hc, v, INode.KEY_ABSENT) + insertifhc(k, hc, v, INode.KEY_ABSENT, fullEquals = false /* unused */) } // TODO once computeIfAbsent is added to concurrent.Map, @@ -957,7 +959,7 @@ final class TrieMap[K, V] private (r: AnyRef, rtupd: AtomicReferenceFieldUpdater lookuphc(k, hc) match { case INodeBase.NO_SUCH_ELEMENT_SENTINEL => val v = op - insertifhc(k, hc, v, INode.KEY_ABSENT) match { + insertifhc(k, hc, v, INode.KEY_ABSENT, fullEquals = false /* unused */) match { case Some(oldValue) => oldValue case None => v } @@ -967,17 +969,27 @@ final class TrieMap[K, V] private (r: AnyRef, rtupd: AtomicReferenceFieldUpdater def remove(k: K, v: V): Boolean = { val hc = computeHash(k) - removehc(k, v, removeAlways = false, hc).nonEmpty + removehc(k, v, RemovalPolicy.FullEquals, hc).nonEmpty + } + + override private[concurrent] def removeRefEq(k: K, v: V): Boolean = { + val hc = 
computeHash(k) + removehc(k, v, RemovalPolicy.ReferenceEq, hc).nonEmpty } def replace(k: K, oldvalue: V, newvalue: V): Boolean = { val hc = computeHash(k) - insertifhc(k, hc, newvalue, oldvalue.asInstanceOf[AnyRef]).nonEmpty + insertifhc(k, hc, newvalue, oldvalue.asInstanceOf[AnyRef], fullEquals = true).nonEmpty + } + + override private[concurrent] def replaceRefEq(k: K, oldValue: V, newValue: V): Boolean = { + val hc = computeHash(k) + insertifhc(k, hc, newValue, oldValue.asInstanceOf[AnyRef], fullEquals = false).nonEmpty } def replace(k: K, v: V): Option[V] = { val hc = computeHash(k) - insertifhc(k, hc, v, INode.KEY_PRESENT) + insertifhc(k, hc, v, INode.KEY_PRESENT, fullEquals = false /* unused */) } def iterator: Iterator[(K, V)] = { @@ -1039,6 +1051,19 @@ object TrieMap extends MapFactory[TrieMap] { class MangledHashing[K] extends Hashing[K] { def hash(k: K): Int = scala.util.hashing.byteswap32(k.##) } + + private[concurrent] object RemovalPolicy { + final val Always = 0 + final val FullEquals = 1 + final val ReferenceEq = 2 + + def shouldRemove[V](removalPolicy: Int)(a: V, b: V): Boolean = + removalPolicy match { + case Always => true + case FullEquals => a == b + case ReferenceEq => a.asInstanceOf[AnyRef] eq b.asInstanceOf[AnyRef] + } + } } // non-final as an extension point for parallel collections From 0eb49ce841397ff16bee29cf6a432207d5fa095d Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Thu, 9 Sep 2021 09:52:29 +0200 Subject: [PATCH 377/769] Change `private[concurrent]` for methods used outside `concurrent` Mixing forwarders generated for `removeRefEq` / `replaceRefEq` in `JConcurrentMapWrapper` are outside package `concurrent`, the methods should be accessible. --- project/MimaFilters.scala | 7 ++----- src/library/scala/collection/concurrent/Map.scala | 4 ++-- src/library/scala/collection/concurrent/TrieMap.scala | 4 ++-- 3 files changed, 6 insertions(+), 9 deletions(-) diff --git a/project/MimaFilters.scala b/project/MimaFilters.scala index d3db0f35967f..31f5633182e6 100644 --- a/project/MimaFilters.scala +++ b/project/MimaFilters.scala @@ -73,11 +73,8 @@ object MimaFilters extends AutoPlugin { // #9733 ProblemFilters.exclude[MissingClassProblem]("scala.collection.concurrent.TrieMap$RemovalPolicy$"), // private[concurrent] - // is this a MiMa bug? 
we really should need these two filters - //ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.concurrent.TrieMap.removeRefEq"), // private[concurrent] - //ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.concurrent.TrieMap.replaceRefEq"), // private[concurrent] - ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.convert.JavaCollectionWrappers#JConcurrentMapWrapper.removeRefEq"), // private[concurrent] - ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.convert.JavaCollectionWrappers#JConcurrentMapWrapper.replaceRefEq"), // private[concurrent] + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.convert.JavaCollectionWrappers#JConcurrentMapWrapper.removeRefEq"), // private[collection] + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.convert.JavaCollectionWrappers#JConcurrentMapWrapper.replaceRefEq"), // private[collection] // #9741 ProblemFilters.exclude[MissingClassProblem]("scala.collection.immutable.SeqMap$SeqMapBuilderImpl"), // private[SeqMap] diff --git a/src/library/scala/collection/concurrent/Map.scala b/src/library/scala/collection/concurrent/Map.scala index 6343e443eda2..897a699a55a4 100644 --- a/src/library/scala/collection/concurrent/Map.scala +++ b/src/library/scala/collection/concurrent/Map.scala @@ -119,7 +119,7 @@ trait Map[K, V] extends scala.collection.mutable.Map[K, V] { * @return `true` if the removal took place, `false` otherwise */ // TODO: make part of the API in a future version - private[concurrent] def removeRefEq(k: K, v: V): Boolean = remove(k, v) + private[collection] def removeRefEq(k: K, v: V): Boolean = remove(k, v) /** * Replaces the entry for the given key only if it was previously mapped to @@ -138,7 +138,7 @@ trait Map[K, V] extends scala.collection.mutable.Map[K, V] { * @return `true` if the entry was replaced, `false` otherwise */ // TODO: make part of the API in a future version - private[concurrent] def replaceRefEq(k: K, oldValue: V, newValue: V): Boolean = replace(k, oldValue, newValue) + private[collection] def replaceRefEq(k: K, oldValue: V, newValue: V): Boolean = replace(k, oldValue, newValue) /** * Update a mapping for the specified key and its current optionally-mapped value diff --git a/src/library/scala/collection/concurrent/TrieMap.scala b/src/library/scala/collection/concurrent/TrieMap.scala index 1ce2ec05e538..cc1b08d91e14 100644 --- a/src/library/scala/collection/concurrent/TrieMap.scala +++ b/src/library/scala/collection/concurrent/TrieMap.scala @@ -972,7 +972,7 @@ final class TrieMap[K, V] private (r: AnyRef, rtupd: AtomicReferenceFieldUpdater removehc(k, v, RemovalPolicy.FullEquals, hc).nonEmpty } - override private[concurrent] def removeRefEq(k: K, v: V): Boolean = { + override private[collection] def removeRefEq(k: K, v: V): Boolean = { val hc = computeHash(k) removehc(k, v, RemovalPolicy.ReferenceEq, hc).nonEmpty } @@ -982,7 +982,7 @@ final class TrieMap[K, V] private (r: AnyRef, rtupd: AtomicReferenceFieldUpdater insertifhc(k, hc, newvalue, oldvalue.asInstanceOf[AnyRef], fullEquals = true).nonEmpty } - override private[concurrent] def replaceRefEq(k: K, oldValue: V, newValue: V): Boolean = { + override private[collection] def replaceRefEq(k: K, oldValue: V, newValue: V): Boolean = { val hc = computeHash(k) insertifhc(k, hc, newValue, oldValue.asInstanceOf[AnyRef], fullEquals = false).nonEmpty } From f09c28608e6a9f377dba86c550428eeffd69aaa5 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Mon, 13 Sep 2021 19:36:50 
-0700 Subject: [PATCH 378/769] add comments to Travis-CI config --- .travis.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.travis.yml b/.travis.yml index b0b6e9083e2a..6054dc79b62c 100644 --- a/.travis.yml +++ b/.travis.yml @@ -62,11 +62,11 @@ env: global: - ADOPTOPENJDK=8 - secure: "TuJOUtALynPd+MV1AuMeIpVb8BUBHr7Ul7FS48XhS2PyuTRpEBkSWybYcNg3AXyzmWDAuOjUxbaNMQBvP8vvehTbIYls5H5wTGKvj0D0TNVaPIXjF8bA8KyNat9xGNzhnWm2/2BMaWpKBJWRF7Jb+zHhijMYCJEbkMtoiE5R/mY=" # PRIV_KEY_SECRET, for scripts/travis-publish-spec.sh - - secure: "T1fxtvLTxioyXJYiC/zVYdNYsBOt+0Piw+xE04rB1pzeKahm9+G2mISdcAyqv6/vze9eIJt6jNHHpKX32/Z3Cs1/Ruha4m3k+jblj3S0SbxV6ht2ieJXLT5WoUPFRrU68KXI8wqUadXpjxeJJV53qF2FC4lhfMUsw1IwwMhdaE8=" # PRIVATE_REPO_PASS - - secure: "feE5A8mYNpkNQKVwCj3aXrwjVrJWh/4ENpRfFlr2HOD9ORk1GORD5Yq907WZd+dTkYK54Lh1gA+qHOCIDgJHbi9ZLU+kjzEjtYKF6lQy6Wb0LI8smTOnAA6IWVVYifiXw8d66MI2MKZb2jjGeIzy8Q00SZjLhEGjLyTeCIB88Ws=" # SONA_USER - - secure: "ek3As5q2tL8UBXcxSBbv4v5YgsoPD41SCzPOSu72kzfbngyxgQxrcziU5pIM+Lib9KaWex7hVVWNL38tMyDbu+0OpDv8bPjMujzlDx5I2pJUfuOJo7QRYsJE1nsXcY4cA72cCLfbRcLEkvtDAhcdLSaUOqlyQe5BY4X4fY5eoPA=" # SONA_PASS - - secure: "dbAvl6KEuLwZ0MVQPZihFsPzCdiLbX0EFk3so+hcfEbksrmLQ1tn4X5ZM7Wy1UDR8uN9lxngEwHch7a7lKqpugzmXMew9Wnikr9WBWbJT77Z+XJ/jHI6YuiCRpRo+nvxXGp9Ry80tSIgx5eju0J83IaJL41BWlBkvyAd7YAHORI=" # GPG_SUBKEY_SECRET - - secure: "RTyzS6nUgthupw5M0fPwTlcOym1sWgBo8eXYepB2xGiQnRu4g583BGuNBW1UZ3vIjRETi/UKQ1HtMR+i7D8ptF1cNpomopncVJA1iy7pU2w0MJ0xgIPMuvtkIa3kxocd/AnxAp+UhUad3nC8lDpkvZsUhhyA0fb4iPKipd2b2xY=" # TRAVIS_TOKEN (login with GitHub as SethTisue) + - secure: "T1fxtvLTxioyXJYiC/zVYdNYsBOt+0Piw+xE04rB1pzeKahm9+G2mISdcAyqv6/vze9eIJt6jNHHpKX32/Z3Cs1/Ruha4m3k+jblj3S0SbxV6ht2ieJXLT5WoUPFRrU68KXI8wqUadXpjxeJJV53qF2FC4lhfMUsw1IwwMhdaE8=" # PRIVATE_REPO_PASS, for publishing to scala-ci Artifactory + - secure: "feE5A8mYNpkNQKVwCj3aXrwjVrJWh/4ENpRfFlr2HOD9ORk1GORD5Yq907WZd+dTkYK54Lh1gA+qHOCIDgJHbi9ZLU+kjzEjtYKF6lQy6Wb0LI8smTOnAA6IWVVYifiXw8d66MI2MKZb2jjGeIzy8Q00SZjLhEGjLyTeCIB88Ws=" # SONA_USER, for publishing to Sonatype + - secure: "ek3As5q2tL8UBXcxSBbv4v5YgsoPD41SCzPOSu72kzfbngyxgQxrcziU5pIM+Lib9KaWex7hVVWNL38tMyDbu+0OpDv8bPjMujzlDx5I2pJUfuOJo7QRYsJE1nsXcY4cA72cCLfbRcLEkvtDAhcdLSaUOqlyQe5BY4X4fY5eoPA=" # SONA_PASS, for publishing to Sonatype + - secure: "dbAvl6KEuLwZ0MVQPZihFsPzCdiLbX0EFk3so+hcfEbksrmLQ1tn4X5ZM7Wy1UDR8uN9lxngEwHch7a7lKqpugzmXMew9Wnikr9WBWbJT77Z+XJ/jHI6YuiCRpRo+nvxXGp9Ry80tSIgx5eju0J83IaJL41BWlBkvyAd7YAHORI=" # GPG_SUBKEY_SECRET, so we can sign JARs + - secure: "RTyzS6nUgthupw5M0fPwTlcOym1sWgBo8eXYepB2xGiQnRu4g583BGuNBW1UZ3vIjRETi/UKQ1HtMR+i7D8ptF1cNpomopncVJA1iy7pU2w0MJ0xgIPMuvtkIa3kxocd/AnxAp+UhUad3nC8lDpkvZsUhhyA0fb4iPKipd2b2xY=" # TRAVIS_TOKEN (login with GitHub as SethTisue), for triggering scala-dist job # caching for sdkman / sbt / ivy / coursier imported from scala-dev cache: From ca9c932e6b7f694713752b2ed4a87bd7394bedf2 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Mon, 13 Sep 2021 19:47:02 -0700 Subject: [PATCH 379/769] re-encrypt SONA_USER and SONA_PASS --- .travis.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 6054dc79b62c..63a500e4fabe 100644 --- a/.travis.yml +++ b/.travis.yml @@ -63,10 +63,10 @@ env: - ADOPTOPENJDK=8 - secure: "TuJOUtALynPd+MV1AuMeIpVb8BUBHr7Ul7FS48XhS2PyuTRpEBkSWybYcNg3AXyzmWDAuOjUxbaNMQBvP8vvehTbIYls5H5wTGKvj0D0TNVaPIXjF8bA8KyNat9xGNzhnWm2/2BMaWpKBJWRF7Jb+zHhijMYCJEbkMtoiE5R/mY=" # PRIV_KEY_SECRET, for scripts/travis-publish-spec.sh - secure: 
"T1fxtvLTxioyXJYiC/zVYdNYsBOt+0Piw+xE04rB1pzeKahm9+G2mISdcAyqv6/vze9eIJt6jNHHpKX32/Z3Cs1/Ruha4m3k+jblj3S0SbxV6ht2ieJXLT5WoUPFRrU68KXI8wqUadXpjxeJJV53qF2FC4lhfMUsw1IwwMhdaE8=" # PRIVATE_REPO_PASS, for publishing to scala-ci Artifactory - - secure: "feE5A8mYNpkNQKVwCj3aXrwjVrJWh/4ENpRfFlr2HOD9ORk1GORD5Yq907WZd+dTkYK54Lh1gA+qHOCIDgJHbi9ZLU+kjzEjtYKF6lQy6Wb0LI8smTOnAA6IWVVYifiXw8d66MI2MKZb2jjGeIzy8Q00SZjLhEGjLyTeCIB88Ws=" # SONA_USER, for publishing to Sonatype - - secure: "ek3As5q2tL8UBXcxSBbv4v5YgsoPD41SCzPOSu72kzfbngyxgQxrcziU5pIM+Lib9KaWex7hVVWNL38tMyDbu+0OpDv8bPjMujzlDx5I2pJUfuOJo7QRYsJE1nsXcY4cA72cCLfbRcLEkvtDAhcdLSaUOqlyQe5BY4X4fY5eoPA=" # SONA_PASS, for publishing to Sonatype - secure: "dbAvl6KEuLwZ0MVQPZihFsPzCdiLbX0EFk3so+hcfEbksrmLQ1tn4X5ZM7Wy1UDR8uN9lxngEwHch7a7lKqpugzmXMew9Wnikr9WBWbJT77Z+XJ/jHI6YuiCRpRo+nvxXGp9Ry80tSIgx5eju0J83IaJL41BWlBkvyAd7YAHORI=" # GPG_SUBKEY_SECRET, so we can sign JARs - secure: "RTyzS6nUgthupw5M0fPwTlcOym1sWgBo8eXYepB2xGiQnRu4g583BGuNBW1UZ3vIjRETi/UKQ1HtMR+i7D8ptF1cNpomopncVJA1iy7pU2w0MJ0xgIPMuvtkIa3kxocd/AnxAp+UhUad3nC8lDpkvZsUhhyA0fb4iPKipd2b2xY=" # TRAVIS_TOKEN (login with GitHub as SethTisue), for triggering scala-dist job + - secure: "FvhicbSeys7VNTj9ZP/aNT0NhiQP/NNV0KRfK7IHxi3uOeaxFVfaQsln4lzqZn8dkntgzzNrE/VhvMIknfnISAPX7bShy6SRyj3V2BlcUpuem8WtwmkCaZ42xlCJteBL7NW0auG/8rxrNIAJXbRObqF+YdK6XsRMWaBMQHky+ss=" # SONA_USER, token username for publishing to Sonatype + - secure: "Y8CTlEdQbAS+P+LgkY05al/KSbccbX5BATm9N2GI9C6wH7oQuUU/VtU+bwvzeiF9DCsZPjrWXsa0JCuIQE+UzK1NWXxlkhUdGCaCBZ/nUecouBtMk2x/h7uIGpeYInxA041r5SuBecZuZQI79nhl+BwZSLu82Vy1QtP0/Cd8oRM=" # SONA_PASS, token password for publishing to Sonatype # caching for sdkman / sbt / ivy / coursier imported from scala-dev cache: From 829163c64962f83f35dcf2d83fb5a8a90ba66418 Mon Sep 17 00:00:00 2001 From: James Judd Date: Fri, 10 Sep 2021 18:47:32 -0600 Subject: [PATCH 380/769] Fix #12459: Warn instead of error if TASTy is not in sync with classfile This updates Scala 2.13 to match the current behavior in Scala 3 when TASTy is not in sync with classfile, which is to print a warning and suggest cleaning instead of erroring. The same change for Scala 3 happened in the following pull request: https://github.com/lampepfl/dotty/pull/9125 The Scala 3 change was made as a result of the following issue: https://github.com/lampepfl/dotty/issues/8839 The commit message from the Scala 3 fix is as follows: > Only warn if TASTy is not in sync with classfile > > If they are not in sync, we warn and suggest to clean. > We assume that the TASTy is up to date (arbitrary choise) and load it regardless. > This way we are resiliant to the failiure if the loaded class API did not change or > did not have an impact on the code we are compiling." 
--- .../scala/tools/nsc/symtab/classfile/ClassfileParser.scala | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index 6816c6d01940..da93a90d72c1 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -1265,7 +1265,12 @@ abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { val expectedUUID = new UUID(reader.readUncompressedLong(), reader.readUncompressedLong()) val tastyUUID = new TastyHeaderUnpickler(TASTYBytes).readHeader() if (expectedUUID != tastyUUID) { - reporter.error(NoPosition, s"Tasty UUID ($tastyUUID) file did not correspond the tasty UUID ($expectedUUID) declared in the classfile $file.") + loaders.warning( + NoPosition, + s"$file is out of sync with its TASTy file. Loaded TASTy file. Try cleaning the project to fix this issue", + WarningCategory.Other, + clazz.fullNameString + ) } TASTYBytes } From ae7669ba38a067fc01ded6d9fdd818725b8de22c Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Tue, 14 Sep 2021 14:30:28 -0700 Subject: [PATCH 381/769] re-STARR on 2.12.15 --- build.sbt | 2 +- project/MimaFilters.scala | 2 +- versions.properties | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/build.sbt b/build.sbt index 6b5ada594743..5bf8d015d125 100644 --- a/build.sbt +++ b/build.sbt @@ -95,7 +95,7 @@ lazy val publishSettings : Seq[Setting[_]] = Seq( // should not be set directly. It is the same as the Maven version and derived automatically from `baseVersion` and // `baseVersionSuffix`. globalVersionSettings -(Global / baseVersion) := "2.12.15" +(Global / baseVersion) := "2.12.16" (Global / baseVersionSuffix) := "SNAPSHOT" (ThisBuild / organization) := "org.scala-lang" (ThisBuild / homepage) := Some(url("https://www.scala-lang.org")) diff --git a/project/MimaFilters.scala b/project/MimaFilters.scala index 235be79b2afd..669be6e85058 100644 --- a/project/MimaFilters.scala +++ b/project/MimaFilters.scala @@ -13,7 +13,7 @@ object MimaFilters extends AutoPlugin { import autoImport._ override val globalSettings = Seq( - mimaReferenceVersion := Some("2.12.14"), + mimaReferenceVersion := Some("2.12.15"), ) val mimaFilters: Seq[ProblemFilter] = Seq[ProblemFilter]( diff --git a/versions.properties b/versions.properties index a267143cb781..dfcfe7df2ca8 100644 --- a/versions.properties +++ b/versions.properties @@ -1,5 +1,5 @@ # Scala version used for bootstrapping (see README.md) -starr.version=2.12.14 +starr.version=2.12.15 # The scala.binary.version determines how modules are resolved. It is set as follows: # - After 2.x.0 is released, the binary version is 2.x From 3ae1f5feb870a7ce2af32e55d80cefe2bfe63655 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Thu, 16 Sep 2021 10:03:16 +0200 Subject: [PATCH 382/769] Fix range position end for infix calls The range position for the Apply node `qual fun { arg }` ended at `arg` instead of the closing brace. The use of `in.lastOffset` in the patch is the same as in method `r2p` (which is used for non-infix calls). 
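As a concrete illustration, using one of the sources from the test added below (the object and the offset arithmetic here are only a sketch, not part of the patch): in the source for C5, the infix application `List(1) map { x => x }` starts at offset 19, and with this fix its range ends at offset 41, one past the closing brace of the block argument, whereas before the fix the Apply position stopped where the argument's own position ended.

    object InfixRangeExample {
      // One of the sources from infix-rangepos.scala; the test asserts start == 19 and end == 41.
      val src = "class C5 { def t = List(1) map { x => x } }"

      def main(args: Array[String]): Unit = {
        val start = src.indexOf("List")  // 19: where the infix application begins
        val end   = src.indexOf("}") + 1 // 41: one past the closing brace of the block argument
        println(s"expected Apply range: [$start, $end)")
      }
    }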
--- .../scala/tools/nsc/ast/parser/Parsers.scala | 2 +- test/files/run/infix-rangepos.scala | 21 +++++++++++++++++++ 2 files changed, 22 insertions(+), 1 deletion(-) create mode 100644 test/files/run/infix-rangepos.scala diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index cc2330eef1ec..a3404767cb89 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -924,7 +924,7 @@ self => def finishBinaryOp(isExpr: Boolean, opinfo: OpInfo, rhs: Tree): Tree = { import opinfo._ val operatorPos: Position = Position.range(rhs.pos.source, offset, offset, offset + operator.length) - val pos = lhs.pos union rhs.pos union operatorPos withPoint offset + val pos = lhs.pos.union(rhs.pos).union(operatorPos).withEnd(in.lastOffset).withPoint(offset) atPos(pos)(makeBinop(isExpr, lhs, operator, rhs, operatorPos, opinfo.targs)) } diff --git a/test/files/run/infix-rangepos.scala b/test/files/run/infix-rangepos.scala new file mode 100644 index 000000000000..8d2a16a0b536 --- /dev/null +++ b/test/files/run/infix-rangepos.scala @@ -0,0 +1,21 @@ +import scala.tools.partest._ + +object Test extends CompilerTest { + import global._ + override def extraSettings = super.extraSettings + " -Yrangepos" + override def sources = List( + "class C1 { def t = List(1).map ( x => x ) }", + "class C2 { def t = List(1).map { x => x } }", + "class C3 { def t = List(1).map ({x => x}) }", + "class C4 { def t = List(1) map ( x => x ) }", + "class C5 { def t = List(1) map { x => x } }", + "class C6 { def t = List(1) map ({x => x}) }") + + def check(source: String, unit: CompilationUnit): Unit = unit.body foreach { + case dd: DefDef if dd.name.startsWith("t") => + val pos = dd.rhs.pos + assert(pos.start == 19, pos.start) + assert(pos.end == 41, pos.end) + case _ => + } +} From be04c97056907a867a8f14f1b79d2d4d3f03c311 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Fri, 17 Sep 2021 11:00:41 +0200 Subject: [PATCH 383/769] Avoid `case _: C[_]#K` type test for higher-kinded `C` The code compiles fine with vanilla 2.12.15, but fails when enabling the semanticdb compiler plugin. I assume the plugin forces some `TypeTreeWithDeferredRefCheck` that's otherwise left alone. 
The deferred refcheck is created here: https://github.com/scala/scala/blob/v2.12.15/src/compiler/scala/tools/nsc/typechecker/Typers.scala#L5167 --- src/library/scala/collection/immutable/TreeMap.scala | 3 +-- src/library/scala/collection/immutable/TreeSet.scala | 3 +-- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala index 49e2f99d546a..9a83a0f9936e 100644 --- a/src/library/scala/collection/immutable/TreeMap.scala +++ b/src/library/scala/collection/immutable/TreeMap.scala @@ -404,11 +404,10 @@ final class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: } private def sameCBF(bf: CanBuildFrom[_,_,_]): Boolean = { bf match { - case cbf: SortedMapFactory[_]#SortedMapCanBuildFrom[_,_] => { + case cbf: TreeMap.SortedMapCanBuildFrom[_, _] => val factory:AnyRef = cbf.factory ((factory eq TreeMap) || (factory eq immutable.SortedMap) || (factory eq collection.SortedMap)) && cbf.ordering == ordering - } case w: WrappedCanBuildFrom[_,_,_] => sameCBF(w.wrapped) case _ => false } diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala index 6ce7aed096e0..5ca9fb9c7372 100644 --- a/src/library/scala/collection/immutable/TreeSet.scala +++ b/src/library/scala/collection/immutable/TreeSet.scala @@ -264,11 +264,10 @@ final class TreeSet[A] private[immutable] (private[immutable] val tree: RB.Tree[ private def sameCBF(bf: CanBuildFrom[_,_,_]): Boolean = { bf match { - case cbf: SortedSetFactory[_]#SortedSetCanBuildFrom[_] => { + case cbf: TreeSet.SortedSetCanBuildFrom[_] => val factory:AnyRef = cbf.factory ((factory eq TreeSet) || (factory eq immutable.SortedSet) || (factory eq collection.SortedSet)) && cbf.ordering == ordering - } case w: WrappedCanBuildFrom[_,_,_] => sameCBF(w.wrapped) case _ => false } From 94a1f868f9e441e2f3fc6bc507ff0a17d3551ac4 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Tue, 21 Sep 2021 14:15:42 -0600 Subject: [PATCH 384/769] test on JDK 18-ea it's not clear what we "ought" to do here. SDKMAN dropped 17 early access builds (scala/scala-dev#788), but Temurin (aka Adoptium/AdoptOpenJDK) 17 isn't available yet. we could use a Zulu or Java.net build, but I suggest we simply start testing on 18 instead, for now anyway. once Temurin 17 is out we could consider also adding it --- .travis.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.travis.yml b/.travis.yml index 1d7c481eae30..57277fa73e28 100644 --- a/.travis.yml +++ b/.travis.yml @@ -12,9 +12,9 @@ templates: # this has no effect on travis, it's just a place to put our template pr-jdk8: &pr-jdk8 if: type = pull_request OR repo != scala/scala - cron-jdk17: &cron-jdk17 + cron-jdk18: &cron-jdk18 if: type = cron AND repo = scala/scala - env: ADOPTOPENJDK=17 + env: ADOPTOPENJDK=18 build-for-testing: &build-for-testing # pull request validation (w/ bootstrap) @@ -97,13 +97,13 @@ jobs: <<: *pr-jdk8 - <<: *build-for-testing - <<: *cron-jdk17 + <<: *cron-jdk18 - <<: *test1 - <<: *cron-jdk17 + <<: *cron-jdk18 - <<: *test2 - <<: *cron-jdk17 + <<: *cron-jdk18 - stage: test name: build library with Scala 3 From e105c2e411228282527e35639800254ac1f55217 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Doeraene?= Date: Wed, 22 Sep 2021 18:04:15 +0200 Subject: [PATCH 385/769] Remove unused AssertUtil.assert8. 
The existence of that method prevents the file from compiling with Scala.js, which in turns prevents many JUnit tests to be tested in the Scala.js build. --- test/junit/scala/tools/testing/AssertUtil.scala | 16 ---------------- 1 file changed, 16 deletions(-) diff --git a/test/junit/scala/tools/testing/AssertUtil.scala b/test/junit/scala/tools/testing/AssertUtil.scala index b18421d0daa6..e59b70523d53 100644 --- a/test/junit/scala/tools/testing/AssertUtil.scala +++ b/test/junit/scala/tools/testing/AssertUtil.scala @@ -1,15 +1,6 @@ package scala.tools package testing -import org.junit.Assert -import Assert._ -import scala.reflect.ClassTag -import scala.runtime.ScalaRunTime.stringOf -import scala.collection.GenIterable -import scala.collection.JavaConverters._ -import scala.collection.mutable -import scala.tools.nsc.settings.ScalaVersion -import scala.util.Properties.javaSpecVersion import java.lang.ref._ import java.lang.reflect.{Field, Modifier} import java.util.IdentityHashMap @@ -123,11 +114,4 @@ object AssertUtil { body roots.foreach(assertNoRef) } - - private[this] val version8 = ScalaVersion("8") - - /** Assert on Java 8, but on later versions, just print if assert would fail. */ - def assert8(b: => Boolean, msg: => Any) = - if (ScalaVersion(javaSpecVersion) == version8) assert(b, msg) - else if (!b) println(s"assert not $msg") } From 76150395e3343032df01e91eea82bdeec5ea94a3 Mon Sep 17 00:00:00 2001 From: NthPortal Date: Fri, 24 Sep 2021 04:03:13 -0400 Subject: [PATCH 386/769] Minor cleanup of ScalaCheck properties --- .../ImmutableChampHashSetProperties.scala | 2 +- .../collection/immutable/ListProperties.scala | 2 +- .../immutable/VectorMapProperties.scala | 18 ++++++++++-------- 3 files changed, 12 insertions(+), 10 deletions(-) diff --git a/test/scalacheck/scala/collection/immutable/ImmutableChampHashSetProperties.scala b/test/scalacheck/scala/collection/immutable/ImmutableChampHashSetProperties.scala index 7331f78c64b3..257460e5cb37 100644 --- a/test/scalacheck/scala/collection/immutable/ImmutableChampHashSetProperties.scala +++ b/test/scalacheck/scala/collection/immutable/ImmutableChampHashSetProperties.scala @@ -53,7 +53,7 @@ object ImmutableChampHashSetProperties extends Properties("immutable.HashSet") { } property("notContainedAfterInsertRemove") = forAll { (input: HashSet[K], item: K) => - (input + item - item).contains(item) == false + !(input + item - item).contains(item) } property("intersectIdentityReference") = forAll { (inputShared: HashSet[K]) => diff --git a/test/scalacheck/scala/collection/immutable/ListProperties.scala b/test/scalacheck/scala/collection/immutable/ListProperties.scala index 99e85d4fdb7f..958910e1e136 100644 --- a/test/scalacheck/scala/collection/immutable/ListProperties.scala +++ b/test/scalacheck/scala/collection/immutable/ListProperties.scala @@ -36,7 +36,7 @@ object ListProperties extends Properties("immutable.List") { property("list1 ::: list2 == list1.toVector.prependedAll(list2)") = forAll { (list1: List[Int], list2: List[Int]) => (list1.prependedAll(list2): Seq[Int]) ?= list1.toVector.prependedAll(list2) } - property("list1.prependedAll(iterableOnce) == list1.prependedAll(iterableOnce)") = + property("list1.prependedAll(iterableOnce) == list1.toVector.prependedAll(iterableOnce)") = forAll(arbitrary[List[Int]], iterableOnceGen){ (list1, it) => (list1.prependedAll(it()): Seq[Int]) ?= list1.toVector.prependedAll(it()) } diff --git a/test/scalacheck/scala/collection/immutable/VectorMapProperties.scala 
b/test/scalacheck/scala/collection/immutable/VectorMapProperties.scala index f8949c9ee4fa..1253c6804a95 100644 --- a/test/scalacheck/scala/collection/immutable/VectorMapProperties.scala +++ b/test/scalacheck/scala/collection/immutable/VectorMapProperties.scala @@ -31,15 +31,17 @@ object VectorMapProperties extends Properties("immutable.VectorMap") { property("internal underlying and index are consistent after removal") = forAll { (m: Map[K, V]) => m.size >= 3 ==> { - val v = Vector.from(m) - val random = v(new scala.util.Random().nextInt(v.size)) - val vm = VectorMap.from(v) + val v = Vector.from(m) + val random = v(new scala.util.Random().nextInt(v.size)) + val vm = VectorMap.from(v) val removed = vm - random._1 - removed.underlying.forall { case (k, (s, v)) => removed.fields(s) == k } - removed.fields.zipWithIndex.forall { - case (k: K, s) => removed.underlying(k)._1 == s - case _ => true - } + ("all map keys are located at the specified indices in the vector" |: + removed.underlying.forall { case (k, (s, v)) => removed.fields(s) == k }) && + ("all elements in the vector are in the map with the correct associated indices" |: + removed.fields.zipWithIndex.forall { + case (k: K, s) => removed.underlying(k)._1 == s + case _ => true + }) } } From faacba30aa05d0bba62b3de9c7dd459861bccb04 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Sat, 25 Sep 2021 18:50:03 -0700 Subject: [PATCH 387/769] Module does not trigger missing interpolator `"$Void"` is not missing interpolator. Modules generally don't have interesting `toString`, let alone companions of common Java types or primitives. For simplicity, ignore modules for purposes of warning. --- .../scala/tools/nsc/typechecker/Typers.scala | 2 +- test/files/neg/forgot-interpolator.scala | 18 ++++++++++++++++++ 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 58cb1a525d64..f0d111d66813 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -5843,7 +5843,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } def isNullaryTerm: Boolean = { val maybe = context.lookupSymbol(TermName(id), _ => true).symbol - maybe != NoSymbol && !maybe.hasPackageFlag && maybe.alternatives.exists(x => requiresNoArgs(x.info)) + maybe != NoSymbol && !maybe.hasPackageFlag && !maybe.isModule && maybe.alternatives.exists(x => requiresNoArgs(x.info)) } id == "this" || isNullaryTerm } diff --git a/test/files/neg/forgot-interpolator.scala b/test/files/neg/forgot-interpolator.scala index 7ffc7eace4dc..cc00f9170701 100644 --- a/test/files/neg/forgot-interpolator.scala +++ b/test/files/neg/forgot-interpolator.scala @@ -93,3 +93,21 @@ package curry { def f5 = "I draw the line at $palomino" // no warn } } + +package companions { + class X + object X + class C { + def f1 = "$X" // nowarn companion + def f2 = "$Byte" // nowarn companion + def f3 = "$Char" // nowarn companion + def f4 = "$Short" // nowarn companion + def f5 = "$Int" // nowarn companion + def f6 = "$Float" // nowarn companion + def f7 = "$Double" // nowarn companion + def f8 = "$Character" // nowarn companion + def f9 = "$Integer" // nowarn companion + def f0 = "$companions" // nowarn companion + } +} +package object companions From a3dca986a5b47b0f9190da9034003e038a9a4831 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Mon, 27 Sep 2021 19:02:46 -0600 Subject: [PATCH 388/769] Revert "test 
on JDK 18-ea" This reverts commit 94a1f868f9e441e2f3fc6bc507ff0a17d3551ac4. --- .travis.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.travis.yml b/.travis.yml index 57277fa73e28..1d7c481eae30 100644 --- a/.travis.yml +++ b/.travis.yml @@ -12,9 +12,9 @@ templates: # this has no effect on travis, it's just a place to put our template pr-jdk8: &pr-jdk8 if: type = pull_request OR repo != scala/scala - cron-jdk18: &cron-jdk18 + cron-jdk17: &cron-jdk17 if: type = cron AND repo = scala/scala - env: ADOPTOPENJDK=18 + env: ADOPTOPENJDK=17 build-for-testing: &build-for-testing # pull request validation (w/ bootstrap) @@ -97,13 +97,13 @@ jobs: <<: *pr-jdk8 - <<: *build-for-testing - <<: *cron-jdk18 + <<: *cron-jdk17 - <<: *test1 - <<: *cron-jdk18 + <<: *cron-jdk17 - <<: *test2 - <<: *cron-jdk18 + <<: *cron-jdk17 - stage: test name: build library with Scala 3 From e5af544579ed69ced1f3ef385f0253dc35a5e9e7 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Tue, 28 Sep 2021 16:43:11 +0200 Subject: [PATCH 389/769] test to ensure collection conversions conserve immutable --- .../scala/collection/ToConserveTest.scala | 90 +++++++++++++++++++ 1 file changed, 90 insertions(+) create mode 100644 test/junit/scala/collection/ToConserveTest.scala diff --git a/test/junit/scala/collection/ToConserveTest.scala b/test/junit/scala/collection/ToConserveTest.scala new file mode 100644 index 000000000000..8a994027999e --- /dev/null +++ b/test/junit/scala/collection/ToConserveTest.scala @@ -0,0 +1,90 @@ +package scala.collection + +import org.junit.Assert.{assertNotSame, assertSame} +import org.junit.Test +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 + +import scala.collection.{immutable => i, mutable => m} +import scala.language.implicitConversions +import scala.{collection => c} + +@RunWith(classOf[JUnit4]) +class ToConserveTest { + // scala/bug#12188 + implicit def toAnyRefFactory[A, CC[_] <: AnyRef](factory: c.IterableFactory[CC]): c.Factory[A, AnyRef] = + c.IterableFactory.toFactory(factory) + implicit def toFactory[K, V, CC[_, _] <: AnyRef](factory: MapFactory[CC]): Factory[(K, V), AnyRef] = + c.MapFactory.toFactory(factory) + + @Test def toConserveList: Unit = { + val l: c.Iterable[Int] = (1 to 3).toList + + assertSame(l, l.toList) + assertSame(l, l.toSeq) + assertSame(l, l.toIterable) + + assertSame(l, l.to(List)) + + assertSame(l, l.to(c.Iterable)) + assertSame(l, l.to(i.Iterable)) + + assertSame(l, l.to(c.Seq)) + assertSame(l, l.to(i.Seq)) + + assertSame(l, l.to(c.LinearSeq)) + assertSame(l, l.to(i.LinearSeq)) + + assertSame(l, l.to(List)) + } + + @Test def toConserveImmutableHashSet: Unit = { + val s: c.Iterable[Int] = (1 to 10).to(immutable.HashSet) + assertSame(s, s.toSet) + assertSame(s, s.toIterable) + + assertSame(s, s.to(c.Iterable)) + assertSame(s, s.to(i.Iterable)) + + assertSame(s, s.to(c.Set)) + assertSame(s, s.to(i.Set)) + + assertSame(s, s.to(i.HashSet)) + } + + @Test def toConserveImmutableHashMap: Unit = { + val m: c.Iterable[(Int, Int)] = (1 to 10).map(x => (x, x)).to(immutable.HashMap): i.Map[Int, Int] + + assertSame(m, m.toMap) + assertSame(m, m.toIterable) + + assertSame(m, m.to(c.Iterable)) + assertSame(m, m.to(i.Iterable)) + + assertSame(m, m.to(c.Map)) + assertSame(m, m.to(i.Map)) + + assertSame(m, m.to(i.HashMap)) + } + + @Test def toRebuildMutable: Unit = { + val s: c.Iterable[Int] = (1 to 3).to(m.HashSet) + assertSame(s, s.toIterable) // slightly inconsistent... 
+ assertNotSame(s, s.to(c.Iterable)) + assertNotSame(s, s.to(m.Iterable)) + assertNotSame(s, s.to(c.Set)) + assertNotSame(s, s.to(m.Set)) + assertNotSame(s, s.to(m.HashSet)) + + val b: c.Iterable[Int] = (1 to 6).to(m.ArrayBuffer) + assertSame(b, b.toIterable) // slightly inconsistent... + assertNotSame(b, b.toBuffer) + assertNotSame(b, b.to(c.Iterable)) + assertNotSame(b, b.to(m.Iterable)) + assertNotSame(b, b.to(c.Seq)) + assertNotSame(b, b.to(m.Seq)) + assertNotSame(b, b.to(m.Buffer)) + assertNotSame(b, b.to(m.IndexedBuffer)) + assertNotSame(b, b.to(m.ArrayBuffer)) + } +} From 82c150709e7672edb4a846e4007d2743704f83da Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Tue, 28 Sep 2021 19:32:01 -0600 Subject: [PATCH 390/769] Windows CI: test on 17 final (not early-access) --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 65f8d9429d88..228e6e1f1afc 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -17,7 +17,7 @@ jobs: matrix: include: - java: 8 - - java: 17-ea + - java: 17 steps: - run: git config --global core.autocrlf false - name: Checkout From f4100694c9091b5f1dc0f8edeef7f5a4134cf313 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Wed, 29 Sep 2021 10:31:12 +0200 Subject: [PATCH 391/769] reduce usages of toIterable --- src/library/scala/Enumeration.scala | 4 ++-- src/library/scala/collection/BitSet.scala | 6 +++--- src/library/scala/collection/LinearSeq.scala | 2 +- src/library/scala/collection/Map.scala | 16 ++++++++-------- src/library/scala/collection/Seq.scala | 2 +- src/library/scala/collection/Set.scala | 12 ++++++------ src/library/scala/collection/SortedMap.scala | 12 ++++++------ src/library/scala/collection/SortedSet.scala | 8 ++++---- .../collection/StrictOptimizedIterableOps.scala | 4 ++-- .../scala/collection/immutable/IntMap.scala | 4 ++-- src/library/scala/collection/immutable/Set.scala | 2 +- .../scala/collection/mutable/AnyRefMap.scala | 6 +++--- .../scala/collection/mutable/Builder.scala | 6 ++++-- .../mutable/CollisionProofHashMap.scala | 12 ++++++------ src/library/scala/collection/mutable/Map.scala | 2 +- src/library/scala/collection/mutable/Seq.scala | 2 +- src/library/scala/collection/mutable/Set.scala | 4 ++-- .../tools/nsc/doc/html/page/IndexScript.scala | 2 +- .../collection/immutable/ArraySeqBenchmark.scala | 2 +- test/files/run/colltest1.scala | 3 +-- test/files/run/t4930.scala | 2 +- .../collection/mutable/ArraySortingTest.scala | 2 +- 22 files changed, 58 insertions(+), 57 deletions(-) diff --git a/src/library/scala/Enumeration.scala b/src/library/scala/Enumeration.scala index 7b6d77827e72..831ad8682fcc 100644 --- a/src/library/scala/Enumeration.scala +++ b/src/library/scala/Enumeration.scala @@ -310,8 +310,8 @@ abstract class Enumeration (initial: Int) extends Serializable { override protected def fromSpecific(coll: IterableOnce[Value]): ValueSet = ValueSet.fromSpecific(coll) override protected def newSpecificBuilder = ValueSet.newBuilder - def map(f: Value => Value): ValueSet = fromSpecific(new View.Map(toIterable, f)) - def flatMap(f: Value => IterableOnce[Value]): ValueSet = fromSpecific(new View.FlatMap(toIterable, f)) + def map(f: Value => Value): ValueSet = fromSpecific(new View.Map(this, f)) + def flatMap(f: Value => IterableOnce[Value]): ValueSet = fromSpecific(new View.FlatMap(this, f)) // necessary for disambiguation: override def map[B](f: Value => B)(implicit @implicitNotFound(ValueSet.ordMsg) ev: Ordering[B]): 
immutable.SortedSet[B] = diff --git a/src/library/scala/collection/BitSet.scala b/src/library/scala/collection/BitSet.scala index ccac61a72a9c..e8ca89806455 100644 --- a/src/library/scala/collection/BitSet.scala +++ b/src/library/scala/collection/BitSet.scala @@ -295,11 +295,11 @@ trait BitSetOps[+C <: BitSet with BitSetOps[C]] * @return a new bitset resulting from applying the given function ''f'' to * each element of this bitset and collecting the results */ - def map(f: Int => Int): C = fromSpecific(new View.Map(toIterable, f)) + def map(f: Int => Int): C = fromSpecific(new View.Map(this, f)) - def flatMap(f: Int => IterableOnce[Int]): C = fromSpecific(new View.FlatMap(toIterable, f)) + def flatMap(f: Int => IterableOnce[Int]): C = fromSpecific(new View.FlatMap(this, f)) - def collect(pf: PartialFunction[Int, Int]): C = fromSpecific(super[SortedSetOps].collect(pf).toIterable) + def collect(pf: PartialFunction[Int, Int]): C = fromSpecific(super[SortedSetOps].collect(pf)) override def partition(p: Int => Boolean): (C, C) = { val left = filter(p) diff --git a/src/library/scala/collection/LinearSeq.scala b/src/library/scala/collection/LinearSeq.scala index fdee005723b5..7dc67096fbfd 100644 --- a/src/library/scala/collection/LinearSeq.scala +++ b/src/library/scala/collection/LinearSeq.scala @@ -248,7 +248,7 @@ trait LinearSeqOps[+A, +CC[X] <: LinearSeq[X], +C <: LinearSeq[A] with LinearSeq trait StrictOptimizedLinearSeqOps[+A, +CC[X] <: LinearSeq[X], +C <: LinearSeq[A] with StrictOptimizedLinearSeqOps[A, CC, C]] extends Any with LinearSeqOps[A, CC, C] with StrictOptimizedSeqOps[A, CC, C] { // A more efficient iterator implementation than the default LinearSeqIterator override def iterator: Iterator[A] = new AbstractIterator[A] { - private[this] var current: Iterable[A] = toIterable + private[this] var current = StrictOptimizedLinearSeqOps.this def hasNext = !current.isEmpty def next() = { val r = current.head; current = current.tail; r } } diff --git a/src/library/scala/collection/Map.scala b/src/library/scala/collection/Map.scala index 44ebf10025d0..c9ccfc986f16 100644 --- a/src/library/scala/collection/Map.scala +++ b/src/library/scala/collection/Map.scala @@ -71,7 +71,7 @@ trait Map[K, +V] false }) - override def hashCode(): Int = MurmurHash3.mapHash(toIterable) + override def hashCode(): Int = MurmurHash3.mapHash(this) // These two methods are not in MapOps so that MapView is not forced to implement them @deprecated("Use - or removed on an immutable Map", "2.13.0") @@ -296,7 +296,7 @@ trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] * @return a new $coll resulting from applying the given function * `f` to each element of this $coll and collecting the results. */ - def map[K2, V2](f: ((K, V)) => (K2, V2)): CC[K2, V2] = mapFactory.from(new View.Map(toIterable, f)) + def map[K2, V2](f: ((K, V)) => (K2, V2)): CC[K2, V2] = mapFactory.from(new View.Map(this, f)) /** Builds a new collection by applying a partial function to all elements of this $coll * on which the function is defined. @@ -309,7 +309,7 @@ trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] * The order of the elements is preserved. */ def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)]): CC[K2, V2] = - mapFactory.from(new View.Collect(toIterable, pf)) + mapFactory.from(new View.Collect(this, pf)) /** Builds a new map by applying a function to all elements of this $coll * and using the elements of the resulting collections. 
@@ -318,7 +318,7 @@ trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] * @return a new $coll resulting from applying the given collection-valued function * `f` to each element of this $coll and concatenating the results. */ - def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]): CC[K2, V2] = mapFactory.from(new View.FlatMap(toIterable, f)) + def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]): CC[K2, V2] = mapFactory.from(new View.FlatMap(this, f)) /** Returns a new $coll containing the elements from the left hand operand followed by the elements from the * right hand operand. The element type of the $coll is the most specific superclass encompassing @@ -329,7 +329,7 @@ trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] * of this $coll followed by all elements of `suffix`. */ def concat[V2 >: V](suffix: collection.IterableOnce[(K, V2)]): CC[K, V2] = mapFactory.from(suffix match { - case it: Iterable[(K, V2)] => new View.Concat(toIterable, it) + case it: Iterable[(K, V2)] => new View.Concat(this, it) case _ => iterator.concat(suffix.iterator) }) @@ -343,11 +343,11 @@ trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] @deprecated("Consider requiring an immutable Map or fall back to Map.concat.", "2.13.0") def + [V1 >: V](kv: (K, V1)): CC[K, V1] = - mapFactory.from(new View.Appended(toIterable, kv)) + mapFactory.from(new View.Appended(this, kv)) @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CC[K, V1] = - mapFactory.from(new View.Concat(new View.Appended(new View.Appended(toIterable, elem1), elem2), elems)) + mapFactory.from(new View.Concat(new View.Appended(new View.Appended(this, elem1), elem2), elems)) @deprecated("Consider requiring an immutable Map.", "2.13.0") @`inline` def -- (keys: IterableOnce[K]): C = { @@ -361,7 +361,7 @@ trait MapOps[K, +V, +CC[_, _] <: IterableOps[_, AnyConstr, _], +C] case that: Iterable[(K, V1)] => that case that => View.from(that) } - mapFactory.from(new View.Concat(thatIterable, toIterable)) + mapFactory.from(new View.Concat(thatIterable, this)) } } diff --git a/src/library/scala/collection/Seq.scala b/src/library/scala/collection/Seq.scala index c0a0da8577cb..04b2e911c3cd 100644 --- a/src/library/scala/collection/Seq.scala +++ b/src/library/scala/collection/Seq.scala @@ -38,7 +38,7 @@ trait Seq[+A] case _ => false }) - override def hashCode(): Int = MurmurHash3.seqHash(toIterable) + override def hashCode(): Int = MurmurHash3.seqHash(this) override def toString(): String = super[Iterable].toString() diff --git a/src/library/scala/collection/Set.scala b/src/library/scala/collection/Set.scala index 784e7e8a4fc5..151b04ef1a4a 100644 --- a/src/library/scala/collection/Set.scala +++ b/src/library/scala/collection/Set.scala @@ -70,7 +70,7 @@ trait Set[A] false }) - override def hashCode(): Int = MurmurHash3.setHash(toIterable) + override def hashCode(): Int = MurmurHash3.setHash(this) override def iterableFactory: IterableFactory[Set] = Set @@ -115,7 +115,7 @@ trait SetOps[A, +CC[_], +C <: SetOps[A, CC, C]] */ def subsets(len: Int): Iterator[C] = { if (len < 0 || len > size) Iterator.empty - else new SubsetsItr(toIterable.to(IndexedSeq), len) + else new SubsetsItr(this.to(IndexedSeq), len) } /** An iterator over all subsets of this set. @@ -123,7 +123,7 @@ trait SetOps[A, +CC[_], +C <: SetOps[A, CC, C]] * @return the iterator. 
*/ def subsets(): Iterator[C] = new AbstractIterator[C] { - private[this] val elms = toIterable.to(IndexedSeq) + private[this] val elms = SetOps.this.to(IndexedSeq) private[this] var len = 0 private[this] var itr: Iterator[C] = Iterator.empty @@ -221,15 +221,15 @@ trait SetOps[A, +CC[_], +C <: SetOps[A, CC, C]] * @return a new $coll with the given elements added, omitting duplicates. */ def concat(that: collection.IterableOnce[A]): C = fromSpecific(that match { - case that: collection.Iterable[A] => new View.Concat(toIterable, that) + case that: collection.Iterable[A] => new View.Concat(this, that) case _ => iterator.concat(that.iterator) }) @deprecated("Consider requiring an immutable Set or fall back to Set.union", "2.13.0") - def + (elem: A): C = fromSpecific(new View.Appended(toIterable, elem)) + def + (elem: A): C = fromSpecific(new View.Appended(this, elem)) @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") - def + (elem1: A, elem2: A, elems: A*): C = fromSpecific(new View.Concat(new View.Appended(new View.Appended(toIterable, elem1), elem2), elems)) + def + (elem1: A, elem2: A, elems: A*): C = fromSpecific(new View.Concat(new View.Appended(new View.Appended(this, elem1), elem2), elems)) /** Alias for `concat` */ @`inline` final def ++ (that: collection.IterableOnce[A]): C = concat(that) diff --git a/src/library/scala/collection/SortedMap.scala b/src/library/scala/collection/SortedMap.scala index 86cad03869ec..03ab0bb0dadc 100644 --- a/src/library/scala/collection/SortedMap.scala +++ b/src/library/scala/collection/SortedMap.scala @@ -153,7 +153,7 @@ trait SortedMapOps[K, +V, +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], * `f` to each element of this $coll and collecting the results. */ def map[K2, V2](f: ((K, V)) => (K2, V2))(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = - sortedMapFactory.from(new View.Map[(K, V), (K2, V2)](toIterable, f)) + sortedMapFactory.from(new View.Map[(K, V), (K2, V2)](this, f)) /** Builds a new sorted map by applying a function to all elements of this $coll * and using the elements of the resulting collections. @@ -163,7 +163,7 @@ trait SortedMapOps[K, +V, +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], * `f` to each element of this $coll and concatenating the results. */ def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = - sortedMapFactory.from(new View.FlatMap(toIterable, f)) + sortedMapFactory.from(new View.FlatMap(this, f)) /** Builds a new sorted map by applying a partial function to all elements of this $coll * on which the function is defined. @@ -174,10 +174,10 @@ trait SortedMapOps[K, +V, +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], * The order of the elements is preserved. 
*/ def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = - sortedMapFactory.from(new View.Collect(toIterable, pf)) + sortedMapFactory.from(new View.Collect(this, pf)) override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]): CC[K, V2] = sortedMapFactory.from(suffix match { - case it: Iterable[(K, V2)] => new View.Concat(toIterable, it) + case it: Iterable[(K, V2)] => new View.Concat(this, it) case _ => iterator.concat(suffix.iterator) })(ordering) @@ -185,10 +185,10 @@ trait SortedMapOps[K, +V, +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], @`inline` override final def ++ [V2 >: V](xs: IterableOnce[(K, V2)]): CC[K, V2] = concat(xs) @deprecated("Consider requiring an immutable Map or fall back to Map.concat", "2.13.0") - override def + [V1 >: V](kv: (K, V1)): CC[K, V1] = sortedMapFactory.from(new View.Appended(toIterable, kv))(ordering) + override def + [V1 >: V](kv: (K, V1)): CC[K, V1] = sortedMapFactory.from(new View.Appended(this, kv))(ordering) @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") - override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CC[K, V1] = sortedMapFactory.from(new View.Concat(new View.Appended(new View.Appended(toIterable, elem1), elem2), elems))(ordering) + override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CC[K, V1] = sortedMapFactory.from(new View.Concat(new View.Appended(new View.Appended(this, elem1), elem2), elems))(ordering) } object SortedMapOps { diff --git a/src/library/scala/collection/SortedSet.scala b/src/library/scala/collection/SortedSet.scala index 4bbe8576802a..c98ca9ae5523 100644 --- a/src/library/scala/collection/SortedSet.scala +++ b/src/library/scala/collection/SortedSet.scala @@ -118,7 +118,7 @@ trait SortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[A, CC, C]] * `f` to each element of this $coll and collecting the results. */ def map[B](f: A => B)(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] = - sortedIterableFactory.from(new View.Map(toIterable, f)) + sortedIterableFactory.from(new View.Map(this, f)) /** Builds a new sorted collection by applying a function to all elements of this $coll * and using the elements of the resulting collections. @@ -129,7 +129,7 @@ trait SortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[A, CC, C]] * `f` to each element of this $coll and concatenating the results. */ def flatMap[B](f: A => IterableOnce[B])(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] = - sortedIterableFactory.from(new View.FlatMap(toIterable, f)) + sortedIterableFactory.from(new View.FlatMap(this, f)) /** Returns a $coll formed from this $coll and another iterable collection * by combining corresponding elements in pairs. @@ -142,7 +142,7 @@ trait SortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[A, CC, C]] */ def zip[B](that: IterableOnce[B])(implicit @implicitNotFound(SortedSetOps.zipOrdMsg) ev: Ordering[(A @uncheckedVariance, B)]): CC[(A @uncheckedVariance, B)] = // sound bcs of VarianceNote sortedIterableFactory.from(that match { - case that: Iterable[B] => new View.Zip(toIterable, that) + case that: Iterable[B] => new View.Zip(this, that) case _ => iterator.zip(that) }) @@ -156,7 +156,7 @@ trait SortedSetOps[A, +CC[X] <: SortedSet[X], +C <: SortedSetOps[A, CC, C]] * The order of the elements is preserved. 
*/ def collect[B](pf: scala.PartialFunction[A, B])(implicit @implicitNotFound(SortedSetOps.ordMsg) ev: Ordering[B]): CC[B] = - sortedIterableFactory.from(new View.Collect(toIterable, pf)) + sortedIterableFactory.from(new View.Collect(this, pf)) } object SortedSetOps { diff --git a/src/library/scala/collection/StrictOptimizedIterableOps.scala b/src/library/scala/collection/StrictOptimizedIterableOps.scala index 3429c2aa4837..a2d6fbaadb1a 100644 --- a/src/library/scala/collection/StrictOptimizedIterableOps.scala +++ b/src/library/scala/collection/StrictOptimizedIterableOps.scala @@ -203,7 +203,7 @@ trait StrictOptimizedIterableOps[+A, +CC[_], +C] override def scanLeft[B](z: B)(op: (B, A) => B): CC[B] = { val b = iterableFactory.newBuilder[B] - b.sizeHint(toIterable, delta = 0) + b.sizeHint(this, delta = 0) var acc = z b += acc val it = iterator @@ -271,7 +271,7 @@ trait StrictOptimizedIterableOps[+A, +CC[_], +C] */ override def dropRight(n: Int): C = { val b = newSpecificBuilder - if (n >= 0) b.sizeHint(toIterable, delta = -n) + if (n >= 0) b.sizeHint(this, delta = -n) val lead = iterator drop n val it = iterator while (lead.hasNext) { diff --git a/src/library/scala/collection/immutable/IntMap.scala b/src/library/scala/collection/immutable/IntMap.scala index f3055deb0812..240821b11460 100644 --- a/src/library/scala/collection/immutable/IntMap.scala +++ b/src/library/scala/collection/immutable/IntMap.scala @@ -323,9 +323,9 @@ sealed abstract class IntMap[+T] extends AbstractMap[Int, T] case IntMap.Nil => IntMap.Tip(key, value) } - def map[V2](f: ((Int, T)) => (Int, V2)): IntMap[V2] = intMapFrom(new View.Map(toIterable, f)) + def map[V2](f: ((Int, T)) => (Int, V2)): IntMap[V2] = intMapFrom(new View.Map(this, f)) - def flatMap[V2](f: ((Int, T)) => IterableOnce[(Int, V2)]): IntMap[V2] = intMapFrom(new View.FlatMap(toIterable, f)) + def flatMap[V2](f: ((Int, T)) => IterableOnce[(Int, V2)]): IntMap[V2] = intMapFrom(new View.FlatMap(this, f)) override def concat[V1 >: T](that: collection.IterableOnce[(Int, V1)]): IntMap[V1] = super.concat(that).asInstanceOf[IntMap[V1]] // Already has correct type but not declared as such diff --git a/src/library/scala/collection/immutable/Set.scala b/src/library/scala/collection/immutable/Set.scala index a8562a878f96..f07eb66991c8 100644 --- a/src/library/scala/collection/immutable/Set.scala +++ b/src/library/scala/collection/immutable/Set.scala @@ -57,7 +57,7 @@ trait SetOps[A, +CC[X], +C <: SetOps[A, CC, C]] @`inline` final override def - (elem: A): C = excl(elem) def diff(that: collection.Set[A]): C = - toIterable.foldLeft(empty)((result, elem) => if (that contains elem) result else result + elem) + foldLeft(empty)((result, elem) => if (that contains elem) result else result + elem) /** Creates a new $coll from this $coll by removing all elements of another * collection. 
diff --git a/src/library/scala/collection/mutable/AnyRefMap.scala b/src/library/scala/collection/mutable/AnyRefMap.scala index 2c65c8c7a5f1..c02a10770696 100644 --- a/src/library/scala/collection/mutable/AnyRefMap.scala +++ b/src/library/scala/collection/mutable/AnyRefMap.scala @@ -393,7 +393,7 @@ class AnyRefMap[K <: AnyRef, V] private[collection] (defaultEntry: K => V, initi } @deprecated("Consider requiring an immutable Map or fall back to Map.concat", "2.13.0") - override def + [V1 >: V](kv: (K, V1)): AnyRefMap[K, V1] = AnyRefMap.from(new View.Appended(toIterable, kv)) + override def + [V1 >: V](kv: (K, V1)): AnyRefMap[K, V1] = AnyRefMap.from(new View.Appended(this, kv)) @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): AnyRefMap[K, V1] = { @@ -477,9 +477,9 @@ class AnyRefMap[K <: AnyRef, V] private[collection] (defaultEntry: K => V, initi // The implicit dummy parameter is necessary to distinguish these methods from the base methods they overload (not override) def map[K2 <: AnyRef, V2](f: ((K, V)) => (K2, V2))(implicit dummy: DummyImplicit): AnyRefMap[K2, V2] = - AnyRefMap.from(new View.Map(toIterable, f)) + AnyRefMap.from(new View.Map(this, f)) def flatMap[K2 <: AnyRef, V2](f: ((K, V)) => IterableOnce[(K2, V2)])(implicit dummy: DummyImplicit): AnyRefMap[K2, V2] = - AnyRefMap.from(new View.FlatMap(toIterable, f)) + AnyRefMap.from(new View.FlatMap(this, f)) def collect[K2 <: AnyRef, V2](pf: PartialFunction[(K, V), (K2, V2)])(implicit dummy: DummyImplicit): AnyRefMap[K2, V2] = strictOptimizedCollect(AnyRefMap.newBuilder[K2, V2], pf) diff --git a/src/library/scala/collection/mutable/Builder.scala b/src/library/scala/collection/mutable/Builder.scala index 9a4f41df6f7f..0ecc06dff061 100644 --- a/src/library/scala/collection/mutable/Builder.scala +++ b/src/library/scala/collection/mutable/Builder.scala @@ -68,9 +68,11 @@ trait Builder[-A, +To] extends Growable[A] { self => * an IndexedSeqLike, then sizes larger * than collection's size are reduced. */ + // should probably be `boundingColl: IterableOnce[_]`, but binary compatibility final def sizeHintBounded(size: Int, boundingColl: scala.collection.Iterable[_]): Unit = { - if (boundingColl.knownSize != -1) { - sizeHint(scala.math.min(boundingColl.knownSize, size)) + val s = boundingColl.knownSize + if (s != -1) { + sizeHint(scala.math.min(s, size)) } } diff --git a/src/library/scala/collection/mutable/CollisionProofHashMap.scala b/src/library/scala/collection/mutable/CollisionProofHashMap.scala index 4382a31a0f5e..f7619cd1384f 100644 --- a/src/library/scala/collection/mutable/CollisionProofHashMap.scala +++ b/src/library/scala/collection/mutable/CollisionProofHashMap.scala @@ -417,7 +417,7 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double */ def map[K2, V2](f: ((K, V)) => (K2, V2)) (implicit @implicitNotFound(CollisionProofHashMap.ordMsg) ordering: Ordering[K2]): CollisionProofHashMap[K2, V2] = - sortedMapFactory.from(new View.Map[(K, V), (K2, V2)](toIterable, f)) + sortedMapFactory.from(new View.Map[(K, V), (K2, V2)](this, f)) /** Builds a new `CollisionProofHashMap` by applying a function to all elements of this $coll * and using the elements of the resulting collections. 
@@ -428,7 +428,7 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double */ def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]) (implicit @implicitNotFound(CollisionProofHashMap.ordMsg) ordering: Ordering[K2]): CollisionProofHashMap[K2, V2] = - sortedMapFactory.from(new View.FlatMap(toIterable, f)) + sortedMapFactory.from(new View.FlatMap(this, f)) /** Builds a new sorted map by applying a partial function to all elements of this $coll * on which the function is defined. @@ -440,10 +440,10 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double */ def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)]) (implicit @implicitNotFound(CollisionProofHashMap.ordMsg) ordering: Ordering[K2]): CollisionProofHashMap[K2, V2] = - sortedMapFactory.from(new View.Collect(toIterable, pf)) + sortedMapFactory.from(new View.Collect(this, pf)) override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]): CollisionProofHashMap[K, V2] = sortedMapFactory.from(suffix match { - case it: Iterable[(K, V2)] => new View.Concat(toIterable, it) + case it: Iterable[(K, V2)] => new View.Concat(this, it) case _ => iterator.concat(suffix.iterator) }) @@ -452,11 +452,11 @@ final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double @deprecated("Consider requiring an immutable Map or fall back to Map.concat", "2.13.0") override def + [V1 >: V](kv: (K, V1)): CollisionProofHashMap[K, V1] = - sortedMapFactory.from(new View.Appended(toIterable, kv)) + sortedMapFactory.from(new View.Appended(this, kv)) @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CollisionProofHashMap[K, V1] = - sortedMapFactory.from(new View.Concat(new View.Appended(new View.Appended(toIterable, elem1), elem2), elems)) + sortedMapFactory.from(new View.Concat(new View.Appended(new View.Appended(this, elem1), elem2), elems)) ///////////////////// RedBlackTree code derived from mutable.RedBlackTree: diff --git a/src/library/scala/collection/mutable/Map.scala b/src/library/scala/collection/mutable/Map.scala index 27278c67286c..610dc01029cc 100644 --- a/src/library/scala/collection/mutable/Map.scala +++ b/src/library/scala/collection/mutable/Map.scala @@ -160,7 +160,7 @@ trait MapOps[K, V, +CC[X, Y] <: MapOps[X, Y, CC, _], +C <: MapOps[K, V, CC, C]] def clear(): Unit = { keysIterator foreach -= } - override def clone(): C = empty ++= toIterable + override def clone(): C = empty ++= this @deprecated("Use filterInPlace instead", "2.13.0") @inline final def retain(p: (K, V) => Boolean): this.type = filterInPlace(p) diff --git a/src/library/scala/collection/mutable/Seq.scala b/src/library/scala/collection/mutable/Seq.scala index e624acc2200e..e83d79987208 100644 --- a/src/library/scala/collection/mutable/Seq.scala +++ b/src/library/scala/collection/mutable/Seq.scala @@ -41,7 +41,7 @@ trait SeqOps[A, +CC[_], +C <: AnyRef] override def clone(): C = { val b = newSpecificBuilder - b ++= toIterable + b ++= this b.result() } diff --git a/src/library/scala/collection/mutable/Set.scala b/src/library/scala/collection/mutable/Set.scala index 900d25c71938..6530e8fedf05 100644 --- a/src/library/scala/collection/mutable/Set.scala +++ b/src/library/scala/collection/mutable/Set.scala @@ -78,7 +78,7 @@ trait SetOps[A, +CC[X], +C <: SetOps[A, CC, C]] } def diff(that: collection.Set[A]): C = - toIterable.foldLeft(empty)((result, elem) => if (that contains elem) result else result += 
elem) + foldLeft(empty)((result, elem) => if (that contains elem) result else result += elem) @deprecated("Use filterInPlace instead", "2.13.0") @inline final def retain(p: A => Boolean): Unit = filterInPlace(p) @@ -104,7 +104,7 @@ trait SetOps[A, +CC[X], +C <: SetOps[A, CC, C]] this } - override def clone(): C = empty ++= toIterable + override def clone(): C = empty ++= this override def knownSize: Int = super[IterableOps].knownSize } diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/IndexScript.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/IndexScript.scala index 63f54b3546c9..ee8c63842166 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/page/IndexScript.scala +++ b/src/scaladoc/scala/tools/nsc/doc/html/page/IndexScript.scala @@ -29,7 +29,7 @@ class IndexScript(universe: doc.Universe) extends Page { } val packages = { - val pairs = allPackagesWithTemplates.toIterable.map(_ match { + val pairs = allPackagesWithTemplates.map(_ match { case (pack, templates) => { val merged = mergeByQualifiedName(templates) diff --git a/test/benchmarks/src/main/scala/scala/collection/immutable/ArraySeqBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/immutable/ArraySeqBenchmark.scala index dab019b0b288..f5d8e6361df5 100644 --- a/test/benchmarks/src/main/scala/scala/collection/immutable/ArraySeqBenchmark.scala +++ b/test/benchmarks/src/main/scala/scala/collection/immutable/ArraySeqBenchmark.scala @@ -50,7 +50,7 @@ class ArraySeqBenchmark { private[this] def oldSorted[A](seq: ArraySeq[A])(implicit ord: Ordering[A], tag: ClassTag[A]): ArraySeq[A] = { val len = seq.length val b = ArraySeq.newBuilder[A](tag) - if (len == 1) b ++= seq.toIterable + if (len == 1) b ++= seq else if (len > 1) { b.sizeHint(len) val arr = new Array[AnyRef](len) diff --git a/test/files/run/colltest1.scala b/test/files/run/colltest1.scala index cc58fbf8230b..ae5e6d8be18e 100644 --- a/test/files/run/colltest1.scala +++ b/test/files/run/colltest1.scala @@ -34,7 +34,7 @@ object Test extends App { val (o, e) = ten.partition(_ % 2 == 0) assert(o.size == e.size) val gs = ten groupBy (x => x / 4) - val vs1 = (for (k <- gs.keysIterator; v <- gs(k).toIterable.iterator) yield v).toList.sorted + val vs1 = (for (k <- gs.keysIterator; v <- gs(k).iterator) yield v).toList.sorted val vs2 = gs.values.toList.flatten.sorted // val vs2 = gs.values.toList flatMap (xs => xs) assert(ten.head == 1) @@ -60,7 +60,6 @@ object Test extends App { assert(buf == ten, buf) assert(ten.toArray.size == 10) assert(ten.toArray.toSeq == ten, ten.toArray.toSeq) - assert(ten.toIterable == ten) assert(ten.toList == ten) assert(ten.toSeq == ten) assert(ten.toStream == ten) diff --git a/test/files/run/t4930.scala b/test/files/run/t4930.scala index 46705729a1d2..dbd5dac43eba 100644 --- a/test/files/run/t4930.scala +++ b/test/files/run/t4930.scala @@ -2,7 +2,7 @@ import collection.immutable.SortedMap import scala.math.Ordering.Implicits._ object Test { - implicit val ord: Ordering[Array[Byte]] = Ordering.by(x => x.toIterable: collection.Seq[Byte]) + implicit val ord: Ordering[Array[Byte]] = Ordering.by(x => x: collection.Seq[Byte]) def main(args: Array[String]): Unit = { val m = SortedMap(Array[Byte](1) -> 0) diff --git a/test/junit/scala/collection/mutable/ArraySortingTest.scala b/test/junit/scala/collection/mutable/ArraySortingTest.scala index 2e98fd6ac350..dd97587e981a 100644 --- a/test/junit/scala/collection/mutable/ArraySortingTest.scala +++ b/test/junit/scala/collection/mutable/ArraySortingTest.scala @@ -25,7 +25,7 @@ class ArraySortingTest { 
java.util.Arrays.sort(test) scala.util.Sorting.quickSort(cant)(CanOrder) assert( test(6) == 1 ) - assert( test.toIterable.lazyZip(cant).forall(_ == _.i) ) + assert( test.lazyZip(cant).forall(_ == _.i) ) } @Test From 375ed48f4d75107b24adeaed704b586f25169273 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Wed, 29 Sep 2021 13:25:53 +0200 Subject: [PATCH 392/769] deprecate IterableOps.toIterable --- src/library/scala/collection/Iterable.scala | 11 +++++++++-- .../scala/collection/StrictOptimizedIterableOps.scala | 3 ++- test/junit/scala/collection/IterableTest.scala | 7 ++++--- test/junit/scala/collection/ToConserveTest.scala | 2 ++ 4 files changed, 17 insertions(+), 6 deletions(-) diff --git a/src/library/scala/collection/Iterable.scala b/src/library/scala/collection/Iterable.scala index 6721ea5920dc..db4f7b919943 100644 --- a/src/library/scala/collection/Iterable.scala +++ b/src/library/scala/collection/Iterable.scala @@ -13,6 +13,7 @@ package scala package collection +import scala.annotation.nowarn import scala.annotation.unchecked.uncheckedVariance import scala.collection.mutable.Builder import scala.collection.View.{LeftPartitionMapped, RightPartitionMapped} @@ -29,6 +30,7 @@ trait Iterable[+A] extends IterableOnce[A] with IterableFactoryDefaults[A, Iterable] { // The collection itself + @deprecated("toIterable is internal and will be made protected; its name is similar to `toList` or `toSeq`, but it doesn't copy non-immutable collections", "2.13.7") final def toIterable: this.type = this final protected def coll: this.type = this @@ -133,13 +135,15 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable /** * @return This collection as an `Iterable[A]`. No new collection will be built if `this` is already an `Iterable[A]`. */ + // Should be `protected def asIterable`, or maybe removed altogether if it's not needed + @deprecated("toIterable is internal and will be made protected; its name is similar to `toList` or `toSeq`, but it doesn't copy non-immutable collections", "2.13.7") def toIterable: Iterable[A] /** Converts this $coll to an unspecified Iterable. Will return * the same collection if this instance is already Iterable. * @return An Iterable containing all elements of this $coll. */ - @deprecated("Use toIterable instead", "2.13.0") + @deprecated("toTraversable is internal and will be made protected; its name is similar to `toList` or `toSeq`, but it doesn't copy non-immutable collections", "2.13.0") final def toTraversable: Traversable[A] = toIterable override def isTraversableAgain: Boolean = true @@ -830,7 +834,10 @@ trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with Iterable // A helper for tails and inits. 
private[this] def iterateUntilEmpty(f: Iterable[A] => Iterable[A]): Iterator[C] = { - val it = Iterator.iterate(toIterable)(f).takeWhile(_.nonEmpty) + // toIterable ties the knot between `this: IterableOnceOps[A, CC, C]` and `this.tail: C` + // `this.tail.tail` doesn't compile as `C` is unbounded + // `Iterable.from(this)` would eagerly copy non-immutable collections + val it = Iterator.iterate(toIterable: @nowarn("cat=deprecation"))(f).takeWhile(_.nonEmpty) (it ++ Iterator.single(Iterable.empty)).map(fromSpecific) } diff --git a/src/library/scala/collection/StrictOptimizedIterableOps.scala b/src/library/scala/collection/StrictOptimizedIterableOps.scala index a2d6fbaadb1a..a09766cfa912 100644 --- a/src/library/scala/collection/StrictOptimizedIterableOps.scala +++ b/src/library/scala/collection/StrictOptimizedIterableOps.scala @@ -13,6 +13,7 @@ package scala package collection +import scala.annotation.nowarn import scala.annotation.unchecked.uncheckedVariance import scala.runtime.Statics @@ -254,7 +255,7 @@ trait StrictOptimizedIterableOps[+A, +CC[_], +C] */ override def takeRight(n: Int): C = { val b = newSpecificBuilder - b.sizeHintBounded(n, toIterable) + b.sizeHintBounded(n, toIterable: @nowarn("cat=deprecation")) val lead = iterator drop n val it = iterator while (lead.hasNext) { diff --git a/test/junit/scala/collection/IterableTest.scala b/test/junit/scala/collection/IterableTest.scala index 78f911aace1d..3a3495d2602b 100644 --- a/test/junit/scala/collection/IterableTest.scala +++ b/test/junit/scala/collection/IterableTest.scala @@ -1,7 +1,9 @@ package scala.collection -import org.junit.{Assert, Test}, Assert.{assertEquals, assertTrue} +import org.junit.{Assert, Test} +import Assert.{assertEquals, assertTrue} +import scala.annotation.nowarn import scala.collection.immutable.{ArraySeq, List, Range, Vector} import scala.tools.testkit.AssertUtil._ @@ -135,8 +137,7 @@ class IterableTest { check(new Array(10), l.copyToArray(_, 0, -1), 0, 0, 0) } - @deprecated("Uses deprecated toTraversable", since="2.13.0") - @Test + @Test @nowarn("cat=deprecation") def emptyToTraversable(): Unit = { assert(Iterable.empty == Array.empty.toIterable) assert(Iterable.empty == Array.empty.toTraversable) diff --git a/test/junit/scala/collection/ToConserveTest.scala b/test/junit/scala/collection/ToConserveTest.scala index 8a994027999e..9219f192a293 100644 --- a/test/junit/scala/collection/ToConserveTest.scala +++ b/test/junit/scala/collection/ToConserveTest.scala @@ -5,11 +5,13 @@ import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 +import scala.annotation.nowarn import scala.collection.{immutable => i, mutable => m} import scala.language.implicitConversions import scala.{collection => c} @RunWith(classOf[JUnit4]) +@nowarn("cat=deprecation") class ToConserveTest { // scala/bug#12188 implicit def toAnyRefFactory[A, CC[_] <: AnyRef](factory: c.IterableFactory[CC]): c.Factory[A, AnyRef] = From 158c05b14595b67bda84ad187bf4ce7ccb55a686 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Wed, 29 Sep 2021 16:12:52 +0200 Subject: [PATCH 393/769] clean up ToConserveTest --- .../scala/collection/ToConserveTest.scala | 24 +++++++++++++++---- 1 file changed, 20 insertions(+), 4 deletions(-) diff --git a/test/junit/scala/collection/ToConserveTest.scala b/test/junit/scala/collection/ToConserveTest.scala index 9219f192a293..b3acbd5f13b2 100644 --- a/test/junit/scala/collection/ToConserveTest.scala +++ b/test/junit/scala/collection/ToConserveTest.scala @@ -26,8 +26,6 @@ class ToConserveTest { 
assertSame(l, l.toSeq) assertSame(l, l.toIterable) - assertSame(l, l.to(List)) - assertSame(l, l.to(c.Iterable)) assertSame(l, l.to(i.Iterable)) @@ -41,7 +39,7 @@ class ToConserveTest { } @Test def toConserveImmutableHashSet: Unit = { - val s: c.Iterable[Int] = (1 to 10).to(immutable.HashSet) + val s: c.Iterable[Int] = (1 to 10).to(i.HashSet) assertSame(s, s.toSet) assertSame(s, s.toIterable) @@ -55,7 +53,7 @@ class ToConserveTest { } @Test def toConserveImmutableHashMap: Unit = { - val m: c.Iterable[(Int, Int)] = (1 to 10).map(x => (x, x)).to(immutable.HashMap): i.Map[Int, Int] + val m: c.Iterable[(Int, Int)] = (1 to 10).map(x => (x, x)).to(i.HashMap): i.Map[Int, Int] assertSame(m, m.toMap) assertSame(m, m.toIterable) @@ -69,6 +67,24 @@ class ToConserveTest { assertSame(m, m.to(i.HashMap)) } + @Test def toConserveLazyList: Unit = { + val l: c.Iterable[Int] = LazyList.from(1 to 10) + + assertSame(l, l.toSeq) + assertSame(l, l.toIterable) + + assertSame(l, l.to(c.Iterable)) + assertSame(l, l.to(i.Iterable)) + + assertSame(l, l.to(c.Seq)) + assertSame(l, l.to(i.Seq)) + + assertSame(l, l.to(c.LinearSeq)) + assertSame(l, l.to(i.LinearSeq)) + + assertSame(l, l.to(LazyList)) + } + @Test def toRebuildMutable: Unit = { val s: c.Iterable[Int] = (1 to 3).to(m.HashSet) assertSame(s, s.toIterable) // slightly inconsistent... From fbb77949a9f4522452bbd5f3cb5e0d568e75ffe3 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Wed, 29 Sep 2021 15:21:32 -0600 Subject: [PATCH 394/769] fix t2318 on JDK 17 backports one piece of #9677 fixes scala/scala-dev#790 --- test/files/run/t2318.scala | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/test/files/run/t2318.scala b/test/files/run/t2318.scala index f455fe250643..824954f10103 100644 --- a/test/files/run/t2318.scala +++ b/test/files/run/t2318.scala @@ -4,9 +4,12 @@ import java.security._ import scala.language.reflectiveCalls +// SecurityManager is deprecated on JDK 17, so we sprinkle `@deprecated` around + object Test { trait Bar { def bar: Unit } + @deprecated object Mgr extends SecurityManager { override def checkPermission(perm: Permission) = perm match { case _: java.lang.RuntimePermission => () @@ -24,6 +27,7 @@ object Test { def doDestroy( obj : Destroyable ) : Unit = obj.destroy(); doDestroy( p ); } + @deprecated def t2() = { System.setSecurityManager(Mgr) @@ -34,11 +38,11 @@ object Test { structural.bar } - def main(args: Array[String]) { + def main(args: Array[String]): Unit = { // figuring this will otherwise break on windows try t1() catch { case _: java.io.IOException => () } - t2() + t2(): @annotation.nowarn("cat=deprecation") } } From c3b974f2e2903e0b6bb6d3c35274eb0ffea800e9 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Thu, 30 Sep 2021 16:43:27 +0200 Subject: [PATCH 395/769] emit deprecations for classOf arguments --- src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 4 ++++ test/files/neg/classOfDeprecation.check | 9 +++++++++ test/files/neg/classOfDeprecation.scala | 8 ++++++++ 3 files changed, 21 insertions(+) create mode 100644 test/files/neg/classOfDeprecation.check create mode 100644 test/files/neg/classOfDeprecation.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index 88dd49c3417e..8d524d8f5d00 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -1812,6 +1812,10 @@ abstract class RefChecks extends Transform { case x @ Select(_, _) => 
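A small sketch of the effect, mirroring the new test below (assumes `-deprecation`; the object name `Use` and the `@nowarn` line are extra illustration, not part of the test): referring to a deprecated class through `classOf` now triggers a deprecation warning, and the warning can be suppressed per use site as usual.

    import scala.annotation.nowarn

    @deprecated("no no!", "like, forever") class C

    object Use {
      val t = classOf[C]                            // warns: class C is deprecated (since like, forever): no no!
      @nowarn("cat=deprecation") val u = classOf[C] // deprecation warning suppressed at this use site
    }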
transformSelect(x) + case Literal(Constant(tp: Type)) => + checkTypeRef(tp, tree, skipBounds = false) + tree + case UnApply(fun, args) => transform(fun) // just make sure we enterReference for unapply symbols, note that super.transform(tree) would not transform(fun) // transformTrees(args) // TODO: is this necessary? could there be forward references in the args?? diff --git a/test/files/neg/classOfDeprecation.check b/test/files/neg/classOfDeprecation.check new file mode 100644 index 000000000000..e67fc64fc74d --- /dev/null +++ b/test/files/neg/classOfDeprecation.check @@ -0,0 +1,9 @@ +classOfDeprecation.scala:6: warning: class C is deprecated (since like, forever): no no! + val t = classOf[C] + ^ +classOfDeprecation.scala:7: warning: class C is deprecated (since like, forever): no no! + @ann(classOf[C]) def u = 1 + ^ +error: No warnings can be incurred under -Werror. +2 warnings +1 error diff --git a/test/files/neg/classOfDeprecation.scala b/test/files/neg/classOfDeprecation.scala new file mode 100644 index 000000000000..d7557e3f2e90 --- /dev/null +++ b/test/files/neg/classOfDeprecation.scala @@ -0,0 +1,8 @@ +// scalac: -deprecation -Werror + +@deprecated("no no!", "like, forever") class C +class ann(x: Any) extends annotation.Annotation +object T { + val t = classOf[C] + @ann(classOf[C]) def u = 1 +} From 1fba2e581388d8f4168b39d42e8df98b492dc638 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Mon, 4 Oct 2021 16:28:09 -0600 Subject: [PATCH 396/769] AdoptOpenJDK is now Temurin --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 228e6e1f1afc..867bf52ff2db 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -29,7 +29,7 @@ jobs: - name: Setup Java uses: actions/setup-java@v2 with: - distribution: adopt + distribution: temurin java-version: ${{matrix.java}} - name: Cache From 1805d8854dbc23d7174c47dff6a9428456ce0ba0 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Tue, 5 Oct 2021 12:27:58 -0600 Subject: [PATCH 397/769] revert bad change to spec publishing I should not have brought this over from 2.12.x fixes scala/scala-dev#791 --- .travis.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.travis.yml b/.travis.yml index 0a2627d7533c..6c7db60c05f6 100644 --- a/.travis.yml +++ b/.travis.yml @@ -154,6 +154,7 @@ jobs: env: global: - ADOPTOPENJDK=8 + - secure: "P8EqpZoin/YTnwel9TTxSSAHtXfZ4M262BKXlYUZmjoQsjyvXDAeZ7yAqgAvX5BeRFrGkBToPiE+V60stdWkPKs3+9COw2BDUB1CULBHhRY9Lxordmz0xVhgEfsoH4f6r6wOlIQ9kuaWhmP+JdB/mzOHZhLch9ziPi8O46Z8t4k=" # PRIV_KEY_SECRET, for scripts/travis-publish-spec.sh - secure: "TuJOUtALynPd+MV1AuMeIpVb8BUBHr7Ul7FS48XhS2PyuTRpEBkSWybYcNg3AXyzmWDAuOjUxbaNMQBvP8vvehTbIYls5H5wTGKvj0D0TNVaPIXjF8bA8KyNat9xGNzhnWm2/2BMaWpKBJWRF7Jb+zHhijMYCJEbkMtoiE5R/mY=" # PRIV_KEY_SECRET, for scripts/travis-publish-spec.sh - secure: "T1fxtvLTxioyXJYiC/zVYdNYsBOt+0Piw+xE04rB1pzeKahm9+G2mISdcAyqv6/vze9eIJt6jNHHpKX32/Z3Cs1/Ruha4m3k+jblj3S0SbxV6ht2ieJXLT5WoUPFRrU68KXI8wqUadXpjxeJJV53qF2FC4lhfMUsw1IwwMhdaE8=" # PRIVATE_REPO_PASS, for publishing to scala-ci Artifactory - secure: "dbAvl6KEuLwZ0MVQPZihFsPzCdiLbX0EFk3so+hcfEbksrmLQ1tn4X5ZM7Wy1UDR8uN9lxngEwHch7a7lKqpugzmXMew9Wnikr9WBWbJT77Z+XJ/jHI6YuiCRpRo+nvxXGp9Ry80tSIgx5eju0J83IaJL41BWlBkvyAd7YAHORI=" # GPG_SUBKEY_SECRET, so we can sign JARs From a60217a611b21567b8f168c2793e92ff3b24394c Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Tue, 5 Oct 2021 12:30:14 -0600 Subject: [PATCH 398/769] sigh. 
fix spec publishing fix --- .travis.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 6c7db60c05f6..9435f22b3493 100644 --- a/.travis.yml +++ b/.travis.yml @@ -155,8 +155,7 @@ env: global: - ADOPTOPENJDK=8 - secure: "P8EqpZoin/YTnwel9TTxSSAHtXfZ4M262BKXlYUZmjoQsjyvXDAeZ7yAqgAvX5BeRFrGkBToPiE+V60stdWkPKs3+9COw2BDUB1CULBHhRY9Lxordmz0xVhgEfsoH4f6r6wOlIQ9kuaWhmP+JdB/mzOHZhLch9ziPi8O46Z8t4k=" # PRIV_KEY_SECRET, for scripts/travis-publish-spec.sh - - secure: "TuJOUtALynPd+MV1AuMeIpVb8BUBHr7Ul7FS48XhS2PyuTRpEBkSWybYcNg3AXyzmWDAuOjUxbaNMQBvP8vvehTbIYls5H5wTGKvj0D0TNVaPIXjF8bA8KyNat9xGNzhnWm2/2BMaWpKBJWRF7Jb+zHhijMYCJEbkMtoiE5R/mY=" # PRIV_KEY_SECRET, for scripts/travis-publish-spec.sh - - secure: "T1fxtvLTxioyXJYiC/zVYdNYsBOt+0Piw+xE04rB1pzeKahm9+G2mISdcAyqv6/vze9eIJt6jNHHpKX32/Z3Cs1/Ruha4m3k+jblj3S0SbxV6ht2ieJXLT5WoUPFRrU68KXI8wqUadXpjxeJJV53qF2FC4lhfMUsw1IwwMhdaE8=" # PRIVATE_REPO_PASS, for publishing to scala-ci Artifactory + - secure: "TuJOUtALynPd+MV1AuMeIpVb8BUBHr7Ul7FS48XhS2PyuTRpEBkSWybYcNg3AXyzmWDAuOjUxbaNMQBvP8vvehTbIYls5H5wTGKvj0D0 - secure: "T1fxtvLTxioyXJYiC/zVYdNYsBOt+0Piw+xE04rB1pzeKahm9+G2mISdcAyqv6/vze9eIJt6jNHHpKX32/Z3Cs1/Ruha4m3k+jblj3S0SbxV6ht2ieJXLT5WoUPFRrU68KXI8wqUadXpjxeJJV53qF2FC4lhfMUsw1IwwMhdaE8=" # PRIVATE_REPO_PASS, for publishing to scala-ci Artifactory - secure: "dbAvl6KEuLwZ0MVQPZihFsPzCdiLbX0EFk3so+hcfEbksrmLQ1tn4X5ZM7Wy1UDR8uN9lxngEwHch7a7lKqpugzmXMew9Wnikr9WBWbJT77Z+XJ/jHI6YuiCRpRo+nvxXGp9Ry80tSIgx5eju0J83IaJL41BWlBkvyAd7YAHORI=" # GPG_SUBKEY_SECRET, so we can sign JARs - secure: "RTyzS6nUgthupw5M0fPwTlcOym1sWgBo8eXYepB2xGiQnRu4g583BGuNBW1UZ3vIjRETi/UKQ1HtMR+i7D8ptF1cNpomopncVJA1iy7pU2w0MJ0xgIPMuvtkIa3kxocd/AnxAp+UhUad3nC8lDpkvZsUhhyA0fb4iPKipd2b2xY=" # TRAVIS_TOKEN (login with GitHub as SethTisue), for triggering scala-dist job - secure: "FvhicbSeys7VNTj9ZP/aNT0NhiQP/NNV0KRfK7IHxi3uOeaxFVfaQsln4lzqZn8dkntgzzNrE/VhvMIknfnISAPX7bShy6SRyj3V2BlcUpuem8WtwmkCaZ42xlCJteBL7NW0auG/8rxrNIAJXbRObqF+YdK6XsRMWaBMQHky+ss=" # SONA_USER, token username for publishing to Sonatype From c0e5e6d52ee3e554b0fc06ce0e018c3e09231644 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E6=A2=A6=E5=A2=83=E8=BF=B7=E7=A6=BB?= Date: Thu, 9 Sep 2021 23:23:09 +0800 Subject: [PATCH 399/769] scala/bug#11846 --- .../nsc/interactive/CompilerControl.scala | 7 ++- .../scala/tools/nsc/interactive/Global.scala | 44 ++++++++++++++----- .../tools/nsc/interpreter/jline/Reader.scala | 2 +- .../tools/nsc/interpreter/Interface.scala | 4 +- .../interpreter/PresentationCompilation.scala | 8 ++-- .../nsc/interpreter/CompletionTest.scala | 20 +++++++++ 6 files changed, 66 insertions(+), 19 deletions(-) diff --git a/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala b/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala index f2bf95350879..cbf821a70315 100644 --- a/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala +++ b/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala @@ -280,6 +280,7 @@ trait CompilerControl { self: Global => val sym: Symbol val tpe: Type val accessible: Boolean + val aliasInfo: Option[ScopeMember] def implicitlyAdded = false def symNameDropLocal: Name = if (sym.name.isTermName) sym.name.dropLocal else sym.name @@ -298,7 +299,8 @@ trait CompilerControl { self: Global => tpe: Type, accessible: Boolean, inherited: Boolean, - viaView: Symbol) extends Member { + viaView: Symbol, + aliasInfo: Option[ScopeMember] = None) extends Member { // should be a case class parameter, but added as a var 
instead to preserve compatibility with the IDE var prefix: Type = NoType override def implicitlyAdded = viaView != NoSymbol @@ -308,7 +310,8 @@ trait CompilerControl { self: Global => sym: Symbol, tpe: Type, accessible: Boolean, - viaImport: Tree) extends Member { + viaImport: Tree, + aliasInfo: Option[ScopeMember] = None) extends Member { // should be a case class parameter, but added as a var instead to preserve compatibility with the IDE var prefix: Type = NoType } diff --git a/src/interactive/scala/tools/nsc/interactive/Global.scala b/src/interactive/scala/tools/nsc/interactive/Global.scala index a72adb3274e9..6da4105019a3 100644 --- a/src/interactive/scala/tools/nsc/interactive/Global.scala +++ b/src/interactive/scala/tools/nsc/interactive/Global.scala @@ -992,7 +992,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") private[interactive] def getScopeCompletion(pos: Position, response: Response[List[Member]]): Unit = { informIDE("getScopeCompletion" + pos) - respond(response) { scopeMembers(pos) } + respond(response) { scopeMemberFlatten(scopeMembers(pos)) } } private class Members[M <: Member] extends LinkedHashMap[Name, Set[M]] { @@ -1043,9 +1043,15 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") locals.add(sym, pre, implicitlyAdded = false) { (s, st) => // imported val and var are always marked as inaccessible, but they could be accessed through their getters. scala/bug#7995 val member = if (s.hasGetter) - new ScopeMember(s, st, context.isAccessible(s.getter, pre, superAccess = false), viaImport) - else - new ScopeMember(s, st, context.isAccessible(s, pre, superAccess = false), viaImport) + ScopeMember(s, st, context.isAccessible(s.getter, pre, superAccess = false), viaImport) + else { + if (s.isAliasType) { + val aliasInfo = ScopeMember(s, st, context.isAccessible(s, pre, superAccess = false), viaImport) + ScopeMember(s.info.typeSymbol, s.info.typeSymbol.tpe, + context.isAccessible(s.info.typeSymbol, pre, superAccess = false), viaImport, + aliasInfo = Some(aliasInfo)) + } else ScopeMember(s, st, context.isAccessible(s, pre, superAccess = false), viaImport) + } member.prefix = pre member } @@ -1181,14 +1187,23 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") def matchingResults(nameMatcher: (Name) => Name => Boolean = entered => candidate => candidate.startsWith(entered)): List[M] = { val enteredName = if (name == nme.ERROR) nme.EMPTY else name val matcher = nameMatcher(enteredName) - results filter { (member: Member) => + results.filter { (member: Member) => val symbol = member.sym def isStable = member.tpe.isStable || member.sym.isStable || member.sym.getterIn(member.sym.owner).isStable def isJunk = !symbol.exists || symbol.name.isEmpty || !isIdentifierStart(member.sym.name.charAt(0)) // e.g. - def nameTypeOk = forImport || // Completing an import: keep terms and types. - symbol.name.isTermName == name.isTermName || // Keep names of the same type - name.isTypeName && isStable // Completing a type: keep stable terms (paths) - !isJunk && member.accessible && !symbol.isConstructor && (name.isEmpty || matcher(member.sym.name) && nameTypeOk) + def nameTypeOk: Boolean = { + forImport || // Completing an import: keep terms and types. 
+ symbol.name.isTermName == name.isTermName || // Keep names of the same type + name.isTypeName && isStable // Completing a type: keep stable terms (paths) + } + // scala/bug#11846 aliasInfo should be match + def aliasTypeOk: Boolean = { + matcher(member.aliasInfo.map(_.sym.name).getOrElse(NoSymbol.name)) && !forImport && symbol.name.isTermName == name.isTermName + } + + !isJunk && member.accessible && !symbol.isConstructor && (name.isEmpty || (matcher(member.sym.name) || aliasTypeOk) + && nameTypeOk) + } } } @@ -1208,6 +1223,11 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") } } + private def scopeMemberFlatten(members: List[ScopeMember]): List[ScopeMember] = { + val (infoWithoutAlias, infoWithAlias) = members.partition(_.aliasInfo.isEmpty) + infoWithoutAlias ++ infoWithAlias ++ infoWithAlias.flatten(_.aliasInfo) + } + final def completionsAt(pos: Position): CompletionResult = { val focus1: Tree = typedTreeAt(pos) def typeCompletions(tree: Tree, qual: Tree, nameStart: Int, name: Name): CompletionResult = { @@ -1235,13 +1255,13 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") val allMembers = scopeMembers(pos) val positionDelta: Int = pos.start - focus1.pos.start val subName = name.subName(0, positionDelta) - CompletionResult.ScopeMembers(positionDelta, allMembers, subName, forImport = false) + CompletionResult.ScopeMembers(positionDelta, scopeMemberFlatten(allMembers), subName, forImport = false) case imp@Import(i @ Ident(name), head :: Nil) if head.name == nme.ERROR => val allMembers = scopeMembers(pos) val nameStart = i.pos.start val positionDelta: Int = pos.start - nameStart val subName = name.subName(0, pos.start - i.pos.start) - CompletionResult.ScopeMembers(positionDelta, allMembers, subName, forImport = true) + CompletionResult.ScopeMembers(positionDelta, scopeMemberFlatten(allMembers), subName, forImport = true) case imp@Import(qual, selectors) => selectors.reverseIterator.find(_.namePos <= pos.start) match { case None => CompletionResult.NoResults @@ -1264,7 +1284,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") val allMembers = scopeMembers(pos) val positionDelta: Int = pos.start - focus1.pos.start val subName = name.subName(0, positionDelta) - CompletionResult.ScopeMembers(positionDelta, allMembers, subName, forImport = false) + CompletionResult.ScopeMembers(positionDelta, scopeMemberFlatten(allMembers), subName, forImport = false) case _ => CompletionResult.NoResults } diff --git a/src/repl-frontend/scala/tools/nsc/interpreter/jline/Reader.scala b/src/repl-frontend/scala/tools/nsc/interpreter/jline/Reader.scala index 2825764f5a5d..6f3f518205a9 100644 --- a/src/repl-frontend/scala/tools/nsc/interpreter/jline/Reader.scala +++ b/src/repl-frontend/scala/tools/nsc/interpreter/jline/Reader.scala @@ -350,7 +350,7 @@ class Completion(delegate: shell.Completion) extends shell.Completion with Compl } val parsedLineWord = parsedLine.word() - result.candidates.filter(_.name == parsedLineWord) match { + result.candidates.filter(c => c.name == parsedLineWord || c.alias.fold(false)(a => a == parsedLineWord)) match { case Nil => case exacts => val declStrings = exacts.map(_.declString()).filterNot(_ == "") diff --git a/src/repl/scala/tools/nsc/interpreter/Interface.scala b/src/repl/scala/tools/nsc/interpreter/Interface.scala index 790750daf367..efd1ed7487c8 100644 --- a/src/repl/scala/tools/nsc/interpreter/Interface.scala +++ b/src/repl/scala/tools/nsc/interpreter/Interface.scala @@ -335,7 
+335,9 @@ case class CompletionCandidate( arity: CompletionCandidate.Arity = CompletionCandidate.Nullary, isDeprecated: Boolean = false, isUniversal: Boolean = false, - declString: () => String = () => "") + declString: () => String = () => "", + alias: Option[String] = None +) object CompletionCandidate { sealed trait Arity case object Nullary extends Arity diff --git a/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala b/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala index a2128f52cf49..97170236dc7c 100644 --- a/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala +++ b/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala @@ -211,7 +211,7 @@ trait PresentationCompilation { self: IMain => if (m.sym.paramss.isEmpty) CompletionCandidate.Nullary else if (m.sym.paramss.size == 1 && m.sym.paramss.head.isEmpty) CompletionCandidate.Nilary else CompletionCandidate.Other - def defStringCandidates(matching: List[Member], name: Name, isNew: Boolean) = { + def defStringCandidates(matching: List[Member], isNew: Boolean): List[CompletionCandidate] = { val seen = new mutable.HashSet[Symbol]() val ccs = for { member <- matching @@ -232,7 +232,9 @@ trait PresentationCompilation { self: IMain => val methodOtherDesc = if (!desc.exists(_ != "")) "" else " " + desc.filter(_ != "").mkString(" ") sugared.defStringSeenAs(tp) + methodOtherDesc } - }) + }, + alias = member.aliasInfo.fold[Option[String]](None)(s => Some(s.sym.nameString)) + ) } ccs } @@ -257,7 +259,7 @@ trait PresentationCompilation { self: IMain => } else super.traverse(t) } }.traverse(unit.body) - val candidates = defStringCandidates(matching, r.name, isNew) + val candidates = defStringCandidates(matching, isNew) val pos = cursor - r.positionDelta (pos, candidates.sortBy(_.name)) } diff --git a/test/junit/scala/tools/nsc/interpreter/CompletionTest.scala b/test/junit/scala/tools/nsc/interpreter/CompletionTest.scala index d37fad76419d..8889a6553f63 100644 --- a/test/junit/scala/tools/nsc/interpreter/CompletionTest.scala +++ b/test/junit/scala/tools/nsc/interpreter/CompletionTest.scala @@ -356,4 +356,24 @@ object Test2 { assertEquals(expected.sorted.mkString(" "), actual.toSeq.distinct.sorted.mkString(" ")) } + @Test + def ignoreAlias(): Unit = { + val (completer, _, _) = interpretLines( + """class Foo(i: Int) { def this(s: String) = this(s.toInt) }""", + """type Bar = Foo""" + ) + // We not only keep the original `type Bar = Bar`, but also add more detailed candidates + val candidates = completer.complete("new Bar").candidates + //type Bar = Bar + //def (i: Int): Foo + //def (s: String): Foo + assertEquals(3, candidates.size) + assertEquals("type Bar = Bar", candidates.head.declString.apply()) + assertEquals("def (i: Int): Foo", candidates(1).declString.apply()) + assertEquals("def (s: String): Foo", candidates(2).declString.apply()) + + val candidates1 = completer.complete("new Foo").candidates + assertEquals(2, candidates1.size) + } + } From c178b41bdf33f18392162d0782224d71594dd432 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Tue, 27 Jul 2021 12:15:38 -0700 Subject: [PATCH 400/769] Disallow toplevel wildcard type param --- .../scala/tools/nsc/ast/parser/Parsers.scala | 29 ++++++++++++++----- .../tools/nsc/typechecker/Implicits.scala | 21 +++++++------- .../tools/partest/ScaladocModelTest.scala | 2 +- .../scala/reflect/internal/StdNames.scala | 1 + .../reflect/internal/TypeDebugging.scala | 2 +- .../scala/reflect/internal/Types.scala | 6 ++-- 
.../doc/model/ModelFactoryTypeSupport.scala | 15 +++++----- test/files/neg/t2462c.scala | 2 +- test/files/neg/t5606.check | 16 ++++++++++ test/files/neg/t5606.scala | 26 +++++++++++++++++ test/files/neg/t5606b.check | 15 ++++++++++ test/files/neg/t5606b.scala | 11 +++++++ test/files/neg/trailing-commas.check | 8 +---- test/files/pos/t5606.scala | 14 ++++----- 14 files changed, 123 insertions(+), 45 deletions(-) create mode 100644 test/files/neg/t5606.check create mode 100644 test/files/neg/t5606.scala create mode 100644 test/files/neg/t5606b.check create mode 100644 test/files/neg/t5606b.scala diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 968311cf5c7c..7de107517da2 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -274,6 +274,14 @@ self => final val InBlock: Location = 1 final val InTemplate: Location = 2 + type ParamOwner = Int + object ParamOwner { + final val Class = 0 + final val Type = 1 + final val TypeParam = 2 // unused + final val Def = 3 + } + // These symbols may not yet be loaded (e.g. in the ide) so don't go // through definitions to obtain the names. lazy val ScalaValueClassNames = Seq(tpnme.AnyVal, @@ -2554,8 +2562,9 @@ self => * TypeParam ::= Id TypeParamClauseOpt TypeBounds {`<%` Type} {`:` Type} * }}} */ - def typeParamClauseOpt(owner: Name, contextBoundBuf: ListBuffer[Tree]): List[TypeDef] = { + def typeParamClauseOpt(owner: Name, contextBoundBuf: ListBuffer[Tree], ownerKind: ParamOwner): List[TypeDef] = { def typeParam(ms: Modifiers): TypeDef = { + val isAbstractOwner = ownerKind == ParamOwner.Type //|| ownerKind == ParamOwner.TypeParam var mods = ms | Flags.PARAM val start = in.offset if (owner.isTypeName && isIdent) { @@ -2570,10 +2579,16 @@ self => val nameOffset = in.offset checkQMarkDefinition() checkKeywordDefinition() - // TODO AM: freshTermName(o2p(in.skipToken()), "_$$"), will need to update test suite - val pname: TypeName = wildcardOrIdent().toTypeName + val pname: TypeName = + if (in.token == USCORE && (isAbstractOwner || !currentRun.isScala3)) { + if (!isAbstractOwner) + deprecationWarning(in.offset, "Top-level wildcard is not allowed and will error under -Xsource:3", "2.13.7") + in.nextToken() + freshTypeName("_$$") + } + else ident(skipIt = false).toTypeName val param = atPos(start, nameOffset) { - val tparams = typeParamClauseOpt(pname, null) // @M TODO null --> no higher-order context bounds for now + val tparams = typeParamClauseOpt(pname, null, ParamOwner.Type) // @M TODO null --> no higher-order context bounds for now TypeDef(mods, pname, tparams, typeBounds()) } if (contextBoundBuf ne null) { @@ -2903,7 +2918,7 @@ self => // [T : B] or [T : => B]; it contains the equivalent implicit parameter type, // i.e. (B[T] or T => B) val contextBoundBuf = new ListBuffer[Tree] - val tparams = typeParamClauseOpt(name, contextBoundBuf) + val tparams = typeParamClauseOpt(name, contextBoundBuf, ParamOwner.Def) val vparamss = paramClauses(name, contextBoundBuf.toList, ofCaseClass = false) newLineOptWhenFollowedBy(LBRACE) var restype = fromWithinReturnType(typedOpt()) @@ -3005,7 +3020,7 @@ self => checkKeywordDefinition() val name = identForType() // @M! 
a type alias as well as an abstract type may declare type parameters - val tparams = typeParamClauseOpt(name, null) + val tparams = typeParamClauseOpt(name, null, ParamOwner.Type) in.token match { case EQUALS => in.nextToken() @@ -3070,7 +3085,7 @@ self => atPos(start, if (name == tpnme.ERROR) start else nameOffset) { savingClassContextBounds { val contextBoundBuf = new ListBuffer[Tree] - val tparams = typeParamClauseOpt(name, contextBoundBuf) + val tparams = typeParamClauseOpt(name, contextBoundBuf, ParamOwner.Class) classContextBounds = contextBoundBuf.toList val tstart = (in.offset :: classContextBounds.map(_.pos.start)).min if (!classContextBounds.isEmpty && mods.isTrait) { diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index fe3a8549c5d0..507bf035b924 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -20,14 +20,13 @@ package tools.nsc package typechecker import scala.annotation.{nowarn, tailrec} -import scala.collection.mutable -import mutable.{LinkedHashMap, ListBuffer} -import scala.util.matching.Regex -import symtab.Flags._ +import scala.collection.mutable, mutable.{LinkedHashMap, ListBuffer} +import scala.language.implicitConversions import scala.reflect.internal.util.{ReusableInstance, Statistics, TriState} import scala.reflect.internal.TypesStats -import scala.language.implicitConversions import scala.tools.nsc.Reporting.WarningCategory +import scala.util.matching.Regex +import symtab.Flags._ /** This trait provides methods to find various kinds of implicits. * @@ -1830,7 +1829,7 @@ trait Implicits extends splain.SplainData { private def interpolate(text: String, vars: Map[String, String]) = Intersobralator.replaceAllIn(text, (_: Regex.Match) match { - case Regex.Groups(v) => Regex quoteReplacement vars.getOrElse(v, "") + case Regex.Groups(v) => Regex.quoteReplacement(vars.getOrElse(v, "")) // #3915: need to quote replacement string since it may include $'s (such as the interpreter's $iw) case x => throw new MatchError(x) }) @@ -1859,7 +1858,7 @@ trait Implicits extends splain.SplainData { formatDefSiteMessage(typeArgsAtSym(paramTp).map(_.toString)) def formatDefSiteMessage(typeArgs: List[String]): String = - interpolate(msg, Map(symTypeParamNames zip typeArgs: _*)) + interpolate(msg, Map(symTypeParamNames.zip(typeArgs): _*)) def formatParameterMessage(fun: Tree): String = { val paramNames = referencedTypeParams @@ -1880,13 +1879,15 @@ trait Implicits extends splain.SplainData { case PolyType(tps, tr@TypeRef(_, _, tprefs)) => if (tps.corresponds(tprefs)((p, r) => p == r.typeSymbol)) tr.typeConstructor.toString else { - val freshTpars = tps.mapConserve { case p if p.name == tpnme.WILDCARD => p.cloneSymbol.setName(newTypeName("?T" + tps.indexOf(p))) case p => p } + val freshTpars = tps.mapConserve { p => + if (p.unexpandedName == tpnme.WILDCARD) p.cloneSymbol.setName(newTypeName("?T" + tps.indexOf(p))) + else p + } freshTpars.map(_.name).mkString("[", ", ", "] -> ") + tr.instantiateTypeParams(tps, freshTpars.map(_.typeConstructor)).toString } - case tp => tp.toString } - interpolate(msg, Map(paramNames zip argTypes: _*)) + interpolate(msg, Map(paramNames.zip(argTypes): _*)) } def validate: Option[String] = { diff --git a/src/partest/scala/tools/partest/ScaladocModelTest.scala b/src/partest/scala/tools/partest/ScaladocModelTest.scala index ec158f9cfd60..5a73ce9ee0c2 100644 --- 
a/src/partest/scala/tools/partest/ScaladocModelTest.scala +++ b/src/partest/scala/tools/partest/ScaladocModelTest.scala @@ -72,7 +72,7 @@ abstract class ScaladocModelTest extends DirectTest { try { // 1 - compile with scaladoc and get the model out - val universe = model.getOrElse({sys.error("Scaladoc Model Test ERROR: No universe generated!")}) + val universe = model.getOrElse { sys.error("Scaladoc Model Test ERROR: No universe generated!") } // 2 - check the model generated testModel(universe.rootPackage) println("Done.") diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala index 00a2cc0603dd..a37391d8db37 100644 --- a/src/reflect/scala/reflect/internal/StdNames.scala +++ b/src/reflect/scala/reflect/internal/StdNames.scala @@ -473,6 +473,7 @@ trait StdNames { def unexpandedName(name: Name): Name = name.lastIndexOf("$$") match { case 0 | -1 => name + case 1 if name.charAt(0) == '_' => if (name.isTermName) nme.WILDCARD else tpnme.WILDCARD case idx0 => // Sketchville - We've found $$ but if it's part of $$$ or $$$$ // or something we need to keep the bonus dollars, so e.g. foo$$$outer diff --git a/src/reflect/scala/reflect/internal/TypeDebugging.scala b/src/reflect/scala/reflect/internal/TypeDebugging.scala index 2be3f520345a..6adab6fbe87e 100644 --- a/src/reflect/scala/reflect/internal/TypeDebugging.scala +++ b/src/reflect/scala/reflect/internal/TypeDebugging.scala @@ -140,7 +140,7 @@ trait TypeDebugging { def debugString(tp: Type) = debug(tp) } def paramString(tp: Type) = typeDebug.str params tp.params - def typeParamsString(tp: Type) = typeDebug.str brackets (tp.typeParams map (_.defString)) + def typeParamsString(tp: Type) = typeDebug.str.brackets(tp.typeParams.map(_.defString)) def debugString(tp: Type) = typeDebug debugString tp } diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index fc7a52b0e6a6..d1c46db78d16 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -1559,18 +1559,16 @@ trait Types /** Bounds notation used in Scala syntax. * For example +This <: scala.collection.generic.Sorted[K,This]. */ - private[internal] def scalaNotation(typeString: Type => String): String = { + private[internal] def scalaNotation(typeString: Type => String): String = (if (emptyLowerBound) "" else " >: " + typeString(lo)) + (if (emptyUpperBound) "" else " <: " + typeString(hi)) - } /** Bounds notation used in https://adriaanm.github.com/files/higher.pdf. * For example *(scala.collection.generic.Sorted[K,This]). 
*/ - private[internal] def starNotation(typeString: Type => String): String = { + private[internal] def starNotation(typeString: Type => String): String = if (emptyLowerBound && emptyUpperBound) "" else if (emptyLowerBound) s"(${typeString(hi)})" else s"(${typeString(lo)}, ${typeString(hi)})" - } override def kind = "TypeBoundsType" override def mapOver(map: TypeMap): Type = { val lo1 = map match { diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala index 799fbf760c2e..888dde133742 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala @@ -45,7 +45,7 @@ trait ModelFactoryTypeSupport { appendType0(tp) case tp :: tps => appendType0(tp) - nameBuffer append sep + nameBuffer.append(sep) appendTypes0(tps, sep) } @@ -202,15 +202,16 @@ trait ModelFactoryTypeSupport { /* Polymorphic types */ case PolyType(tparams, result) => assert(tparams.nonEmpty, "polymorphic type must have at least one type parameter") - def typeParamsToString(tps: List[Symbol]): String = if (tps.isEmpty) "" else - tps.map{tparam => - tparam.varianceString + tparam.name + typeParamsToString(tparam.typeParams) - }.mkString("[", ", ", "]") - nameBuffer append typeParamsToString(tparams) + def typeParamsToString(tps: List[Symbol]): String = + if (tps.isEmpty) "" + else + tps.map { tparam => + tparam.varianceString + tparam.unexpandedName + typeParamsToString(tparam.typeParams) + }.mkString("[", ", ", "]") + nameBuffer.append(typeParamsToString(tparams)) appendType0(result) case et@ExistentialType(quantified, underlying) => - def appendInfoStringReduced(sym: Symbol, tp: Type): Unit = { if (sym.isType && !sym.isAliasType && !sym.isClass) { tp match { diff --git a/test/files/neg/t2462c.scala b/test/files/neg/t2462c.scala index 9e62c8eb9ca8..d059a47ceb09 100644 --- a/test/files/neg/t2462c.scala +++ b/test/files/neg/t2462c.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings +// scalac: -Werror // import annotation._ diff --git a/test/files/neg/t5606.check b/test/files/neg/t5606.check new file mode 100644 index 000000000000..c51564f29abe --- /dev/null +++ b/test/files/neg/t5606.check @@ -0,0 +1,16 @@ +t5606.scala:3: error: identifier expected but '_' found. +case class CaseTest[_](someData: String) + ^ +t5606.scala:5: error: using `?` as a type name requires backticks. +case class CaseTest_?[?](someData: String) + ^ +t5606.scala:8: error: identifier expected but '_' found. +case class CaseTest2[_, _](someData: String) + ^ +t5606.scala:11: error: identifier expected but '_' found. + def f[_](x: Int) = ??? + ^ +t5606.scala:23: error: using `?` as a type name requires backticks. + def regress_?[F[?]] = 2 + ^ +5 errors diff --git a/test/files/neg/t5606.scala b/test/files/neg/t5606.scala new file mode 100644 index 000000000000..c44b1e96e378 --- /dev/null +++ b/test/files/neg/t5606.scala @@ -0,0 +1,26 @@ +// scalac: -Xsource:3 +// was: _ taken as ident of type param, but poor interactions below +case class CaseTest[_](someData: String) + +case class CaseTest_?[?](someData: String) + +// was: _ already defined +case class CaseTest2[_, _](someData: String) + +class C { + def f[_](x: Int) = ??? +} + +object Test extends App { + def f0 = new CaseTest("X") + def f1: CaseTest[Int] = new CaseTest[Int]("X") // OK! 
+ def f2: CaseTest[Int] = CaseTest[Int]("X") // CaseTest[Any] + def f3 = new CaseTest[Int]("X").copy() // CaseTest[Any] + def f4 = new CaseTest[Int]("X").copy[Int]() // CaseTest[Any] + + def regress0[F[_]] = 0 + def regress1[F[_, _]] = 1 + def regress_?[F[?]] = 2 + //def regress0[F[_$$1]] = 0; + //def regress1[F[_$$2, _$$3]] = 1 +} diff --git a/test/files/neg/t5606b.check b/test/files/neg/t5606b.check new file mode 100644 index 000000000000..cdbd20ecb3e9 --- /dev/null +++ b/test/files/neg/t5606b.check @@ -0,0 +1,15 @@ +t5606b.scala:4: warning: Top-level wildcard is not allowed and will error under -Xsource:3 +case class CaseTest[_](someData: String) + ^ +t5606b.scala:7: warning: Top-level wildcard is not allowed and will error under -Xsource:3 +case class CaseTest2[_, _](someData: String) + ^ +t5606b.scala:7: warning: Top-level wildcard is not allowed and will error under -Xsource:3 +case class CaseTest2[_, _](someData: String) + ^ +t5606b.scala:10: warning: Top-level wildcard is not allowed and will error under -Xsource:3 + def f[_](x: Int) = ??? + ^ +error: No warnings can be incurred under -Werror. +4 warnings +1 error diff --git a/test/files/neg/t5606b.scala b/test/files/neg/t5606b.scala new file mode 100644 index 000000000000..3931de26d43b --- /dev/null +++ b/test/files/neg/t5606b.scala @@ -0,0 +1,11 @@ +// scalac: -Xlint -Werror +// +// was: _ taken as ident of type param, now a fresh name +case class CaseTest[_](someData: String) + +// was: _ already defined, now a fresh name +case class CaseTest2[_, _](someData: String) + +class C { + def f[_](x: Int) = ??? +} diff --git a/test/files/neg/trailing-commas.check b/test/files/neg/trailing-commas.check index 17ceb40c09d1..a371d51fe2fb 100644 --- a/test/files/neg/trailing-commas.check +++ b/test/files/neg/trailing-commas.check @@ -61,15 +61,9 @@ trait TypeArgs { def f: C[Int, String, ] } trailing-commas.scala:23: error: identifier expected but ']' found. trait TypeParamClause { type C[A, B, ] } ^ -trailing-commas.scala:23: error: ']' expected but '}' found. -trait TypeParamClause { type C[A, B, ] } - ^ trailing-commas.scala:24: error: identifier expected but ']' found. trait FunTypeParamClause { def f[A, B, ] } ^ -trailing-commas.scala:24: error: ']' expected but '}' found. -trait FunTypeParamClause { def f[A, B, ] } - ^ trailing-commas.scala:26: error: identifier expected but ')' found. trait SimpleType { def f: (Int, String, ) } ^ @@ -127,4 +121,4 @@ trait SimpleType2 { def f: (Int, ) } trailing-commas.scala:48: error: ')' expected but '}' found. trait SimpleType2 { def f: (Int, ) } ^ -43 errors +41 errors diff --git a/test/files/pos/t5606.scala b/test/files/pos/t5606.scala index 2545271e32d8..8daffaf1e783 100644 --- a/test/files/pos/t5606.scala +++ b/test/files/pos/t5606.scala @@ -1,9 +1,9 @@ +// was: _ taken as ident of type param, now a fresh name +case class CaseTest[_](someData: String) +// was: _ already defined, now a fresh name +case class CaseTest2[_, _](someData: String) - - - - - - -case class CaseTest[_](someData:String) +class C { + def f[_](x: Int) = ??? 
+} From 13a981a5c874b086018e4ce904a9c517b166eb68 Mon Sep 17 00:00:00 2001 From: danicheg Date: Wed, 6 Oct 2021 21:54:46 +0300 Subject: [PATCH 401/769] Enhance warning message in the GeneratedClassHandler --- .../tools/nsc/backend/jvm/GeneratedClassHandler.scala | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala b/src/compiler/scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala index 5853b52a3142..945d9b539bca 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala @@ -60,7 +60,13 @@ private[jvm] object GeneratedClassHandler { case maxThreads => if (settings.areStatisticsEnabled) - runReporting.warning(NoPosition, "jvm statistics are not reliable with multi-threaded jvm class writing", WarningCategory.Other, site = "") + runReporting.warning( + NoPosition, + "JVM statistics are not reliable with multi-threaded JVM class writing.\n" + + "To collect compiler statistics remove the " + settings.YaddBackendThreads.name + " setting.", + WarningCategory.Other, + site = "" + ) val additionalThreads = maxThreads - 1 // The thread pool queue is limited in size. When it's full, the `CallerRunsPolicy` causes // a new task to be executed on the main thread, which provides back-pressure. From 2f65f62d3f04545e49a7dc9c93a791cee8c4774b Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Thu, 30 Sep 2021 16:43:27 +0200 Subject: [PATCH 402/769] [backport] emit deprecations for classOf arguments --- .../scala/tools/nsc/typechecker/RefChecks.scala | 4 ++++ test/files/neg/classOfDeprecation.check | 9 +++++++++ test/files/neg/classOfDeprecation.scala | 8 ++++++++ test/files/run/t4813.check | 16 +++++++++++++++- test/files/run/t4813.scala | 2 ++ 5 files changed, 38 insertions(+), 1 deletion(-) create mode 100644 test/files/neg/classOfDeprecation.check create mode 100644 test/files/neg/classOfDeprecation.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index ea5f8295dc25..abbc25953319 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -1857,6 +1857,10 @@ abstract class RefChecks extends Transform { case x @ Select(_, _) => transformSelect(x) + case Literal(Constant(tp: Type)) => + checkTypeRef(tp, tree, skipBounds = false) + tree + case UnApply(fun, args) => transform(fun) // just make sure we enterReference for unapply symbols, note that super.transform(tree) would not transform(fun) // transformTrees(args) // TODO: is this necessary? could there be forward references in the args?? diff --git a/test/files/neg/classOfDeprecation.check b/test/files/neg/classOfDeprecation.check new file mode 100644 index 000000000000..e80b2d643a2b --- /dev/null +++ b/test/files/neg/classOfDeprecation.check @@ -0,0 +1,9 @@ +classOfDeprecation.scala:6: warning: class C is deprecated (since like, forever): no no! + val t = classOf[C] + ^ +classOfDeprecation.scala:7: warning: class C is deprecated (since like, forever): no no! + @ann(classOf[C]) def u = 1 + ^ +error: No warnings can be incurred under -Xfatal-warnings. 
+two warnings found +one error found diff --git a/test/files/neg/classOfDeprecation.scala b/test/files/neg/classOfDeprecation.scala new file mode 100644 index 000000000000..497d11ad6197 --- /dev/null +++ b/test/files/neg/classOfDeprecation.scala @@ -0,0 +1,8 @@ +// scalac: -deprecation -Xfatal-warnings + +@deprecated("no no!", "like, forever") class C +class ann(x: Any) extends annotation.Annotation +object T { + val t = classOf[C] + @ann(classOf[C]) def u = 1 +} diff --git a/test/files/run/t4813.check b/test/files/run/t4813.check index 2986ff957198..42f17f47f4b5 100644 --- a/test/files/run/t4813.check +++ b/test/files/run/t4813.check @@ -1 +1,15 @@ -warning: two deprecations (since 2.11.0); re-run with -deprecation for details +t4813.scala:19: warning: object DoubleLinkedList in package mutable is deprecated (since 2.11.0): low-level linked lists are deprecated + runTest(DoubleLinkedList(1,2,3))(_.clone) { buf => buf transform (_ + 1) } + ^ +t4813.scala:19: warning: class DoubleLinkedList in package mutable is deprecated (since 2.11.0): low-level linked lists are deprecated due to idiosyncrasies in interface and incomplete features + runTest(DoubleLinkedList(1,2,3))(_.clone) { buf => buf transform (_ + 1) } + ^ +t4813.scala:22: warning: object LinkedList in package mutable is deprecated (since 2.11.0): low-level linked lists are deprecated + runTest(LinkedList(1,2,3))(_.clone) { buf => buf transform (_ + 1) } + ^ +t4813.scala:22: warning: class LinkedList in package mutable is deprecated (since 2.11.0): low-level linked lists are deprecated due to idiosyncrasies in interface and incomplete features + runTest(LinkedList(1,2,3))(_.clone) { buf => buf transform (_ + 1) } + ^ +t4813.scala:26: warning: class Stack in package mutable is deprecated (since 2.12.0): Stack is an inelegant and potentially poorly-performing wrapper around List. Use a List assigned to a var instead. 
+ runTest(Stack(1,2,3))(_.clone) { buf => buf transform (_ + 1) } + ^ diff --git a/test/files/run/t4813.scala b/test/files/run/t4813.scala index 6d48ca87588e..99cf28bdb43f 100644 --- a/test/files/run/t4813.scala +++ b/test/files/run/t4813.scala @@ -1,3 +1,5 @@ +// scalac: -deprecation + import collection.mutable._ import reflect._ From 675a7c0ca7bcc5ba23f5055eed960cf0cd7eec71 Mon Sep 17 00:00:00 2001 From: NthPortal Date: Mon, 30 Aug 2021 18:16:10 -0400 Subject: [PATCH 403/769] Add more addAll benchmarks for `ArrayBuffer` --- .../mutable/ArrayBufferBenchmark.scala | 39 +++++++++++++++---- 1 file changed, 32 insertions(+), 7 deletions(-) diff --git a/test/benchmarks/src/main/scala/scala/collection/mutable/ArrayBufferBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/mutable/ArrayBufferBenchmark.scala index aafa899e3442..50c14dbfe777 100644 --- a/test/benchmarks/src/main/scala/scala/collection/mutable/ArrayBufferBenchmark.scala +++ b/test/benchmarks/src/main/scala/scala/collection/mutable/ArrayBufferBenchmark.scala @@ -28,11 +28,15 @@ class ArrayBufferBenchmark { @Param(Array(/*"0", "1",*/ "10", "100", "1000", "10000")) var size: Int = _ - var ref: ArrayBuffer[Int] = _ + var ref : ArrayBuffer[Int] = _ + var set : scala.collection.immutable.Set[Int] = _ + var list: List[Int] = _ @Setup(Level.Trial) def init: Unit = { ref = new ArrayBuffer - for(i <- 0 until size) ref += i + for (i <- 0 until size) ref += i + set = ref.toSet + list = ref.toList } @Benchmark def filterInPlace(bh: Blackhole): Unit = { @@ -44,24 +48,45 @@ class ArrayBufferBenchmark { @Benchmark def update(bh: Blackhole): Unit = { val b = ref.clone() var i = 0 - while(i < size) { + while (i < size) { b.update(i, -1) i += 2 } bh.consume(b) } - @Benchmark def addAll(bh: Blackhole): Unit = { + // append `ArrayBuffer` + @Benchmark def addAll1(bh: Blackhole): Unit = { val b1 = ref.clone() val b2 = ref.clone() - var i = 0 b1.addAll(b2) bh.consume(b1) } + // append collection with known size + @Benchmark def addAll2(bh: Blackhole): Unit = { + val b1 = ref.clone() + b1.addAll(set) + bh.consume(b1) + } + + // append collection without known size + @Benchmark def addAll3(bh: Blackhole): Unit = { + val b1 = ref.clone() + b1.addAll(list) + bh.consume(b1) + } + + // append `IterableOnce` with no known size + @Benchmark def addAll4(bh: Blackhole): Unit = { + val b1 = ref.clone() + b1.addAll(list.iterator) + bh.consume(b1) + } + @Benchmark def flatMapInPlace1(bh: Blackhole): Unit = { - val b = ref.clone() - val seq = Seq(0,0) + val b = ref.clone() + val seq = scala.Seq(0, 0) b.flatMapInPlace { _ => seq } bh.consume(b) } From 447a22b9254d505a43c6fca64d6dc5e2629a25cd Mon Sep 17 00:00:00 2001 From: NthPortal Date: Mon, 30 Aug 2021 18:19:12 -0400 Subject: [PATCH 404/769] [bug#12284] Ensure `ArrayBufferView` is consistent with its buffer Ensure `ArrayBufferView` is consistent with its buffer. Simplify `ArrayBuffer#insertAll` code. 
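An illustrative sketch of the behaviour this change guarantees, mirroring the new `viewConsistency` test added below; the snippet itself is not part of the patch and can be pasted into a 2.13 REPL:

    import scala.collection.mutable.ArrayBuffer

    val buf  = ArrayBuffer.from(1 to 50)
    val view = buf.view               // the view now delegates to the buffer itself,
    buf.dropInPlace(30)               // not to a snapshot of its backing array and size,
    assert(buf.sameElements(view))    // so later structural changes stay visible through it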
--- .../collection/mutable/ArrayBuffer.scala | 57 +++++++++++-------- .../collection/mutable/ArrayBufferTest.scala | 17 ++++++ 2 files changed, 49 insertions(+), 25 deletions(-) diff --git a/src/library/scala/collection/mutable/ArrayBuffer.scala b/src/library/scala/collection/mutable/ArrayBuffer.scala index e60f50587fa9..db2fae1bfcd9 100644 --- a/src/library/scala/collection/mutable/ArrayBuffer.scala +++ b/src/library/scala/collection/mutable/ArrayBuffer.scala @@ -114,7 +114,8 @@ class ArrayBuffer[A] private (initialElements: Array[AnyRef], initialSize: Int) def length = size0 - override def view: ArrayBufferView[A] = new ArrayBufferView(array, size0, () => mutationCount) + // TODO: return `IndexedSeqView` rather than `ArrayBufferView` + override def view: ArrayBufferView[A] = new ArrayBufferView(this, () => mutationCount) override def iterableFactory: SeqFactory[ArrayBuffer] = ArrayBuffer @@ -176,27 +177,21 @@ class ArrayBuffer[A] private (initialElements: Array[AnyRef], initialSize: Int) case elems: collection.Iterable[A] => val elemsLength = elems.size if (elemsLength > 0) { - ensureSize(length + elemsLength) - Array.copy(array, index, array, index + elemsLength, size0 - index) - size0 = size0 + elemsLength - elems match { - case elems: ArrayBuffer[_] => - // if `elems eq this`, this works because `elems.array eq this.array`, - // we didn't overwrite the values being inserted after moving them in - // the previous copy a few lines up, and `System.arraycopy` will - // effectively "read" all the values before overwriting any of them. - Array.copy(elems.array, 0, array, index, elemsLength) - case _ => - var i = 0 - val it = elems.iterator - while (i < elemsLength) { - this(index + i) = it.next() - i += 1 - } - } + val len = size0 + val newSize = len + elemsLength + ensureSize(newSize) + Array.copy(array, index, array, index + elemsLength, len - index) + // if `elems eq this`, this copy is safe because + // - `elems.array eq this.array` + // - we didn't overwrite the values being inserted after moving them in + // the previous line + // - `copyElemsToArray` will call `System.arraycopy` + // - `System.arraycopy` will effectively "read" all the values before + // overwriting any of them when two arrays are the the same reference + IterableOnce.copyElemsToArray(elems, array.asInstanceOf[Array[Any]], index, elemsLength) + size0 = newSize // update size AFTER the copy, in case we're inserting a proxy } - case _ => - insertAll(index, ArrayBuffer.from(elems)) + case _ => insertAll(index, ArrayBuffer.from(elems)) } } @@ -317,18 +312,30 @@ object ArrayBuffer extends StrictOptimizedSeqFactory[ArrayBuffer] { } } -final class ArrayBufferView[A] private[mutable](val array: Array[AnyRef], val length: Int, mutationCount: () => Int) +// TODO: use `CheckedIndexedSeqView.Id` once we can change the return type of `ArrayBuffer#view` +final class ArrayBufferView[A] private[mutable](underlying: ArrayBuffer[A], mutationCount: () => Int) extends AbstractIndexedSeqView[A] { - @deprecated("never intended to be public; call ArrayBuffer#view instead", since = "2.13.6") + @deprecated("never intended to be public; call ArrayBuffer#view instead", since = "2.13.7") def this(array: Array[AnyRef], length: Int) = { // this won't actually track mutation, but it would be a pain to have the implementation // check if we have a method to get the current mutation count or not on every method and // change what it does based on that. hopefully no one ever calls this. 
- this(array, length, () => 0) + this({ + val _array = array + val _length = length + new ArrayBuffer[A](0) { + this.array = _array + this.size0 = _length + } + }, () => 0) } + @deprecated("never intended to be public", since = "2.13.7") + def array: Array[AnyRef] = underlying.toArray[Any].asInstanceOf[Array[AnyRef]] + @throws[IndexOutOfBoundsException] - def apply(n: Int): A = if (n < length) array(n).asInstanceOf[A] else throw new IndexOutOfBoundsException(s"$n is out of bounds (min 0, max ${length - 1})") + def apply(n: Int): A = underlying(n) + def length: Int = underlying.length override protected[this] def className = "ArrayBufferView" // we could inherit all these from `CheckedIndexedSeqView`, except this class is public diff --git a/test/junit/scala/collection/mutable/ArrayBufferTest.scala b/test/junit/scala/collection/mutable/ArrayBufferTest.scala index fcdd04cc3875..1d934a63b967 100644 --- a/test/junit/scala/collection/mutable/ArrayBufferTest.scala +++ b/test/junit/scala/collection/mutable/ArrayBufferTest.scala @@ -455,4 +455,21 @@ class ArrayBufferTest { buf.insertAll(1, buf) assertSameElements(List(1, 1, 2, 3, 2, 3), buf) } + + // scala/bug#12284 + @Test + def viewConsistency(): Unit = { + def check[U](op: ArrayBuffer[Int] => U): Unit = { + val buf = ArrayBuffer.from(1 to 50) + val view = buf.view + op(buf) + assertSameElements(buf, view) + } + + check(_.clear()) + check(_.dropRightInPlace(30)) + check(_.dropInPlace(30)) + check(_ ++= (1 to 100)) + check(_.insertAll(1, 1 to 100)) + } } From eed4f0149f536da530aa56fd80999c3d02c24c6d Mon Sep 17 00:00:00 2001 From: NthPortal Date: Fri, 24 Sep 2021 00:56:41 -0400 Subject: [PATCH 405/769] Fix unreported bug in `ArrayOps.ArrayIterator` Fix unreported bug in `ArrayOps.ArrayIterator` where index in array can overflow to negative, and in such cases the iterator reports `hasNext` incorrectly as `true`. Improve code documentation and readability of `Iterator.patch` implementation, which was bumped into while investigating the bug. Discovered while writing tests for bug in `View.Patched` (see following commit). 
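A minimal reproduction of the overflow, along the lines of the `iterator_drop` test added below (illustrative only, not part of the patch):

    val it = Array(1, 2, 3).iterator
      .drop(Int.MaxValue)
      .drop(Int.MaxValue)   // `pos + n` used to overflow to a negative index here
    assert(!it.hasNext)     // the unfixed iterator still reported `true` at this point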
--- src/library/scala/collection/ArrayOps.scala | 9 ++++- src/library/scala/collection/Iterator.scala | 28 +++++++------ .../junit/scala/collection/ArrayOpsTest.scala | 12 ++++++ .../scala/collection/IteratorProperties.scala | 11 ++++-- .../mutable/ArrayBufferProperties.scala | 39 +++++++++++++++++++ 5 files changed, 83 insertions(+), 16 deletions(-) create mode 100644 test/scalacheck/scala/collection/mutable/ArrayBufferProperties.scala diff --git a/src/library/scala/collection/ArrayOps.scala b/src/library/scala/collection/ArrayOps.scala index aec8156599be..a4948ac01f2e 100644 --- a/src/library/scala/collection/ArrayOps.scala +++ b/src/library/scala/collection/ArrayOps.scala @@ -123,7 +123,7 @@ object ArrayOps { private[collection] final class ArrayIterator[@specialized(Specializable.Everything) A](xs: Array[A]) extends AbstractIterator[A] with Serializable { private[this] var pos = 0 private[this] val len = xs.length - override def knownSize = len - pos + override def knownSize: Int = len - pos def hasNext: Boolean = pos < len def next(): A = try { val r = xs(pos) @@ -131,7 +131,12 @@ object ArrayOps { r } catch { case _: ArrayIndexOutOfBoundsException => Iterator.empty.next() } override def drop(n: Int): Iterator[A] = { - if (n > 0) pos = Math.min(xs.length, pos + n) + if (n > 0) { + val newPos = pos + n + pos = + if (newPos < 0 /* overflow */) len + else Math.min(len, newPos) + } this } } diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala index 40f697c3fe82..911ff34f1912 100644 --- a/src/library/scala/collection/Iterator.scala +++ b/src/library/scala/collection/Iterator.scala @@ -907,31 +907,37 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite def patch[B >: A](from: Int, patchElems: Iterator[B], replaced: Int): Iterator[B] = new AbstractIterator[B] { private[this] var origElems = self - private[this] var i = if (from > 0) from else 0 // Counts down, switch to patch on 0, -1 means use patch first - def hasNext: Boolean = { - if (i == 0) { + // > 0 => that many more elems from `origElems` before switching to `patchElems` + // 0 => need to drop elems from `origElems` and start using `patchElems` + // -1 => have dropped elems from `origElems`, will be using `patchElems` until it's empty + // and then using what's left of `origElems` after the drop + private[this] var state = if (from > 0) from else 0 + + // checks state and handles 0 => -1 + @inline private[this] def switchToPatchIfNeeded(): Unit = + if (state == 0) { origElems = origElems drop replaced - i = -1 + state = -1 } + + def hasNext: Boolean = { + switchToPatchIfNeeded() origElems.hasNext || patchElems.hasNext } def next(): B = { - if (i == 0) { - origElems = origElems drop replaced - i = -1 - } - if (i < 0) { + switchToPatchIfNeeded() + if (state < 0 /* == -1 */) { if (patchElems.hasNext) patchElems.next() else origElems.next() } else { if (origElems.hasNext) { - i -= 1 + state -= 1 origElems.next() } else { - i = -1 + state = -1 patchElems.next() } } diff --git a/test/junit/scala/collection/ArrayOpsTest.scala b/test/junit/scala/collection/ArrayOpsTest.scala index 3283caa252fd..d8f9eb4229f3 100644 --- a/test/junit/scala/collection/ArrayOpsTest.scala +++ b/test/junit/scala/collection/ArrayOpsTest.scala @@ -142,4 +142,16 @@ class ArrayOpsTest { assertEquals(classOf[Double], something.intersect(empty).getClass.getComponentType) assertTrue(something.intersect(empty).isEmpty) } + + // discovered while working on scala/scala#9388 + @Test + def 
iterator_drop(): Unit = { + val it = Array(1, 2, 3) + .iterator + .drop(Int.MaxValue) + .drop(Int.MaxValue) // potential index overflow to negative + assert(!it.hasNext) // bug had index as negative and this returning true + // even though the index is both out of bounds and should + // always be between `0` and `Array#length`. + } } diff --git a/test/scalacheck/scala/collection/IteratorProperties.scala b/test/scalacheck/scala/collection/IteratorProperties.scala index d20e24c33b7d..62481d6a4895 100644 --- a/test/scalacheck/scala/collection/IteratorProperties.scala +++ b/test/scalacheck/scala/collection/IteratorProperties.scala @@ -29,17 +29,22 @@ object IteratorProperties extends Properties("Iterator") { case it: Iterator[Int] => View.dropRightIterator(it, n) case x => throw new MatchError(x) }) + property("patch") = check((it, n) => it match { + case it: Iterable[Int] => it.iterator.patch(1, Iterator.empty, n) + case it: Iterator[Int] => it.patch(1, Iterator.empty, n) + case x => throw new MatchError(x) + }) def check(f: (IterableOnceOps[Int, IterableOnce, IterableOnce[Int]], Int) => IterableOnce[Int]): Prop = forAll(Arbitrary.arbitrary[Seq[Int]], smallInteger) { (s: Seq[Int], n: Int) => val indexed = s.toIndexedSeq // IndexedSeqs and their Iterators have a knownSize val simple = new SimpleIterable(s) // SimpleIterable and its Iterator don't - val stream = LazyList.from(s) // Lazy + val lazyList = LazyList.from(s) // Lazy val indexed1 = f(indexed, n).iterator.to(Seq) val indexed2 = f(indexed.iterator, n).iterator.to(Seq) val simple1 = f(simple, n).iterator.to(Seq) val simple2 = f(simple.iterator, n).iterator.to(Seq) - val stream1 = f(stream, n).iterator.to(Seq) - val stream2 = f(stream.iterator, n).iterator.to(Seq) + val stream1 = f(lazyList, n).iterator.to(Seq) + val stream2 = f(lazyList.iterator, n).iterator.to(Seq) (indexed1 == indexed2) :| s"indexed: $indexed1 != $indexed2" && (simple1 == simple2) :| s"simple: $simple1 != $simple2" && (stream1 == stream2) :| s"stream: $stream1 != $stream2" && diff --git a/test/scalacheck/scala/collection/mutable/ArrayBufferProperties.scala b/test/scalacheck/scala/collection/mutable/ArrayBufferProperties.scala new file mode 100644 index 000000000000..193c49d47f9b --- /dev/null +++ b/test/scalacheck/scala/collection/mutable/ArrayBufferProperties.scala @@ -0,0 +1,39 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection.mutable + +import org.scalacheck._ +import org.scalacheck.Prop._ + +object ArrayBufferProperties extends Properties("mutable.ArrayBuffer") { + + type Elem = Int + + property("view consistency after modifications") = forAll { (buf: ArrayBuffer[Elem]) => + def check[U](op: ArrayBuffer[Elem] => U): Prop = { + val b = buf.clone() + val view = b.view + op(b) // modifies the buffer + b.sameElements(view) + } + + val spaceForMoreElems = buf.sizeIs <= (Int.MaxValue / 2 - 101) + + (check(_.clear()) :| "_.clear()") && + (check(_.dropRightInPlace(1)) :| "_.dropRightInPlace(1)") && + (check(_.dropInPlace(1)) :| "_.dropInPlace(1)") && + (spaceForMoreElems ==> (check(_ ++= (1 to 100)) :| "_ ++= (1 to 100)")) && + (spaceForMoreElems ==> (check(_.prependAll(1 to 100)) :| "_.prependAll(1 to 100)")) && + ((!buf.isEmpty && spaceForMoreElems) ==> (check(_.insertAll(1, 1 to 100)) :| "_.insertAll(1, 1 to 100)")) + } +} From 8853372306cb8871981d65d712e91a1c203d6d8c Mon Sep 17 00:00:00 2001 From: NthPortal Date: Wed, 1 Sep 2021 22:30:10 -0400 Subject: [PATCH 406/769] Fix unreported bug in `View.Patched` Fix unreported bug in `View.Patched` where iterator is incorrect due to the patch only being iterable once, and already having been exhausted. Discovered while attempting to optimise `ArrayBuffer` in an earlier version of the previous commit. --- src/library/scala/collection/View.scala | 10 +++- test/junit/scala/collection/ViewTest.scala | 19 ++++++- .../scala/collection/ViewProperties.scala | 57 +++++++++++++++++++ 3 files changed, 83 insertions(+), 3 deletions(-) create mode 100644 test/scalacheck/scala/collection/ViewProperties.scala diff --git a/src/library/scala/collection/View.scala b/src/library/scala/collection/View.scala index c84c126626f6..441790c3c6e5 100644 --- a/src/library/scala/collection/View.scala +++ b/src/library/scala/collection/View.scala @@ -404,8 +404,14 @@ object View extends IterableFactory[View] { @SerialVersionUID(3L) private[collection] class Patched[A](underlying: SomeIterableOps[A], from: Int, other: IterableOnce[A], replaced: Int) extends AbstractView[A] { - def iterator: Iterator[A] = underlying.iterator.patch(from, other.iterator, replaced) - override def knownSize: Int = if (underlying.knownSize == 0 && other.knownSize == 0) 0 else super.knownSize + // we may be unable to traverse `other` more than once, so we need to cache it if that's the case + private val _other: Iterable[A] = other match { + case other: Iterable[A] => other + case other => LazyList.from(other) + } + + def iterator: Iterator[A] = underlying.iterator.patch(from, _other.iterator, replaced) + override def knownSize: Int = if (underlying.knownSize == 0 && _other.knownSize == 0) 0 else super.knownSize override def isEmpty: Boolean = if (knownSize == 0) true else iterator.isEmpty } diff --git a/test/junit/scala/collection/ViewTest.scala b/test/junit/scala/collection/ViewTest.scala index 89418aa6a024..cb5814654e37 100644 --- a/test/junit/scala/collection/ViewTest.scala +++ b/test/junit/scala/collection/ViewTest.scala @@ -1,10 +1,10 @@ package scala.collection -import scala.collection.immutable.List import org.junit.Assert._ import org.junit.Test import scala.collection.mutable.{ArrayBuffer, ListBuffer} +import scala.tools.testkit.AssertUtil.assertSameElements class ViewTest { @@ -113,4 +113,21 @@ class ViewTest { def _toString(): Unit = { assertEquals("View()", View(1, 2, 3).toString) } + + // see scala/scala#9388 + @Test + def patch(): Unit = { + // test re-iterability + val v1 = 
List(2).view.patch(1, List(3, 4, 5).iterator, 0) + assertSameElements(Seq(2, 3, 4, 5), v1.toList) + assertSameElements(Seq(2, 3, 4, 5), v1.toList) // check that it works twice + + // https://github.com/scala/scala/pull/9388#discussion_r709392221 + val v2 = List(2).view.patch(1, Nil, 0) + assert(!v2.isEmpty) + + // https://github.com/scala/scala/pull/9388#discussion_r709481748 + val v3 = Nil.view.patch(0, List(1).iterator, 0) + assert(v3.knownSize != 0) + } } diff --git a/test/scalacheck/scala/collection/ViewProperties.scala b/test/scalacheck/scala/collection/ViewProperties.scala new file mode 100644 index 000000000000..1814adc1c690 --- /dev/null +++ b/test/scalacheck/scala/collection/ViewProperties.scala @@ -0,0 +1,57 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + +import org.scalacheck._ +import org.scalacheck.Prop._ + +import scala.collection.mutable.ListBuffer + +object ViewProperties extends Properties("View") { + + type Elem = Int + type SomeSeqOps = SeqOps[Elem, Iterable, Iterable[Elem]] + + private def expectedPatch(seq: SomeSeqOps, from: Int, other: Iterable[Elem], replaced: Int): Seq[Elem] = { + val (prefix, suffix) = seq.splitAt(from) + ListBuffer.empty[Elem] ++= prefix ++= other ++= suffix.drop(replaced) + } + + property("`SeqOps#patch(...)` (i.e. `iterableFactory.from(View.Patched(...))`) correctness") = { + // we use `mutable.ArraySeq` because it uses the default `patch` + // implementation, rather than one from `StrictOptimisedSeqOps` + forAll { (seq: mutable.ArraySeq[Elem], from: Int, other: Iterable[Elem], replaced: Int) => + val expected = expectedPatch(seq, from, other, replaced) + val patchedWithIterable = seq.patch(from, other, replaced) + val patchedWithIterableOnce = seq.patch(from, other.iterator, replaced) + + // we don't need to use `sameElements` like below, because + // both `expected` and patched are `Seq` this time + ((expected =? patchedWithIterable) :| "`patch(_, Iterable, _)` is performed correctly") && + ((expected =? patchedWithIterableOnce) :| "`patch(_, IterableOnce, _)` is performed correctly") + } + } + + + property("`SeqOps#view.patch(...)` (i.e. 
`View.Patched` used directly) correctness and consistency") = + forAll { (seq: Seq[Elem], from: Int, other: Iterable[Elem], replaced: Int) => + val expected = expectedPatch(seq, from, other, replaced) + val patchedWithIterable = seq.view.patch(from, other, replaced) + val patchedWithIterableOnce = seq.view.patch(from, other.iterator, replaced) + + (expected.sameElements(patchedWithIterable) :| "`patch(_, Iterable, _)` is performed correctly") && + (expected.sameElements(patchedWithIterable) :| "`view.patch(_, Iterable, _)` remains the same after multiple iterations") && + (expected.sameElements(patchedWithIterableOnce) :| "`patch(_, IterableOnce, _)` is performed correctly") && + (expected.sameElements(patchedWithIterableOnce) :| "`view.patch(_, IterableOnce, _)` remains the same after multiple iterations") + } +} From 631715112ac9bc660bbc68cd81d1c9b7fd05c3c0 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Fri, 1 Oct 2021 09:04:31 +0100 Subject: [PATCH 407/769] Fix outer tests when pattern has a refined prefix --- src/reflect/scala/reflect/internal/TreeGen.scala | 1 + test/files/pos/t12467.scala | 15 +++++++++++++++ 2 files changed, 16 insertions(+) create mode 100644 test/files/pos/t12467.scala diff --git a/src/reflect/scala/reflect/internal/TreeGen.scala b/src/reflect/scala/reflect/internal/TreeGen.scala index 6ae62eb81581..c3aae72e7788 100644 --- a/src/reflect/scala/reflect/internal/TreeGen.scala +++ b/src/reflect/scala/reflect/internal/TreeGen.scala @@ -149,6 +149,7 @@ abstract class TreeGen { def mkAttributedQualifierIfPossible(prefix: Type): Option[Tree] = prefix match { case NoType | NoPrefix | ErrorType => None case TypeRef(_, sym, _) if sym.isModule || sym.isClass || sym.isType => None + case RefinedType(parents, _) if !parents.exists(_.isStable) => None case pre => Some(mkAttributedQualifier(prefix)) } diff --git a/test/files/pos/t12467.scala b/test/files/pos/t12467.scala new file mode 100644 index 000000000000..a0cb4f79dd4f --- /dev/null +++ b/test/files/pos/t12467.scala @@ -0,0 +1,15 @@ +object PagedResponse { + type Aux[Item0] = PagedResponse { type Item = Item0 } +} + +trait PagedResponse { + type Item + sealed trait NextPage + case class NoMorePages() extends NextPage +} + +object Test { + def foo[A](next: PagedResponse.Aux[A]#NextPage): Unit = next match { + case _: PagedResponse.Aux[A]#NoMorePages => ??? + } +} From b755e64c46c8c4a5ab2fbc7dca79791b35c2ac5a Mon Sep 17 00:00:00 2001 From: NthPortal Date: Tue, 12 Oct 2021 03:21:27 -0400 Subject: [PATCH 408/769] Update `ArrayBuffer#mutationCount` more precisely Update `ArrayBuffer#mutationCount` only when elements of the buffer are changed or moved, and not when the backing array is resized without changing the collection. `s.c.m.PriorityQueue`, which uses an `ArrayBuffer` as part of its implementation, does not track mutation perfectly. --- .../scala/collection/mutable/ArrayBuffer.scala | 14 ++++++++------ .../collection/mutable/MutationTrackingTest.scala | 4 ++-- 2 files changed, 10 insertions(+), 8 deletions(-) diff --git a/src/library/scala/collection/mutable/ArrayBuffer.scala b/src/library/scala/collection/mutable/ArrayBuffer.scala index db2fae1bfcd9..5fb2357996e1 100644 --- a/src/library/scala/collection/mutable/ArrayBuffer.scala +++ b/src/library/scala/collection/mutable/ArrayBuffer.scala @@ -66,7 +66,6 @@ class ArrayBuffer[A] private (initialElements: Array[AnyRef], initialSize: Int) /** Ensure that the internal array has at least `n` cells. 
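   * Resizing the backing array on its own is not a structural change: as of this commit,
   * `mutationCount` is bumped only by operations that change or move the buffer's elements,
   * not by a mere capacity adjustment.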
*/ protected def ensureSize(n: Int): Unit = { - mutationCount += 1 array = ArrayBuffer.ensureSize(array, size0, n) } @@ -86,7 +85,6 @@ class ArrayBuffer[A] private (initialElements: Array[AnyRef], initialSize: Int) * This allows releasing some unused memory. */ def trimToSize(): Unit = { - mutationCount += 1 resize(length) } @@ -136,10 +134,11 @@ class ArrayBuffer[A] private (initialElements: Array[AnyRef], initialSize: Int) } def addOne(elem: A): this.type = { - val i = size0 - ensureSize(size0 + 1) - size0 += 1 - this(i) = elem + mutationCount += 1 + val oldSize = size0 + ensureSize(oldSize + 1) + size0 = oldSize + 1 + this(oldSize) = elem this } @@ -149,6 +148,7 @@ class ArrayBuffer[A] private (initialElements: Array[AnyRef], initialSize: Int) case elems: ArrayBuffer[_] => val elemsLength = elems.size0 if (elemsLength > 0) { + mutationCount += 1 ensureSize(length + elemsLength) Array.copy(elems.array, 0, array, length, elemsLength) size0 = length + elemsLength @@ -160,6 +160,7 @@ class ArrayBuffer[A] private (initialElements: Array[AnyRef], initialSize: Int) def insert(@deprecatedName("n", "2.13.0") index: Int, elem: A): Unit = { checkWithinBounds(index, index) + mutationCount += 1 ensureSize(size0 + 1) Array.copy(array, index, array, index + 1, size0 - index) size0 += 1 @@ -177,6 +178,7 @@ class ArrayBuffer[A] private (initialElements: Array[AnyRef], initialSize: Int) case elems: collection.Iterable[A] => val elemsLength = elems.size if (elemsLength > 0) { + mutationCount += 1 val len = size0 val newSize = len + elemsLength ensureSize(newSize) diff --git a/test/junit/scala/collection/mutable/MutationTrackingTest.scala b/test/junit/scala/collection/mutable/MutationTrackingTest.scala index c5a03270f01a..98ed439ee0bd 100644 --- a/test/junit/scala/collection/mutable/MutationTrackingTest.scala +++ b/test/junit/scala/collection/mutable/MutationTrackingTest.scala @@ -245,9 +245,9 @@ package MutationTrackingTestImpl { def clearAndShrink(): Unit = checkThrows { _ clearAndShrink 2 } @Test - def trimToSize(): Unit = checkThrows { _.trimToSize() } + def trimToSize(): Unit = checkFine { _.trimToSize() } @Test - def sizeHint(): Unit = checkThrows { _ sizeHint 16 } + def sizeHint(): Unit = checkFine { _ sizeHint 16 } } } From ca88d99a6213cf07940ef9403c4efdcb76dafd8f Mon Sep 17 00:00:00 2001 From: NthPortal Date: Tue, 12 Oct 2021 03:45:07 -0400 Subject: [PATCH 409/769] Add `insertAll` benchmarks for `ArrayBuffer` --- .../mutable/ArrayBufferBenchmark.scala | 49 +++++++++++++++---- 1 file changed, 39 insertions(+), 10 deletions(-) diff --git a/test/benchmarks/src/main/scala/scala/collection/mutable/ArrayBufferBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/mutable/ArrayBufferBenchmark.scala index 50c14dbfe777..7da2ea9f0d37 100644 --- a/test/benchmarks/src/main/scala/scala/collection/mutable/ArrayBufferBenchmark.scala +++ b/test/benchmarks/src/main/scala/scala/collection/mutable/ArrayBufferBenchmark.scala @@ -63,27 +63,56 @@ class ArrayBufferBenchmark { bh.consume(b1) } - // append collection with known size + // append `Iterable` with known size @Benchmark def addAll2(bh: Blackhole): Unit = { - val b1 = ref.clone() - b1.addAll(set) - bh.consume(b1) + val b = ref.clone() + b.addAll(set) + bh.consume(b) } - // append collection without known size + // append `Iterable` without known size @Benchmark def addAll3(bh: Blackhole): Unit = { - val b1 = ref.clone() - b1.addAll(list) - bh.consume(b1) + val b = ref.clone() + b.addAll(list) + bh.consume(b) } - // append `IterableOnce` with no known 
size + // append `IterableOnce` without known size @Benchmark def addAll4(bh: Blackhole): Unit = { + val b = ref.clone() + b.addAll(list.iterator) + bh.consume(b) + } + + // insert `ArrayBuffer` + @Benchmark def insertAll1(bh: Blackhole): Unit = { val b1 = ref.clone() - b1.addAll(list.iterator) + val b2 = ref.clone() + b1.insertAll(size / 2, b2) bh.consume(b1) } + // insert `Iterable` with known size + @Benchmark def insertAll2(bh: Blackhole): Unit = { + val b = ref.clone() + b.insertAll(size / 2, set) + bh.consume(b) + } + + // insert `Iterable` without known size + @Benchmark def insertAll3(bh: Blackhole): Unit = { + val b = ref.clone() + b.insertAll(size / 2, list) + bh.consume(b) + } + + // insert `IterableOnce` without known size + @Benchmark def insertAll4(bh: Blackhole): Unit = { + val b = ref.clone() + b.insertAll(size / 2, list.iterator) + bh.consume(b) + } + @Benchmark def flatMapInPlace1(bh: Blackhole): Unit = { val b = ref.clone() val seq = scala.Seq(0, 0) From 6c9dd4d2eb6272619c90f55e01f97ec3abefbc72 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Tue, 12 Oct 2021 10:24:57 +0200 Subject: [PATCH 410/769] ClassValueCompat to support systems without java.lang.ClassValue On runtime systems where `java.lang.ClassValue` is not available `ClassValueCompat` re-computes the value on each `get` invocation. Co-authored-by: nwk37011 --- project/MimaFilters.scala | 10 ++++ src/library/scala/reflect/ClassTag.scala | 4 +- .../scala/runtime/ClassValueCompat.scala | 53 +++++++++++++++++++ .../runtime/ModuleSerializationProxy.java | 51 ------------------ .../runtime/ModuleSerializationProxy.scala | 43 +++++++++++++++ .../scala/reflect/macros/Attachments.scala | 3 +- .../scala/reflect/runtime/JavaMirrors.scala | 4 +- 7 files changed, 112 insertions(+), 56 deletions(-) create mode 100644 src/library/scala/runtime/ClassValueCompat.scala delete mode 100644 src/library/scala/runtime/ModuleSerializationProxy.java create mode 100644 src/library/scala/runtime/ModuleSerializationProxy.scala diff --git a/project/MimaFilters.scala b/project/MimaFilters.scala index 31f5633182e6..c29288cb2467 100644 --- a/project/MimaFilters.scala +++ b/project/MimaFilters.scala @@ -78,6 +78,16 @@ object MimaFilters extends AutoPlugin { // #9741 ProblemFilters.exclude[MissingClassProblem]("scala.collection.immutable.SeqMap$SeqMapBuilderImpl"), // private[SeqMap] + + // #9752 + ProblemFilters.exclude[MissingTypesProblem]("scala.reflect.ClassTag$cache$"), + ProblemFilters.exclude[MissingClassProblem]("scala.runtime.ModuleSerializationProxy$"), + ProblemFilters.exclude[MissingTypesProblem]("scala.reflect.runtime.JavaMirrors$JavaMirror$typeTagCache$"), + ProblemFilters.exclude[MissingClassProblem]("scala.runtime.ClassValueCompat"), + ProblemFilters.exclude[MissingClassProblem]("scala.runtime.ClassValueCompat$"), + ProblemFilters.exclude[MissingClassProblem]("scala.runtime.ClassValueCompat$ClassValueInterface"), + ProblemFilters.exclude[MissingClassProblem]("scala.runtime.ClassValueCompat$JavaClassValue"), + ProblemFilters.exclude[MissingClassProblem]("scala.runtime.ClassValueCompat$FallbackClassValue"), ) override val buildSettings = Seq( diff --git a/src/library/scala/reflect/ClassTag.scala b/src/library/scala/reflect/ClassTag.scala index b3ef8f781a9d..5226bb5577a8 100644 --- a/src/library/scala/reflect/ClassTag.scala +++ b/src/library/scala/reflect/ClassTag.scala @@ -15,8 +15,8 @@ package reflect import java.lang.{Class => jClass} import java.lang.ref.{WeakReference => jWeakReference} - import 
scala.annotation.{implicitNotFound, nowarn} +import scala.runtime.ClassValueCompat /** * @@ -116,7 +116,7 @@ object ClassTag { val Null : ClassTag[scala.Null] = Manifest.Null private val cacheDisabled = java.lang.Boolean.getBoolean("scala.reflect.classtag.cache.disable") - private[this] object cache extends ClassValue[jWeakReference[ClassTag[_]]] { + private[this] object cache extends ClassValueCompat[jWeakReference[ClassTag[_]]] { override def computeValue(runtimeClass: jClass[_]): jWeakReference[ClassTag[_]] = new jWeakReference(computeTag(runtimeClass)) diff --git a/src/library/scala/runtime/ClassValueCompat.scala b/src/library/scala/runtime/ClassValueCompat.scala new file mode 100644 index 000000000000..908c36c6ef3b --- /dev/null +++ b/src/library/scala/runtime/ClassValueCompat.scala @@ -0,0 +1,53 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.runtime + + +import scala.runtime.ClassValueCompat._ + +private[scala] abstract class ClassValueCompat[T] extends ClassValueInterface[T] { self => + private val instance: ClassValueInterface[T] = + if (classValueAvailable) new JavaClassValue() + else new FallbackClassValue() + + private class JavaClassValue extends ClassValue[T] with ClassValueInterface[T] { + override def computeValue(cls: Class[_]): T = self.computeValue(cls) + } + + private class FallbackClassValue extends ClassValueInterface[T] { + override def get(cls: Class[_]): T = self.computeValue(cls) + + override def remove(cls: Class[_]): Unit = {} + } + + def get(cls: Class[_]): T = instance.get(cls) + + def remove(cls: Class[_]): Unit = instance.remove(cls) + + protected def computeValue(cls: Class[_]): T +} + +private[scala] object ClassValueCompat { + trait ClassValueInterface[T] { + def get(cls: Class[_]): T + + def remove(cls: Class[_]): Unit + } + + private val classValueAvailable: Boolean = try { + Class.forName("java.lang.ClassValue", false, classOf[Object].getClassLoader) + true + } catch { + case _: ClassNotFoundException => false + } +} diff --git a/src/library/scala/runtime/ModuleSerializationProxy.java b/src/library/scala/runtime/ModuleSerializationProxy.java deleted file mode 100644 index d023faa1389c..000000000000 --- a/src/library/scala/runtime/ModuleSerializationProxy.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala.runtime; - -import java.io.Serializable; -import java.security.PrivilegedActionException; -import java.security.PrivilegedExceptionAction; -import java.util.HashSet; -import java.util.Set; - -/** A serialization proxy for singleton objects */ -public final class ModuleSerializationProxy implements Serializable { - private static final long serialVersionUID = 1L; - private final Class moduleClass; - private static final ClassValue instances = new ClassValue() { - @Override - @SuppressWarnings("removal") // JDK 17 deprecates AccessController - protected Object computeValue(Class type) { - try { - return java.security.AccessController.doPrivileged((PrivilegedExceptionAction) () -> type.getField("MODULE$").get(null)); - } catch (PrivilegedActionException e) { - return rethrowRuntime(e.getCause()); - } - } - }; - - private static Object rethrowRuntime(Throwable e) { - Throwable cause = e.getCause(); - if (cause instanceof RuntimeException) throw (RuntimeException) cause; - else throw new RuntimeException(cause); - } - - public ModuleSerializationProxy(Class moduleClass) { - this.moduleClass = moduleClass; - } - - @SuppressWarnings("unused") - private Object readResolve() { - return instances.get(moduleClass); - } -} diff --git a/src/library/scala/runtime/ModuleSerializationProxy.scala b/src/library/scala/runtime/ModuleSerializationProxy.scala new file mode 100644 index 000000000000..cbb75d8e1032 --- /dev/null +++ b/src/library/scala/runtime/ModuleSerializationProxy.scala @@ -0,0 +1,43 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.runtime + +import java.io.Serializable +import java.security.PrivilegedActionException +import java.security.PrivilegedExceptionAction + +private[runtime] object ModuleSerializationProxy { + private val instances = new ClassValueCompat[Object] { + override protected def computeValue(cls: Class[_]): Object = { + try { + java.security.AccessController.doPrivileged((() => cls.getField("MODULE$").get(null)): PrivilegedExceptionAction[Object]) + } catch { + case e: PrivilegedActionException => + rethrowRuntime(e.getCause) + } + } + } + + private def rethrowRuntime(e: Throwable): Object = { + val cause = e.getCause + cause match { + case exception: RuntimeException => throw exception + case _ => throw new RuntimeException(cause) + } + } +} + +@SerialVersionUID(1L) +final class ModuleSerializationProxy(moduleClass: Class[_]) extends Serializable { + private def readResolve = ModuleSerializationProxy.instances.get(moduleClass) +} diff --git a/src/reflect/scala/reflect/macros/Attachments.scala b/src/reflect/scala/reflect/macros/Attachments.scala index 688721e410e3..05318a84ba5f 100644 --- a/src/reflect/scala/reflect/macros/Attachments.scala +++ b/src/reflect/scala/reflect/macros/Attachments.scala @@ -15,6 +15,7 @@ package reflect package macros import reflect.internal.util.Position +import scala.runtime.ClassValueCompat /** * EXPERIMENTAL @@ -109,7 +110,7 @@ abstract class Attachments { self => } private object Attachments { - private val matchesTagCache = new ClassValue[Function1[Any, Boolean]] { + private val matchesTagCache = new ClassValueCompat[Function1[Any, Boolean]] { override def computeValue(cls: Class[_]): Function[Any, Boolean] = cls.isInstance(_) } } diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala index d0f318bedd37..69ff6474c8cb 100644 --- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala +++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala @@ -38,7 +38,7 @@ import internal.Flags._ import ReflectionUtils._ import scala.annotation.nowarn import scala.reflect.api.TypeCreator -import scala.runtime.{ BoxesRunTime, ScalaRunTime } +import scala.runtime.{BoxesRunTime, ClassValueCompat, ScalaRunTime} private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse with TwoWayCaches { thisUniverse: SymbolTable => @@ -120,7 +120,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive private[this] val fieldCache = new TwoWayCache[jField, TermSymbol] private[this] val tparamCache = new TwoWayCache[jTypeVariable[_ <: GenericDeclaration], TypeSymbol] - private[this] object typeTagCache extends ClassValue[jWeakReference[TypeTag[_]]]() { + private[this] object typeTagCache extends ClassValueCompat[jWeakReference[TypeTag[_]]]() { val typeCreator = new ThreadLocal[TypeCreator]() override protected def computeValue(cls: jClass[_]): jWeakReference[TypeTag[_]] = { From 548c776deb42bc545296a1469291777a16e0b3cf Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Tue, 12 Oct 2021 09:11:15 -0600 Subject: [PATCH 411/769] one more try at fixing spec publishing context: scala/scala-dev#791 --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 9435f22b3493..7da7ef852558 100644 --- a/.travis.yml +++ b/.travis.yml @@ -155,7 +155,7 @@ env: global: - ADOPTOPENJDK=8 - secure: 
"P8EqpZoin/YTnwel9TTxSSAHtXfZ4M262BKXlYUZmjoQsjyvXDAeZ7yAqgAvX5BeRFrGkBToPiE+V60stdWkPKs3+9COw2BDUB1CULBHhRY9Lxordmz0xVhgEfsoH4f6r6wOlIQ9kuaWhmP+JdB/mzOHZhLch9ziPi8O46Z8t4k=" # PRIV_KEY_SECRET, for scripts/travis-publish-spec.sh - - secure: "TuJOUtALynPd+MV1AuMeIpVb8BUBHr7Ul7FS48XhS2PyuTRpEBkSWybYcNg3AXyzmWDAuOjUxbaNMQBvP8vvehTbIYls5H5wTGKvj0D0 - secure: "T1fxtvLTxioyXJYiC/zVYdNYsBOt+0Piw+xE04rB1pzeKahm9+G2mISdcAyqv6/vze9eIJt6jNHHpKX32/Z3Cs1/Ruha4m3k+jblj3S0SbxV6ht2ieJXLT5WoUPFRrU68KXI8wqUadXpjxeJJV53qF2FC4lhfMUsw1IwwMhdaE8=" # PRIVATE_REPO_PASS, for publishing to scala-ci Artifactory + - secure: "T1fxtvLTxioyXJYiC/zVYdNYsBOt+0Piw+xE04rB1pzeKahm9+G2mISdcAyqv6/vze9eIJt6jNHHpKX32/Z3Cs1/Ruha4m3k+jblj3S0SbxV6ht2ieJXLT5WoUPFRrU68KXI8wqUadXpjxeJJV53qF2FC4lhfMUsw1IwwMhdaE8=" # PRIVATE_REPO_PASS, for publishing to scala-ci Artifactory - secure: "dbAvl6KEuLwZ0MVQPZihFsPzCdiLbX0EFk3so+hcfEbksrmLQ1tn4X5ZM7Wy1UDR8uN9lxngEwHch7a7lKqpugzmXMew9Wnikr9WBWbJT77Z+XJ/jHI6YuiCRpRo+nvxXGp9Ry80tSIgx5eju0J83IaJL41BWlBkvyAd7YAHORI=" # GPG_SUBKEY_SECRET, so we can sign JARs - secure: "RTyzS6nUgthupw5M0fPwTlcOym1sWgBo8eXYepB2xGiQnRu4g583BGuNBW1UZ3vIjRETi/UKQ1HtMR+i7D8ptF1cNpomopncVJA1iy7pU2w0MJ0xgIPMuvtkIa3kxocd/AnxAp+UhUad3nC8lDpkvZsUhhyA0fb4iPKipd2b2xY=" # TRAVIS_TOKEN (login with GitHub as SethTisue), for triggering scala-dist job - secure: "FvhicbSeys7VNTj9ZP/aNT0NhiQP/NNV0KRfK7IHxi3uOeaxFVfaQsln4lzqZn8dkntgzzNrE/VhvMIknfnISAPX7bShy6SRyj3V2BlcUpuem8WtwmkCaZ42xlCJteBL7NW0auG/8rxrNIAJXbRObqF+YdK6XsRMWaBMQHky+ss=" # SONA_USER, token username for publishing to Sonatype From e228998e93ad613f03ecd97607a6a198a7819f80 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Tue, 12 Oct 2021 14:03:17 -0600 Subject: [PATCH 412/769] forward-port PR 9751 test case to 2.13.x --- .../tools/nsc/backend/jvm/BTypesTest.scala | 33 +++++++++++++++++-- 1 file changed, 30 insertions(+), 3 deletions(-) diff --git a/test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala b/test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala index 27cd78a375ef..f41dce93959b 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala @@ -8,6 +8,7 @@ import org.junit.runners.JUnit4 import scala.collection.mutable import scala.tools.asm.Opcodes +import scala.tools.testkit.AssertUtil.assertThrows import scala.tools.testkit.BytecodeTesting @RunWith(classOf[JUnit4]) @@ -19,7 +20,8 @@ class BTypesTest extends BytecodeTesting { } import global.genBCode.bTypes._ - def classBTFS(sym: global.Symbol) = global.exitingDelambdafy(classBTypeFromSymbol(sym)) + def duringBackend[T](f: => T) = global.exitingDelambdafy(f) + def classBTFS(sym: global.Symbol) = duringBackend { classBTypeFromSymbol(sym) } def jlo = global.definitions.ObjectClass def jls = global.definitions.StringClass @@ -50,7 +52,7 @@ class BTypesTest extends BytecodeTesting { assert(FLOAT.typedOpcode(Opcodes.IALOAD) == Opcodes.FALOAD) assert(LONG.typedOpcode(Opcodes.IALOAD) == Opcodes.LALOAD) assert(DOUBLE.typedOpcode(Opcodes.IALOAD) == Opcodes.DALOAD) - assert(classBTFS(jls).typedOpcode(Opcodes.IALOAD) == Opcodes.AALOAD) + assert(s.typedOpcode(Opcodes.IALOAD) == Opcodes.AALOAD) assert(UNIT.typedOpcode(Opcodes.IRETURN) == Opcodes.RETURN) assert(BOOL.typedOpcode(Opcodes.IRETURN) == Opcodes.IRETURN) @@ -61,7 +63,7 @@ class BTypesTest extends BytecodeTesting { assert(FLOAT.typedOpcode(Opcodes.IRETURN) == Opcodes.FRETURN) assert(LONG.typedOpcode(Opcodes.IRETURN) == Opcodes.LRETURN) assert(DOUBLE.typedOpcode(Opcodes.IRETURN) == Opcodes.DRETURN) 
- assert(classBTFS(jls).typedOpcode(Opcodes.IRETURN) == Opcodes.ARETURN) + assert(s.typedOpcode(Opcodes.IRETURN) == Opcodes.ARETURN) } @Test @@ -234,4 +236,29 @@ class BTypesTest extends BytecodeTesting { } assertTrue(ArrayBType(s).conformsTo(ArrayBType(o)).get) } + + @Test + def maxValueTypeATest(): Unit = duringBackend { + assertEquals(LONG, LONG.maxValueType(BYTE)) + assertEquals(LONG, LONG.maxValueType(SHORT)) + assertEquals(LONG, LONG.maxValueType(CHAR)) + assertEquals(LONG, LONG.maxValueType(INT)) + assertEquals(LONG, LONG.maxValueType(LONG)) + assertEquals(FLOAT, LONG.maxValueType(FLOAT)) + assertEquals(DOUBLE, LONG.maxValueType(DOUBLE)) + + assertUncomparable(LONG, UNIT) + assertUncomparable(LONG, BOOL) + assertUncomparable(LONG, o) + assertUncomparable(LONG, s) + assertUncomparable(LONG, oArr) + assertUncomparable(LONG, method) + + def assertUncomparable(t1: PrimitiveBType, t2: BType): Unit = { + assertThrows[AssertionError]( + t1.maxValueType(t2), + _.equals(s"Cannot compute maxValueType: $t1, $t2") + ) + } + } } From db9b07cbd8e26649472f87ed7c471dc60b27a566 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Tue, 12 Oct 2021 14:51:05 -0600 Subject: [PATCH 413/769] suppress JDK 17 deprecation warning in ModuleSerializationProxy a tiny sequel to #9752 --- .../scala/runtime/ModuleSerializationProxy.scala | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/src/library/scala/runtime/ModuleSerializationProxy.scala b/src/library/scala/runtime/ModuleSerializationProxy.scala index cbb75d8e1032..0a15f38e8667 100644 --- a/src/library/scala/runtime/ModuleSerializationProxy.scala +++ b/src/library/scala/runtime/ModuleSerializationProxy.scala @@ -15,17 +15,20 @@ package scala.runtime import java.io.Serializable import java.security.PrivilegedActionException import java.security.PrivilegedExceptionAction +import scala.annotation.nowarn private[runtime] object ModuleSerializationProxy { private val instances = new ClassValueCompat[Object] { - override protected def computeValue(cls: Class[_]): Object = { - try { - java.security.AccessController.doPrivileged((() => cls.getField("MODULE$").get(null)): PrivilegedExceptionAction[Object]) - } catch { + @deprecated("", "") // because AccessController is deprecated on JDK 17 + def getModule(cls: Class[_]): Object = + java.security.AccessController.doPrivileged( + (() => cls.getField("MODULE$").get(null)): PrivilegedExceptionAction[Object]) + override protected def computeValue(cls: Class[_]): Object = + try getModule(cls): @nowarn("cat=deprecation") + catch { case e: PrivilegedActionException => rethrowRuntime(e.getCause) } - } } private def rethrowRuntime(e: Throwable): Object = { From 6013cd3fd0b3f49160e377a5f4d56e15ec2c93c6 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Tue, 12 Oct 2021 11:51:04 +0200 Subject: [PATCH 414/769] Deprecate anyVal.formatted(formatString) Java 15 added an instance method `formatted` with inverted parameters --- src/compiler/scala/tools/ant/sabbus/Compilers.scala | 2 +- src/library/scala/Predef.scala | 1 + src/reflect/scala/reflect/internal/util/ChromeTrace.scala | 2 +- 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/src/compiler/scala/tools/ant/sabbus/Compilers.scala b/src/compiler/scala/tools/ant/sabbus/Compilers.scala index 5d71bdb27304..4da9b81be51e 100644 --- a/src/compiler/scala/tools/ant/sabbus/Compilers.scala +++ b/src/compiler/scala/tools/ant/sabbus/Compilers.scala @@ -45,5 +45,5 @@ object Compilers extends scala.collection.DefaultMap[String, Compiler] { } private def 
freeMemoryString: String = - (Runtime.getRuntime.freeMemory/1048576.0).formatted("%10.2f") + " MB" + f"${Runtime.getRuntime.freeMemory/1048576.0}%10.2f MB" } diff --git a/src/library/scala/Predef.scala b/src/library/scala/Predef.scala index e9194d34a12b..b6e548c043cc 100644 --- a/src/library/scala/Predef.scala +++ b/src/library/scala/Predef.scala @@ -327,6 +327,7 @@ object Predef extends LowPriorityImplicits with DeprecatedPredef { * Format strings are as for `String.format` * (@see java.lang.String.format). */ + @deprecated("Use `formatString.format(value)` instead of `value.formatted(formatString)`,\nor use the `f\"\"` string interpolator. In Java 15 and later, `formatted` resolves to the new method in String which has reversed parameters.", "2.12.16") @inline def formatted(fmtstr: String): String = fmtstr format self } diff --git a/src/reflect/scala/reflect/internal/util/ChromeTrace.scala b/src/reflect/scala/reflect/internal/util/ChromeTrace.scala index ac0207c521a6..be0a1bb5018b 100644 --- a/src/reflect/scala/reflect/internal/util/ChromeTrace.scala +++ b/src/reflect/scala/reflect/internal/util/ChromeTrace.scala @@ -43,7 +43,7 @@ final class ChromeTrace(f: Path) extends Closeable { private val traceWriter = FileUtils.newAsyncBufferedWriter(f) private val context = mutable.ArrayStack[JsonContext](TopContext) private val tidCache = new ThreadLocal[String]() { - override def initialValue(): String = Thread.currentThread().getId.formatted("%05d") + override def initialValue(): String = f"${Thread.currentThread().getId}%05d" } objStart() fld("traceEvents") From ca6ca6ca5579a5bf3877e695e5fd0abeef66b562 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Wed, 13 Oct 2021 13:43:12 -0600 Subject: [PATCH 415/769] clean up deprecation warning suppression fixes scala/scala-dev#794 --- .../scala/runtime/ModuleSerializationProxy.scala | 4 ++-- test/files/run/t2318.scala | 10 ++++++---- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/src/library/scala/runtime/ModuleSerializationProxy.scala b/src/library/scala/runtime/ModuleSerializationProxy.scala index 0a15f38e8667..42b3f992d626 100644 --- a/src/library/scala/runtime/ModuleSerializationProxy.scala +++ b/src/library/scala/runtime/ModuleSerializationProxy.scala @@ -19,12 +19,12 @@ import scala.annotation.nowarn private[runtime] object ModuleSerializationProxy { private val instances = new ClassValueCompat[Object] { - @deprecated("", "") // because AccessController is deprecated on JDK 17 + @nowarn("cat=deprecation") // AccessController is deprecated on JDK 17 def getModule(cls: Class[_]): Object = java.security.AccessController.doPrivileged( (() => cls.getField("MODULE$").get(null)): PrivilegedExceptionAction[Object]) override protected def computeValue(cls: Class[_]): Object = - try getModule(cls): @nowarn("cat=deprecation") + try getModule(cls) catch { case e: PrivilegedActionException => rethrowRuntime(e.getCause) diff --git a/test/files/run/t2318.scala b/test/files/run/t2318.scala index 03501b755f2e..ac30df0e902e 100644 --- a/test/files/run/t2318.scala +++ b/test/files/run/t2318.scala @@ -4,13 +4,14 @@ import java.security._ import scala.language.reflectiveCalls +import scala.annotation.nowarn -// SecurityManager is deprecated on JDK 17, so we sprinkle `@deprecated` around +// SecurityManager is deprecated on JDK 17, so we sprinkle `@nowarn` around object Test { trait Bar { def bar: Unit } - @deprecated + @nowarn("cat=deprecation") object Mgr extends SecurityManager { def allowedProperty(name: String) = name == "sun.net.inetaddr.ttl" || 
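The suppression idiom applied throughout this commit, reduced to a minimal standalone sketch
(the method name and body here are illustrative only, not taken from the test):

    import scala.annotation.nowarn

    @nowarn("cat=deprecation") // filters just the deprecation warnings issued for this definition
    def installManager(mgr: SecurityManager): Unit =
      System.setSecurityManager(mgr) // deprecated since JDK 17

Unlike the earlier `@deprecated` workaround, `@nowarn` suppresses only the matching warnings and
does not mark the annotated member itself as deprecated for its callers.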
@@ -32,7 +33,8 @@ object Test { def doDestroy( obj : Destroyable ) : Unit = obj.destroy(); doDestroy( p ); } - @deprecated + + @nowarn("cat=deprecation") def t2() = { System.setSecurityManager(Mgr) @@ -48,6 +50,6 @@ object Test { try t1() catch { case _: java.io.IOException => () } - t2(): @annotation.nowarn("cat=deprecation") + t2() } } From 36393e6e8e59940f049dae1224afe4e88d13d956 Mon Sep 17 00:00:00 2001 From: megri Date: Wed, 20 Oct 2021 17:49:15 +0200 Subject: [PATCH 416/769] Fix documentation for filter MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit "a new iterator consisting of…" => "a new $coll consisting of…" --- src/library/scala/collection/IterableOnce.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/library/scala/collection/IterableOnce.scala b/src/library/scala/collection/IterableOnce.scala index acea80075acc..a9ab03a00117 100644 --- a/src/library/scala/collection/IterableOnce.scala +++ b/src/library/scala/collection/IterableOnce.scala @@ -325,7 +325,7 @@ trait IterableOnceOps[+A, +CC[_], +C] extends Any { this: IterableOnce[A] => /** Selects all elements of this $coll which satisfy a predicate. * * @param p the predicate used to test elements. - * @return a new iterator consisting of all elements of this $coll that satisfy the given + * @return a new $coll consisting of all elements of this $coll that satisfy the given * predicate `p`. The order of the elements is preserved. */ def filter(p: A => Boolean): C From 82a5a390e5aa70a2224c49f7eb0af57342631deb Mon Sep 17 00:00:00 2001 From: Matt Dziuban Date: Thu, 23 Sep 2021 10:41:37 -0400 Subject: [PATCH 417/769] Add -Vimplicits-max-modules option to control how types are printed in -Vimplicits errors. --- .../tools/nsc/settings/ScalaSettings.scala | 1 + .../typechecker/splain/SplainFormatting.scala | 12 ++++---- test/files/run/splain-max-modules.check | 28 +++++++++++++++++++ test/files/run/splain-max-modules.scala | 27 ++++++++++++++++++ 4 files changed, 63 insertions(+), 5 deletions(-) create mode 100644 test/files/run/splain-max-modules.check create mode 100644 test/files/run/splain-max-modules.scala diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index 1b25f95f46c6..007332e26b58 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -503,6 +503,7 @@ trait ScalaSettings extends StandardScalaSettings with Warnings { _: MutableSett val Ytyperdebug = BooleanSetting("-Vtyper", "Trace type assignments.") withAbbreviation "-Ytyper-debug" val Vimplicits = BooleanSetting("-Vimplicits", "Print dependent missing implicits.").withAbbreviation("-Xlog-implicits") val VimplicitsVerboseTree = BooleanSetting("-Vimplicits-verbose-tree", "Display all intermediate implicits in a chain.") + val VimplicitsMaxModules = IntSetting("-Vimplicits-max-modules", "max modules to display when printing types, set to 0 to only print type names", 0, Some((0, Int.MaxValue)), _ => None) val VimplicitsMaxRefined = IntSetting("-Vimplicits-max-refined", "max chars for printing refined types, abbreviate to `F {...}`", Int.MaxValue, Some((0, Int.MaxValue)), _ => None) val VtypeDiffs = BooleanSetting("-Vtype-diffs", "Print found/required error messages as colored diffs.") val logImplicitConv = BooleanSetting("-Vimplicit-conversions", "Print a message whenever an implicit conversion is inserted.") diff --git 
a/src/compiler/scala/tools/nsc/typechecker/splain/SplainFormatting.scala b/src/compiler/scala/tools/nsc/typechecker/splain/SplainFormatting.scala index 4665bb0cd67f..338fd78185b2 100644 --- a/src/compiler/scala/tools/nsc/typechecker/splain/SplainFormatting.scala +++ b/src/compiler/scala/tools/nsc/typechecker/splain/SplainFormatting.scala @@ -224,10 +224,8 @@ trait SplainFormatting extends SplainFormatters { def qualifiedName(path: List[String], name: String): String = s"${pathPrefix(path)}$name" - def stripModules(path: List[String], name: String): Option[Int] => String = { - case Some(keep) => qualifiedName(path.takeRight(keep), name) - case None => name - } + def stripModules(path: List[String], name: String, keep: Int): String = + qualifiedName(path.takeRight(keep), name) case class TypeParts(sym: Symbol, tt: Type) { def modulePath: List[String] = (tt, sym) match { @@ -351,6 +349,9 @@ trait SplainFormatting extends SplainFormatters { def truncateDecls(decls: List[Formatted]): Boolean = settings.VimplicitsMaxRefined.value < decls.map(_.length).sum + def showFormattedQualified(path: List[String], name: String): TypeRepr = + FlatType(stripModules(path, name, settings.VimplicitsMaxModules.value)) + def formattedDiff(left: Formatted, right: Formatted): String = (left, right) match { case (Qualified(lpath, lname), Qualified(rpath, rname)) if lname == rname => val prefix = lpath.reverseIterator.zip(rpath.reverseIterator).takeWhile { case (l, r) => l == r }.size + 1 @@ -363,7 +364,8 @@ trait SplainFormatting extends SplainFormatters { def showFormattedLImpl(tpe: Formatted, break: Boolean): TypeRepr = tpe match { case Simple(name) => FlatType(name) - case Qualified(_, name) => FlatType(name) + case Qualified(Nil, name) => FlatType(name) + case Qualified(path, name) => showFormattedQualified(path, name) case Applied(cons, args) => showTypeApply(showFormatted(cons), args.map(showFormattedL(_, break)), break) case tpe @ Infix(_, _, _, top) => wrapParensRepr(if (break) breakInfix(flattenInfix(tpe)) else FlatType(flattenInfix(tpe).map(showFormatted).mkString(" ")), top) case UnitForm => FlatType("Unit") diff --git a/test/files/run/splain-max-modules.check b/test/files/run/splain-max-modules.check new file mode 100644 index 000000000000..61bfd7240a9c --- /dev/null +++ b/test/files/run/splain-max-modules.check @@ -0,0 +1,28 @@ +newSource1.scala:4: error: implicit error; +!I e: Type + implicitly[Type] + ^ +newSource1.scala:4: error: implicit error; +!I e: Type + implicitly[Type] + ^ +newSource1.scala:4: error: implicit error; +!I e: z.Type + implicitly[Type] + ^ +newSource1.scala:4: error: implicit error; +!I e: y.z.Type + implicitly[Type] + ^ +newSource1.scala:4: error: implicit error; +!I e: x.y.z.Type + implicitly[Type] + ^ +newSource1.scala:4: error: implicit error; +!I e: p.q.r.s.t.u.v.w.x.y.z.Type + implicitly[Type] + ^ +newSource1.scala:4: error: implicit error; +!I e: a.b.c.d.e.f.g.h.i.j.k.l.m.n.o.p.q.r.s.t.u.v.w.x.y.z.Type + implicitly[Type] + ^ diff --git a/test/files/run/splain-max-modules.scala b/test/files/run/splain-max-modules.scala new file mode 100644 index 000000000000..b135f5b312cb --- /dev/null +++ b/test/files/run/splain-max-modules.scala @@ -0,0 +1,27 @@ +import scala.tools.partest._ + +object Test extends DirectTest { + val code: String = """ +package a.b.c.d.e.f.g.h.i.j.k.l.m.n.o.p.q.r.s.t.u.v.w.x.y { + object z { + type Type + implicitly[Type] + } +} +""" + + def show(): Unit = { + def run(modules: Option[Int]): Unit = + compileString(newCompiler(( + "-Vimplicits" +: 
modules.toSeq.flatMap(i => Seq("-Vimplicits-max-modules", i.toString)) + ):_*))(code.trim) + + run(None) + run(Some(0)) + run(Some(1)) + run(Some(2)) + run(Some(3)) + run(Some(11)) + run(Some(Int.MaxValue)) + } +} From 96c5b42fba3da7c124997942603320c3be94e1e9 Mon Sep 17 00:00:00 2001 From: Matt Dziuban Date: Thu, 21 Oct 2021 12:25:46 -0400 Subject: [PATCH 418/769] Only truncate qualified types when full path is a shorthand type. --- .../tools/nsc/settings/ScalaSettings.scala | 1 - .../typechecker/splain/SplainFormatData.scala | 18 ++-- .../typechecker/splain/SplainFormatting.scala | 30 ++++--- test/files/neg/t6323a.check | 2 +- test/files/run/splain-max-modules.check | 28 ------ test/files/run/splain-max-modules.scala | 27 ------ test/files/run/splain-tree.check | 42 ++++----- test/files/run/splain-truncrefined.check | 2 +- test/files/run/splain.check | 87 ++++++++++++------- test/files/run/splain.scala | 20 +++++ 10 files changed, 129 insertions(+), 128 deletions(-) delete mode 100644 test/files/run/splain-max-modules.check delete mode 100644 test/files/run/splain-max-modules.scala diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index 007332e26b58..1b25f95f46c6 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -503,7 +503,6 @@ trait ScalaSettings extends StandardScalaSettings with Warnings { _: MutableSett val Ytyperdebug = BooleanSetting("-Vtyper", "Trace type assignments.") withAbbreviation "-Ytyper-debug" val Vimplicits = BooleanSetting("-Vimplicits", "Print dependent missing implicits.").withAbbreviation("-Xlog-implicits") val VimplicitsVerboseTree = BooleanSetting("-Vimplicits-verbose-tree", "Display all intermediate implicits in a chain.") - val VimplicitsMaxModules = IntSetting("-Vimplicits-max-modules", "max modules to display when printing types, set to 0 to only print type names", 0, Some((0, Int.MaxValue)), _ => None) val VimplicitsMaxRefined = IntSetting("-Vimplicits-max-refined", "max chars for printing refined types, abbreviate to `F {...}`", Int.MaxValue, Some((0, Int.MaxValue)), _ => None) val VtypeDiffs = BooleanSetting("-Vtype-diffs", "Print found/required error messages as colored diffs.") val logImplicitConv = BooleanSetting("-Vimplicit-conversions", "Print a message whenever an implicit conversion is inserted.") diff --git a/src/compiler/scala/tools/nsc/typechecker/splain/SplainFormatData.scala b/src/compiler/scala/tools/nsc/typechecker/splain/SplainFormatData.scala index 0b473cdd57ad..4dae6b1b5e31 100644 --- a/src/compiler/scala/tools/nsc/typechecker/splain/SplainFormatData.scala +++ b/src/compiler/scala/tools/nsc/typechecker/splain/SplainFormatData.scala @@ -19,9 +19,9 @@ import scala.annotation.tailrec object Formatted { @tailrec def comparator(formatted: Formatted): String = formatted match { case Infix(left, _, _, _) => comparator(left) - case Simple(tpe) => tpe - case Qualified(Nil, tpe) => tpe - case Qualified(path, tpe) => s"${path.mkString}$tpe" + case Simple(tpe) => tpe.name + case Qualified(Nil, tpe) => tpe.name + case Qualified(path, tpe) => s"${path.mkString}${tpe.name}" case UnitForm => "()" case Applied(cons, _) => comparator(cons) case TupleForm(Nil) => "()" @@ -42,8 +42,8 @@ object Formatted { sealed trait Formatted { def length: Int = this match { case Infix(infix, left, right, top) => infix.length + left.length + right.length + 2 - case Simple(tpe) => tpe.length - case 
Qualified(path, tpe) => path.map(_.length).sum + path.length + tpe.length + case Simple(tpe) => tpe.name.length + case Qualified(path, tpe) => path.map(_.length).sum + path.length + tpe.name.length case UnitForm => 4 case Applied(cons, args) => args.map(_.length).sum + ( args.length - 1) * 2 + cons.length + 2 case TupleForm(elems) => elems.map(_.length).sum + (elems.length - 1) + 2 @@ -56,9 +56,13 @@ sealed trait Formatted { } } +sealed trait FormattedName { val name: String } +case class SimpleName(name: String) extends FormattedName +case class InfixName(name: String) extends FormattedName + case class Infix(infix: Formatted, left: Formatted, right: Formatted, top: Boolean) extends Formatted -case class Simple(tpe: String) extends Formatted -case class Qualified(path: List[String], tpe: String) extends Formatted +case class Simple(tpe: FormattedName) extends Formatted +case class Qualified(path: List[String], tpe: FormattedName) extends Formatted case object UnitForm extends Formatted case class Applied(cons: Formatted, args: List[Formatted]) extends Formatted case class TupleForm(elems: List[Formatted]) extends Formatted diff --git a/src/compiler/scala/tools/nsc/typechecker/splain/SplainFormatting.scala b/src/compiler/scala/tools/nsc/typechecker/splain/SplainFormatting.scala index 338fd78185b2..909a52c8de2b 100644 --- a/src/compiler/scala/tools/nsc/typechecker/splain/SplainFormatting.scala +++ b/src/compiler/scala/tools/nsc/typechecker/splain/SplainFormatting.scala @@ -76,7 +76,7 @@ trait SplainFormatters { object RefinedFormatter extends SpecialFormatter { object DeclSymbol { def unapply(sym: Symbol): Option[(Formatted, Formatted)] = - if (sym.hasRawInfo) Some((Simple(sym.simpleName.toString), formatType(sym.rawInfo, true))) + if (sym.hasRawInfo) Some((Simple(SimpleName(sym.simpleName.toString)), formatType(sym.rawInfo, true))) else None } @@ -89,7 +89,7 @@ trait SplainFormatters { def formatDecl: Symbol => Formatted = { case DeclSymbol(n, t) => Decl(n, t) - case sym => Simple(sym.toString) + case sym => Simple(SimpleName(sym.toString)) } def apply[A]( @@ -100,7 +100,7 @@ trait SplainFormatters { case _ => None } - val none: Formatted = Simple("") + val none: Formatted = Simple(SimpleName("")) def separate[A](left: List[A], right: List[A]): (List[A], List[A], List[A]) = { val leftS = Set(left: _*) @@ -222,10 +222,15 @@ trait SplainFormatting extends SplainFormatters { case a => a.mkString("", ".", ".") } - def qualifiedName(path: List[String], name: String): String = s"${pathPrefix(path)}$name" + def qualifiedName(path: List[String], name: FormattedName): String = name match { + case SimpleName(name) => s"${pathPrefix(path)}$name" + case InfixName(name) => name + } - def stripModules(path: List[String], name: String, keep: Int): String = - qualifiedName(path.takeRight(keep), name) + def stripModules(path: List[String], name: FormattedName): String = { + val qName = qualifiedName(path, name) + if (shorthands(qName)) name.name else qName + } case class TypeParts(sym: Symbol, tt: Type) { def modulePath: List[String] = (tt, sym) match { @@ -349,8 +354,8 @@ trait SplainFormatting extends SplainFormatters { def truncateDecls(decls: List[Formatted]): Boolean = settings.VimplicitsMaxRefined.value < decls.map(_.length).sum - def showFormattedQualified(path: List[String], name: String): TypeRepr = - FlatType(stripModules(path, name, settings.VimplicitsMaxModules.value)) + def showFormattedQualified(path: List[String], name: FormattedName): TypeRepr = + FlatType(stripModules(path, name)) def 
formattedDiff(left: Formatted, right: Formatted): String = (left, right) match { case (Qualified(lpath, lname), Qualified(rpath, rname)) if lname == rname => @@ -363,8 +368,7 @@ trait SplainFormatting extends SplainFormatters { } def showFormattedLImpl(tpe: Formatted, break: Boolean): TypeRepr = tpe match { - case Simple(name) => FlatType(name) - case Qualified(Nil, name) => FlatType(name) + case Simple(name) => FlatType(name.name) case Qualified(path, name) => showFormattedQualified(path, name) case Applied(cons, args) => showTypeApply(showFormatted(cons), args.map(showFormattedL(_, break)), break) case tpe @ Infix(_, _, _, top) => wrapParensRepr(if (break) breakInfix(flattenInfix(tpe)) else FlatType(flattenInfix(tpe).map(showFormatted).mkString(" ")), top) @@ -398,7 +402,7 @@ trait SplainFormatting extends SplainFormatters { def formatInfix[A]( path: List[String], simple: String, left: A, right: A, top: Boolean, )(rec: (A, Boolean) => Formatted): Formatted = - Infix(Qualified(path, simple), rec(left, false), rec(right, false), top) + Infix(Qualified(path, InfixName(simple)), rec(left, false), rec(right, false), top) def formatWithInfix[A](tpe: Type, args: List[A], top: Boolean)(rec: (A, Boolean) => Formatted): Formatted = { val (path, simple) = formatSimpleType(tpe) @@ -406,8 +410,8 @@ trait SplainFormatting extends SplainFormatters { formatSpecial(tpe, simple, args, formattedArgs, top)(rec).getOrElse { args match { case left :: right :: Nil if isSymbolic(tpe) => formatInfix(path, simple, left, right, top)(rec) - case _ :: _ => Applied(Qualified(path, simple), formattedArgs) - case _ => Qualified(path, simple) + case _ :: _ => Applied(Qualified(path, SimpleName(simple)), formattedArgs) + case _ => Qualified(path, SimpleName(simple)) } } } diff --git a/test/files/neg/t6323a.check b/test/files/neg/t6323a.check index d8622cd22e1c..399514cb1ce9 100644 --- a/test/files/neg/t6323a.check +++ b/test/files/neg/t6323a.check @@ -1,5 +1,5 @@ t6323a.scala:13: error: implicit error; -!I ttag: TypeTag[Test] +!I ttag: reflect.runtime.universe.TypeTag[Test] No TypeTag available for Test val value = u.typeOf[Test] diff --git a/test/files/run/splain-max-modules.check b/test/files/run/splain-max-modules.check deleted file mode 100644 index 61bfd7240a9c..000000000000 --- a/test/files/run/splain-max-modules.check +++ /dev/null @@ -1,28 +0,0 @@ -newSource1.scala:4: error: implicit error; -!I e: Type - implicitly[Type] - ^ -newSource1.scala:4: error: implicit error; -!I e: Type - implicitly[Type] - ^ -newSource1.scala:4: error: implicit error; -!I e: z.Type - implicitly[Type] - ^ -newSource1.scala:4: error: implicit error; -!I e: y.z.Type - implicitly[Type] - ^ -newSource1.scala:4: error: implicit error; -!I e: x.y.z.Type - implicitly[Type] - ^ -newSource1.scala:4: error: implicit error; -!I e: p.q.r.s.t.u.v.w.x.y.z.Type - implicitly[Type] - ^ -newSource1.scala:4: error: implicit error; -!I e: a.b.c.d.e.f.g.h.i.j.k.l.m.n.o.p.q.r.s.t.u.v.w.x.y.z.Type - implicitly[Type] - ^ diff --git a/test/files/run/splain-max-modules.scala b/test/files/run/splain-max-modules.scala deleted file mode 100644 index b135f5b312cb..000000000000 --- a/test/files/run/splain-max-modules.scala +++ /dev/null @@ -1,27 +0,0 @@ -import scala.tools.partest._ - -object Test extends DirectTest { - val code: String = """ -package a.b.c.d.e.f.g.h.i.j.k.l.m.n.o.p.q.r.s.t.u.v.w.x.y { - object z { - type Type - implicitly[Type] - } -} -""" - - def show(): Unit = { - def run(modules: Option[Int]): Unit = - compileString(newCompiler(( - "-Vimplicits" 
+: modules.toSeq.flatMap(i => Seq("-Vimplicits-max-modules", i.toString)) - ):_*))(code.trim) - - run(None) - run(Some(0)) - run(Some(1)) - run(Some(2)) - run(Some(3)) - run(Some(11)) - run(Some(Int.MaxValue)) - } -} diff --git a/test/files/run/splain-tree.check b/test/files/run/splain-tree.check index 08f373071066..2e3c5b2597db 100644 --- a/test/files/run/splain-tree.check +++ b/test/files/run/splain-tree.check @@ -1,47 +1,47 @@ newSource1.scala:28: error: implicit error; -!I e: I1 +!I e: tpes.I1 i1a invalid because -!I p: I2 +!I p: tpes.I2 ――i2 invalid because - !I p: I3 + !I p: tpes.I3 ――――i3a invalid because - !I p: I4 + !I p: tpes.I4 ――――――i4 invalid because - !I p: I5 + !I p: tpes.I5 ――――――――i5 invalid because - !I p: I6 + !I p: tpes.I6 ――――――――――i6a invalid because - !I p: I7 + !I p: tpes.I7 ――――――――――――i7 invalid because - !I p: I8 + !I p: tpes.I8 ――――――――――――――i8 invalid because - !I p: I9 + !I p: tpes.I9 ――――――――――i6b invalid because - !I p: I8 + !I p: tpes.I8 ――――――――――――i8 invalid because - !I p: I9 + !I p: tpes.I9 ――――i3b invalid because - !I p: I4 + !I p: tpes.I4 ――――――i4 invalid because - !I p: I5 + !I p: tpes.I5 ――――――――i5 invalid because - !I p: I6 + !I p: tpes.I6 ――――――――――i6a invalid because - !I p: I7 + !I p: tpes.I7 ――――――――――――i7 invalid because - !I p: I8 + !I p: tpes.I8 ――――――――――――――i8 invalid because - !I p: I9 + !I p: tpes.I9 i1b invalid because -!I p: I6 +!I p: tpes.I6 ――i6a invalid because - !I p: I7 + !I p: tpes.I7 ――――i7 invalid because - !I p: I8 + !I p: tpes.I8 ――――――i8 invalid because - !I p: I9 + !I p: tpes.I9 implicitly[I1] ^ diff --git a/test/files/run/splain-truncrefined.check b/test/files/run/splain-truncrefined.check index b940efbf3678..bf112963fd65 100644 --- a/test/files/run/splain-truncrefined.check +++ b/test/files/run/splain-truncrefined.check @@ -1,4 +1,4 @@ newSource1.scala:7: error: type mismatch; - D|C {...} + TruncRefined.D|TruncRefined.C {...} f(new D { type X = C; type Y = D }) ^ diff --git a/test/files/run/splain.check b/test/files/run/splain.check index 60b373684230..9dbb8db96b7c 100644 --- a/test/files/run/splain.check +++ b/test/files/run/splain.check @@ -1,22 +1,22 @@ newSource1.scala:13: error: implicit error; -!I e: II +!I e: ImplicitChain.II ImplicitChain.g invalid because -!I impPar3: I1 +!I impPar3: ImplicitChain.I1 ⋮ ――ImplicitChain.i1 invalid because - !I impPar7: I3 + !I impPar7: ImplicitChain.I3 implicitly[II] ^ newSource1.scala:6: error: type mismatch; - L|R + FoundReq.L|FoundReq.R f(new L) ^ newSource1.scala:7: error: implicit error; -!I e: F[Arg] +!I e: Bounds.F[Bounds.Arg] implicitly[F[Arg]] ^ newSource1.scala:4: error: implicit error; -!I ec: ExecutionContext +!I ec: scala.concurrent.ExecutionContext Cannot find an implicit ExecutionContext. You might add an (implicit ec: ExecutionContext) parameter to your method. 
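The `ExecutionContext` entry just above corresponds to test source of roughly this shape (a
reconstruction for illustration; every name other than `long` and `ec` is assumed):

    import scala.concurrent.ExecutionContext

    object Async {
      def long(implicit ec: ExecutionContext): Int = 1
      long // no ExecutionContext in scope: splain prints the type's @implicitNotFound advice
    }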
@@ -33,46 +33,55 @@ newSource1.scala:4: error: implicit error; long ^ newSource1.scala:10: error: implicit error; -!I e: String +!I e: java.lang.String f invalid because !I impPar4: List[ ( - VeryLongTypeName :::: - VeryLongTypeName :::: - VeryLongTypeName :::: - VeryLongTypeName + InfixBreak.VeryLongTypeName :::: + InfixBreak.VeryLongTypeName :::: + InfixBreak.VeryLongTypeName :::: + InfixBreak.VeryLongTypeName ) :::: - (Short :::: Short) :::: + (InfixBreak.Short :::: InfixBreak.Short) :::: ( - VeryLongTypeName :::: - VeryLongTypeName :::: - VeryLongTypeName :::: - VeryLongTypeName + InfixBreak.VeryLongTypeName :::: + InfixBreak.VeryLongTypeName :::: + InfixBreak.VeryLongTypeName :::: + InfixBreak.VeryLongTypeName ) :::: - VeryLongTypeName :::: - VeryLongTypeName :::: - VeryLongTypeName :::: - VeryLongTypeName + InfixBreak.VeryLongTypeName :::: + InfixBreak.VeryLongTypeName :::: + InfixBreak.VeryLongTypeName :::: + InfixBreak.VeryLongTypeName ] (No implicit view available from Int => InfixBreak.T2.) implicitly[String] ^ newSource1.scala:11: error: implicit error; -!I e: C1[T3[T1[List[String], ?], T2[Id, C4, ?], ?]] +!I e: + DeepHole.C1[ + DeepHole.T3[ + DeepHole.T1[List[java.lang.String], ?] + , + DeepHole.T2[DeepHole.Id, DeepHole.C4, ?] + , + ? + ] + ] implicitly[C1[T3]] ^ newSource1.scala:9: error: implicit error; -!I e: F.Aux[C, D] +!I e: Aux.F.Aux[Aux.C, Aux.D] Aux.f invalid because -!I impPar10: C +!I impPar10: Aux.C implicitly[F.Aux[C, D]] ^ newSource1.scala:11: error: type mismatch; - A with B with E|C with F| {type X = Int|String; type Y = String; type Z = |String} + Refined.A with Refined.B with Refined.E|Refined.C with Refined.F| {type X = scala.Int|java.lang.String; type Y = java.lang.String; type Z = |java.lang.String} f(x) ^ newSource1.scala:25: error: type mismatch; @@ -80,11 +89,11 @@ newSource1.scala:25: error: type mismatch; f(x: C.X.Y.T) ^ newSource1.scala:6: error: type mismatch; - Int|(=> A) => B + scala.Int|(=> Foo.A) => Foo.B f(1: Int) ^ newSource1.scala:3: error: type mismatch; - String|Tuple1[String] + java.lang.String|Tuple1[java.lang.String] val a: Tuple1[String] = "Tuple1": String ^ newSource1.scala:7: error: implicit error; @@ -100,15 +109,35 @@ newSource1.scala:6: error: implicit error; implicitly[a.type *** b.type] ^ newSource1.scala:5: error: implicit error; -!I ev: Ordering[Object] +!I ev: scala.math.Ordering[java.lang.Object] No implicit Ordering[Object] found to build a SortedSet[Object]. You may want to upcast to a Set[Int] first by calling `unsorted`. Ordering.ordered invalid because -!I asComparable: Object => Comparable[_$2] +!I asComparable: java.lang.Object => java.lang.Comparable[_$2] No implicit view available from Object => Comparable[_ >: Object]. ⋮ Ordering.comparatorToOrdering invalid because -!I cmp: Comparator[Object] +!I cmp: java.util.Comparator[java.lang.Object] ms.map(_ => o) ^ +newSource1.scala:9: error: implicit error; +!I e: List[a.TypeA] + (No implicit view available from Int => a.TypeA.) + + implicitly[List[TypeA]] + ^ +newSource1.scala:10: error: implicit error; +!I e: Seq[a.b.TypeB] + (No implicit view available from Int => a.b.TypeB.) 
+ + implicitly[Seq[TypeB]] + ^ +newSource1.scala:11: error: implicit error; +!I e: Iterable[a.b.c.TypeC] + implicitly[Traversable[TypeC]] + ^ +newSource1.scala:12: error: implicit error; +!I e: Iterator[a.b.c.d.TypeD] + implicitly[Iterator[TypeD]] + ^ diff --git a/test/files/run/splain.scala b/test/files/run/splain.scala index 5c851b76ba9a..57f3b4ef569e 100644 --- a/test/files/run/splain.scala +++ b/test/files/run/splain.scala @@ -200,6 +200,25 @@ object SingleImp } """ + def shorthandTypes: String = """ +object a { + type TypeA + object b { + type TypeB + object c { + type TypeC + object d { + type TypeD + implicitly[List[TypeA]] + implicitly[Seq[TypeB]] + implicitly[Traversable[TypeC]] + implicitly[Iterator[TypeD]] + } + } + } +} +""" + def show(): Unit = { val global = newCompiler() @@ -221,5 +240,6 @@ object SingleImp run(singleTypeInFunction) run(singleTypeWithFreeSymbol) run(parameterAnnotation) + run(shorthandTypes) } } From e5a2e0dd3c1897c0b20a9d46a8efacdad039a204 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Fri, 22 Oct 2021 15:54:52 +0200 Subject: [PATCH 419/769] update TASTy to 28.1-0, patch tests --- project/DottySupport.scala | 2 +- src/compiler/scala/tools/tasty/TastyFormat.scala | 2 +- .../dotty/tools/vulpix/ParallelTesting.scala | 11 ----------- src/tastytest/scala/tools/tastytest/Dotc.scala | 2 +- .../scala/tools/tastytest/package.scala | 5 ----- .../neg-move-macros/src-3/MacroCompat.scala | 7 ++++++- test/tasty/neg/src-3/ErasedTypes.scala | 7 +++++++ .../pos/src-3/tastytest/AnythingIsPossible.scala | 3 ++- .../run/src-3/tastytest/AnythingIsPossible.scala | 3 ++- .../tasty/run/src-3/tastytest/InlineCompat.scala | 5 +++++ .../run/src-3/tastytest/InlineCompat2.scala | 5 +++++ test/tasty/run/src-3/tastytest/MacroCompat.scala | 16 +++++++++++++++- 12 files changed, 45 insertions(+), 23 deletions(-) delete mode 100644 src/tastytest/dotty/tools/vulpix/ParallelTesting.scala diff --git a/project/DottySupport.scala b/project/DottySupport.scala index 05f805f739cd..00535f0fa4dd 100644 --- a/project/DottySupport.scala +++ b/project/DottySupport.scala @@ -12,7 +12,7 @@ import sbt.librarymanagement.{ * Settings to support validation of TastyUnpickler against the release of dotty with the matching TASTy version */ object TastySupport { - val supportedTASTyRelease = "3.0.0" // TASTy version 28.0-0 + val supportedTASTyRelease = "3.1.0" // TASTy version 28.1-0 val scala3Compiler = "org.scala-lang" % "scala3-compiler_3" % supportedTASTyRelease val scala3Library = "org.scala-lang" % "scala3-library_3" % supportedTASTyRelease diff --git a/src/compiler/scala/tools/tasty/TastyFormat.scala b/src/compiler/scala/tools/tasty/TastyFormat.scala index 858579cf8ac2..d62fdfef6434 100644 --- a/src/compiler/scala/tools/tasty/TastyFormat.scala +++ b/src/compiler/scala/tools/tasty/TastyFormat.scala @@ -35,7 +35,7 @@ object TastyFormat { * compatibility, but remains backwards compatible, with all * preceding `MinorVersion`. */ - final val MinorVersion: Int = 0 + final val MinorVersion: Int = 1 /**Natural Number. 
The `ExperimentalVersion` allows for * experimentation with changes to TASTy without committing diff --git a/src/tastytest/dotty/tools/vulpix/ParallelTesting.scala b/src/tastytest/dotty/tools/vulpix/ParallelTesting.scala deleted file mode 100644 index fc1245e47de7..000000000000 --- a/src/tastytest/dotty/tools/vulpix/ParallelTesting.scala +++ /dev/null @@ -1,11 +0,0 @@ -package dotty.tools.vulpix - -/** As of Scala 3.0.0-RC2, dotty compiler will enable the - * usage of experimental features if the compiler is invoked - * within a method on the class `dotty.tools.vulpix.ParallelTesting` - * - * We use this to test experimental features on non-nightly releases. - */ -class ParallelTesting { - def unlockExperimentalFeatures[T](op: => T): T = op -} diff --git a/src/tastytest/scala/tools/tastytest/Dotc.scala b/src/tastytest/scala/tools/tastytest/Dotc.scala index e36399c5e08a..3814c9a8150b 100644 --- a/src/tastytest/scala/tools/tastytest/Dotc.scala +++ b/src/tastytest/scala/tools/tastytest/Dotc.scala @@ -41,7 +41,7 @@ object Dotc extends Script.Command { val Main_process = mainClass.getMethod("process", classOf[Array[String]]) val Reporter_hasErrors = reporterClass.getMethod("hasErrors") Try { - val reporter = unlockExperimentalFeatures(invokeStatic(Main_process, Seq(args.toArray))) + val reporter = invokeStatic(Main_process, Seq(args.toArray)) val hasErrors = invoke(Reporter_hasErrors, reporter, Seq.empty).asInstanceOf[Boolean] !hasErrors } diff --git a/src/tastytest/scala/tools/tastytest/package.scala b/src/tastytest/scala/tools/tastytest/package.scala index 1d5d745cd066..95167f2e030e 100644 --- a/src/tastytest/scala/tools/tastytest/package.scala +++ b/src/tastytest/scala/tools/tastytest/package.scala @@ -1,16 +1,11 @@ package scala.tools -import dotty.tools.vulpix.ParallelTesting - package object tastytest { import scala.util.Try import Files.{pathSep, classpathSep} - def unlockExperimentalFeatures[T](op: => T): T = - new ParallelTesting().unlockExperimentalFeatures(op) - def printerrln(str: String): Unit = System.err.println(red(str)) def printwarnln(str: String): Unit = System.err.println(yellow(str)) def printsuccessln(str: String): Unit = System.err.println(green(str)) diff --git a/test/tasty/neg-move-macros/src-3/MacroCompat.scala b/test/tasty/neg-move-macros/src-3/MacroCompat.scala index d3c3374b17c3..8a0021a42660 100644 --- a/test/tasty/neg-move-macros/src-3/MacroCompat.scala +++ b/test/tasty/neg-move-macros/src-3/MacroCompat.scala @@ -2,9 +2,14 @@ package tastytest import scala.language.experimental.macros +import scala.annotation.experimental + object MacroCompat { + @experimental implicit def pos: Position = macro Macros.posImpl // implemented in test/tasty/run/pre/tastytest/package.scala + + @experimental implicit inline def pos: Position = ${ Macros3.posImpl } def testCase(test: => Any)(using Position): String = @@ -16,7 +21,7 @@ object MacroCompat { def posImpl(using quotes: Quotes): Expr[Position] = { import quotes.reflect.given val pos = quotes.reflect.Position.ofMacroExpansion - val name = pos.sourceFile.jpath.getFileName.toString + val name = pos.sourceFile.getJPath.map(_.getFileName.toString).getOrElse("?.scala") val line = pos.startLine + 1 '{ Position(${Expr(name)}, ${Expr(line)}) } } diff --git a/test/tasty/neg/src-3/ErasedTypes.scala b/test/tasty/neg/src-3/ErasedTypes.scala index a535369ebbdb..432dcc306093 100644 --- a/test/tasty/neg/src-3/ErasedTypes.scala +++ b/test/tasty/neg/src-3/ErasedTypes.scala @@ -2,17 +2,24 @@ package tastytest import 
language.experimental.erasedDefinitions +import scala.annotation.experimental + object ErasedTypes { + @experimental trait Foo { def foo1(erased x: String): Int def foo2(using erased x: String): Int } + @experimental class Bar[F <: Foo { def foo1(erased x: String): 0 }] + + @experimental class Baz[F <: Foo { def foo2(using erased x: String): 0 }] object ErasedCompileTimeOps { + @experimental erased def theNothing: Nothing = ??? } diff --git a/test/tasty/pos/src-3/tastytest/AnythingIsPossible.scala b/test/tasty/pos/src-3/tastytest/AnythingIsPossible.scala index 31fa2f8da1e2..148b2d9caa21 100644 --- a/test/tasty/pos/src-3/tastytest/AnythingIsPossible.scala +++ b/test/tasty/pos/src-3/tastytest/AnythingIsPossible.scala @@ -2,7 +2,8 @@ package tastytest object AnythingIsPossible { - class Box[A](val a: A) + class Box[A](accept: => A): + val a: A = accept class Class extends Box({ class X { final val x = Map(("", 3)) } ; val foo = new X(); foo.x: foo.x.type }) diff --git a/test/tasty/run/src-3/tastytest/AnythingIsPossible.scala b/test/tasty/run/src-3/tastytest/AnythingIsPossible.scala index 31fa2f8da1e2..148b2d9caa21 100644 --- a/test/tasty/run/src-3/tastytest/AnythingIsPossible.scala +++ b/test/tasty/run/src-3/tastytest/AnythingIsPossible.scala @@ -2,7 +2,8 @@ package tastytest object AnythingIsPossible { - class Box[A](val a: A) + class Box[A](accept: => A): + val a: A = accept class Class extends Box({ class X { final val x = Map(("", 3)) } ; val foo = new X(); foo.x: foo.x.type }) diff --git a/test/tasty/run/src-3/tastytest/InlineCompat.scala b/test/tasty/run/src-3/tastytest/InlineCompat.scala index 286a30dd0f46..80600b8ed85f 100644 --- a/test/tasty/run/src-3/tastytest/InlineCompat.scala +++ b/test/tasty/run/src-3/tastytest/InlineCompat.scala @@ -4,9 +4,14 @@ import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context +import scala.annotation.experimental + object InlineCompat { + @experimental def foo(code: String): String = macro InlineCompatScala2Macro.foo + + @experimental inline def foo(inline code: String): String = code // inline method, not macro } diff --git a/test/tasty/run/src-3/tastytest/InlineCompat2.scala b/test/tasty/run/src-3/tastytest/InlineCompat2.scala index c6fcbd6090fa..15e67f43e3b8 100644 --- a/test/tasty/run/src-3/tastytest/InlineCompat2.scala +++ b/test/tasty/run/src-3/tastytest/InlineCompat2.scala @@ -4,9 +4,14 @@ import scala.language.experimental.macros import scala.reflect.macros.blackbox.Context +import scala.annotation.experimental + object InlineCompat2 { + @experimental def foo(code: String): String = macro InnerScala2MacroImpl.fooImpl + + @experimental inline def foo(inline code: String): String = code // inline method, not macro object InnerScala2MacroImpl { diff --git a/test/tasty/run/src-3/tastytest/MacroCompat.scala b/test/tasty/run/src-3/tastytest/MacroCompat.scala index 71ba8c03f063..8b2ca3e5fcb3 100644 --- a/test/tasty/run/src-3/tastytest/MacroCompat.scala +++ b/test/tasty/run/src-3/tastytest/MacroCompat.scala @@ -2,19 +2,33 @@ package tastytest import scala.language.experimental.macros +import scala.annotation.experimental + object MacroCompat { + @experimental implicit def pos: Position = macro Position.posImpl + + @experimental implicit inline def pos: Position = ${ Macros3.posImpl } + @experimental def constInt[T](x: T): Int = macro Macros.constIntImpl[T] + + @experimental inline def constInt[T](x: T): Int = ${ Macros3.constIntImpl[T]('x) } object Bundles { + @experimental def mono: Int = macro MacroImpl.mono + + 
@experimental inline def mono: Int = ${ Macros3.monoImpl } + @experimental def poly[T]: String = macro MacroImpl.poly[T] + + @experimental inline def poly[T]: String = ${ Macros3.polyImpl[T] } } @@ -30,7 +44,7 @@ object MacroCompat { def posImpl(using quotes: Quotes): Expr[Position] = { import quotes.reflect.given val pos = quotes.reflect.Position.ofMacroExpansion - val name = pos.sourceFile.jpath.getFileName.toString + val name = pos.sourceFile.getJPath.map(_.getFileName.toString).getOrElse("?.scala") val line = pos.startLine + 1 '{ Position(${Expr(name)}, ${Expr(line)}) } } From d6816dbd8e773c10239bbad2daa9fa96c6e39f73 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Fri, 22 Oct 2021 17:34:57 +0200 Subject: [PATCH 420/769] add test for given class; add printTasty script --- .../scala/tools/nsc/tasty/TreeUnpickler.scala | 13 +++--- .../scala/tools/tastytest/Dotc.scala | 45 +++++++++++++------ .../tools/tastytest/DotcDecompiler.scala | 12 ++--- .../scala/tools/tastytest/PrintTasty.scala | 24 ++++++++++ .../pos/src-2/tastytest/TestTCGivens.scala | 8 ++++ .../tasty/pos/src-3/tastytest/givens/TC.scala | 12 +++++ .../test/scala/tools/tastytest/Scripted.scala | 2 +- 7 files changed, 88 insertions(+), 28 deletions(-) create mode 100644 src/tastytest/scala/tools/tastytest/PrintTasty.scala create mode 100644 test/tasty/pos/src-2/tastytest/TestTCGivens.scala create mode 100644 test/tasty/pos/src-3/tastytest/givens/TC.scala diff --git a/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala b/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala index 89d42b9f48ba..c080d3dd769a 100644 --- a/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala +++ b/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala @@ -461,6 +461,8 @@ class TreeUnpickler[Tasty <: TastyUniverse]( private def addInferredFlags(tag: Int, tastyFlags: TastyFlagSet, name: TastyName, isAbsType: Boolean, isClass: Boolean, rhsIsEmpty: Boolean)(implicit ctx: Context): TastyFlagSet = { var flags = tastyFlags + if (flags.is(Given)) + flags |= Implicit val lacksDefinition = rhsIsEmpty && name.isTermName && !name.isConstructorName && !flags.isOneOf(FlagSets.TermParamOrAccessor) || @@ -668,7 +670,7 @@ class TreeUnpickler[Tasty <: TastyUniverse]( case HASDEFAULT => addFlag(HasDefault) case STABLE => addFlag(Stable) case EXTENSION => addFlag(Extension) - case GIVEN => addFlag(Implicit) + case GIVEN => addFlag(Given) case PARAMsetter => addFlag(ParamSetter) case PARAMalias => addFlag(ParamAlias) case EXPORTED => addFlag(Exported) @@ -803,7 +805,8 @@ class TreeUnpickler[Tasty <: TastyUniverse]( def DefDef(repr: TastyRepr, localCtx: Context)(implicit ctx: Context): Unit = { val isMacro = repr.tflags.is(Erased | Macro) - checkUnsupportedFlags(repr.unsupportedFlags &~ (Extension | Exported | Infix | optFlag(isMacro)(Erased))) + val supportedFlags = Extension | Exported | Infix | Given | optFlag(isMacro)(Erased) + checkUnsupportedFlags(repr.unsupportedFlags &~ supportedFlags) val isCtor = sym.isConstructor val paramDefss = readParamss()(localCtx).map(_.map(symFromNoCycle)) val typeParams = { @@ -842,7 +845,7 @@ class TreeUnpickler[Tasty <: TastyUniverse]( def ValDef(repr: TastyRepr, localCtx: Context)(implicit ctx: Context): Unit = { // valdef in TASTy is either a singleton object or a method forwarder to a local value. 
- checkUnsupportedFlags(repr.unsupportedFlags &~ (Enum | Extension | Exported)) + checkUnsupportedFlags(repr.unsupportedFlags &~ (Enum | Extension | Exported | Given)) val tpe = readTpt()(localCtx).tpe ctx.setInfo(sym, if (repr.tflags.is(FlagSets.SingletonEnum)) { @@ -856,7 +859,7 @@ class TreeUnpickler[Tasty <: TastyUniverse]( } def TypeDef(repr: TastyRepr, localCtx: Context)(implicit ctx: Context): Unit = { - val allowedShared = Enum | Opaque | Infix + val allowedShared = Enum | Opaque | Infix | Given val allowedTypeFlags = allowedShared | Exported val allowedClassFlags = allowedShared | Open | Transparent if (sym.isClass) { @@ -881,7 +884,7 @@ class TreeUnpickler[Tasty <: TastyUniverse]( } def TermParam(repr: TastyRepr, localCtx: Context)(implicit ctx: Context): Unit = { - checkUnsupportedFlags(repr.unsupportedFlags &~ (ParamAlias | Exported)) + checkUnsupportedFlags(repr.unsupportedFlags &~ (ParamAlias | Exported | Given)) val tpt = readTpt()(localCtx) ctx.setInfo(sym, if (nothingButMods(end) && sym.not(ParamSetter)) tpt.tpe diff --git a/src/tastytest/scala/tools/tastytest/Dotc.scala b/src/tastytest/scala/tools/tastytest/Dotc.scala index 3814c9a8150b..8be7725c0810 100644 --- a/src/tastytest/scala/tools/tastytest/Dotc.scala +++ b/src/tastytest/scala/tools/tastytest/Dotc.scala @@ -16,6 +16,15 @@ object Dotc extends Script.Command { def initClassloader(): Try[Dotc.ClassLoader] = Try(Dotc.ClassLoader(ScalaClassLoader.fromURLs(Classpaths.dottyCompiler.asURLs))) + def processIn(op: Dotc.ClassLoader => Int): Int = { + Dotc.initClassloader() match { + case Success(cl) => op(cl) + case Failure(err) => + println(red(s"could not initialise Scala 3 classpath: $err")) + 1 + } + } + def loadClass(name: String)(implicit cl: Dotc.ClassLoader) = Class.forName(name, true, cl.parent) @@ -24,6 +33,18 @@ object Dotc extends Script.Command { invoke(method, null, args) } + def invokeStatic( + className: String, + methodName: String, + args: Seq[String] + )(implicit cl: Dotc.ClassLoader): Try[Object] = { + val cls = loadClass(className) + val method = cls.getMethod(methodName, classOf[Array[String]]) + Try { + invokeStatic(method, Seq(args.toArray)) + } + } + def invoke(method: Method, obj: AnyRef, args: Seq[Any])(implicit cl: Dotc.ClassLoader) = { try cl.parent.asContext[AnyRef] { method.invoke(obj, args.toArray:_*) @@ -35,18 +56,18 @@ object Dotc extends Script.Command { private def dotcProcess(args: Seq[String])(implicit cl: Dotc.ClassLoader) = processMethod("dotty.tools.dotc.Main")(args) - def processMethod(mainClassName: String)(args: Seq[String])(implicit cl: Dotc.ClassLoader): Try[Boolean] = { - val mainClass = loadClass(mainClassName) - val reporterClass = loadClass("dotty.tools.dotc.reporting.Reporter") - val Main_process = mainClass.getMethod("process", classOf[Array[String]]) - val Reporter_hasErrors = reporterClass.getMethod("hasErrors") - Try { - val reporter = invokeStatic(Main_process, Seq(args.toArray)) + def processMethod(className: String)(args: Seq[String])(implicit cl: Dotc.ClassLoader): Try[Boolean] = { + val reporterCls = loadClass("dotty.tools.dotc.reporting.Reporter") + val Reporter_hasErrors = reporterCls.getMethod("hasErrors") + for (reporter <- invokeStatic(className, "process", args)) yield { val hasErrors = invoke(Reporter_hasErrors, reporter, Seq.empty).asInstanceOf[Boolean] !hasErrors } } + def mainMethod(className: String)(args: Seq[String])(implicit cl: Dotc.ClassLoader): Try[Unit] = + for (_ <- invokeStatic(className, "main", args)) yield () + def dotcVersion(implicit cl: 
Dotc.ClassLoader): String = { val compilerPropertiesClass = loadClass("dotty.tools.dotc.config.Properties") val Properties_simpleVersionString = compilerPropertiesClass.getMethod("simpleVersionString") @@ -81,14 +102,10 @@ object Dotc extends Script.Command { return 1 } val Seq(out, src, additional @ _*) = args: @unchecked - implicit val scala3classloader: Dotc.ClassLoader = initClassloader() match { - case Success(cl) => cl - case Failure(err) => - println(red(s"could not initialise Scala 3 classpath: $err")) - return 1 + Dotc.processIn { implicit scala3classloader => + val success = dotc(out, out, additional, src).get + if (success) 0 else 1 } - val success = dotc(out, out, additional, src).get - if (success) 0 else 1 } } diff --git a/src/tastytest/scala/tools/tastytest/DotcDecompiler.scala b/src/tastytest/scala/tools/tastytest/DotcDecompiler.scala index c10582a42bd5..41f842b43f33 100644 --- a/src/tastytest/scala/tools/tastytest/DotcDecompiler.scala +++ b/src/tastytest/scala/tools/tastytest/DotcDecompiler.scala @@ -1,6 +1,6 @@ package scala.tools.tastytest -import scala.util.{Try, Success, Failure} +import scala.util.Try object DotcDecompiler extends Script.Command { @@ -19,14 +19,10 @@ object DotcDecompiler extends Script.Command { return 1 } val Seq(tasty, additionalSettings @ _*) = args: @unchecked - implicit val scala3classloader: Dotc.ClassLoader = Dotc.initClassloader() match { - case Success(cl) => cl - case Failure(err) => - println(red(s"could not initialise Scala 3 classpath: $err")) - return 1 + Dotc.processIn { implicit scala3classloader => + val success = decompile(tasty, additionalSettings).get + if (success) 0 else 1 } - val success = decompile(tasty, additionalSettings).get - if (success) 0 else 1 } } diff --git a/src/tastytest/scala/tools/tastytest/PrintTasty.scala b/src/tastytest/scala/tools/tastytest/PrintTasty.scala new file mode 100644 index 000000000000..f9fcf655b50a --- /dev/null +++ b/src/tastytest/scala/tools/tastytest/PrintTasty.scala @@ -0,0 +1,24 @@ +package scala.tools.tastytest + +import scala.util.Try + +object PrintTasty extends Script.Command { + + def printTasty(tasty: String)(implicit cl: Dotc.ClassLoader): Try[Unit] = + Dotc.mainMethod("dotty.tools.dotc.core.tasty.TastyPrinter")(Seq(tasty)) + + val commandName: String = "printTasty" + val describe: String = s"$commandName " + + def process(args: String*): Int = { + if (args.length != 1) { + println(red(s"please provide 1 argument in sub-command: $describe")) + return 1 + } + Dotc.processIn { implicit scala3classloader => + val success = printTasty(tasty = args.head).isSuccess + if (success) 0 else 1 + } + } + +} diff --git a/test/tasty/pos/src-2/tastytest/TestTCGivens.scala b/test/tasty/pos/src-2/tastytest/TestTCGivens.scala new file mode 100644 index 000000000000..4569dc47fc4e --- /dev/null +++ b/test/tasty/pos/src-2/tastytest/TestTCGivens.scala @@ -0,0 +1,8 @@ +package tastytest + +import givens._ + +object TestTCGivens { + def exported = TCModule.TC.mkTCFromInt[1] + def original: TCInstances.TC.mkTCFromInt[1] = TCInstances.TC.mkTCFromInt[1] +} diff --git a/test/tasty/pos/src-3/tastytest/givens/TC.scala b/test/tasty/pos/src-3/tastytest/givens/TC.scala new file mode 100644 index 000000000000..279f34136045 --- /dev/null +++ b/test/tasty/pos/src-3/tastytest/givens/TC.scala @@ -0,0 +1,12 @@ +package tastytest.givens + +object TCModule: + trait TC[V] + object TC: + export TCInstances.TC.given + +object TCInstances: + object TC: + import TCModule.TC + given mkTCFromInt[V <: Int]: TC[V] with + type Out = 
Int diff --git a/test/tasty/test/scala/tools/tastytest/Scripted.scala b/test/tasty/test/scala/tools/tastytest/Scripted.scala index ae102fa68e7b..04433e0c00da 100644 --- a/test/tasty/test/scala/tools/tastytest/Scripted.scala +++ b/test/tasty/test/scala/tools/tastytest/Scripted.scala @@ -2,7 +2,7 @@ package scala.tools.tastytest object Scripted extends Script { - val subcommands = List(Dotc, DotcDecompiler, Scalac, Runner, Javac) + val subcommands = List(Dotc, DotcDecompiler, PrintTasty, Scalac, Runner, Javac) val commandName = "Scripted" } From ed3ce1aa457ff4c71f989d35dca1ef73ebe993ef Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Fri, 22 Oct 2021 14:45:40 -0700 Subject: [PATCH 421/769] junit-interface 0.13.2 (was 0.11) I hadn't been aware that newer versions of this were available under a different org. I don't see anything risky-looking in the notes at https://github.com/sbt/junit-interface/releases --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 37445b02f6c5..71c745ce596a 100644 --- a/build.sbt +++ b/build.sbt @@ -36,7 +36,7 @@ import scala.build._, VersionUtil._ // Non-Scala dependencies: val junitDep = "junit" % "junit" % "4.13.2" -val junitInterfaceDep = "com.novocode" % "junit-interface" % "0.11" % Test +val junitInterfaceDep = "com.github.sbt" % "junit-interface" % "0.13.2" % Test val scalacheckDep = "org.scalacheck" %% "scalacheck" % "1.15.4" % Test val jolDep = "org.openjdk.jol" % "jol-core" % "0.13" val asmDep = "org.scala-lang.modules" % "scala-asm" % versionProps("scala-asm.version") From 35dbc1efdeb1359dfb9763c09cf519c610725002 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Mon, 25 Oct 2021 14:23:12 +0200 Subject: [PATCH 422/769] test inner class singleton enum --- .../scala/tools/nsc/tasty/TreeUnpickler.scala | 2 +- .../run/src-2/tastytest/TestNestedEnum.scala | 17 +++++++++++++++++ test/tasty/run/src-3/tastytest/NestedEnum.scala | 6 ++++++ 3 files changed, 24 insertions(+), 1 deletion(-) create mode 100644 test/tasty/run/src-2/tastytest/TestNestedEnum.scala create mode 100644 test/tasty/run/src-3/tastytest/NestedEnum.scala diff --git a/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala b/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala index c080d3dd769a..498402926185 100644 --- a/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala +++ b/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala @@ -476,7 +476,7 @@ class TreeUnpickler[Tasty <: TastyUniverse]( flags |= FieldAccessor if (flags.not(Mutable)) flags |= Stable - if (flags.is(Case | Static | Enum)) // singleton enum case + if (flags.is(Case | Enum)) // singleton enum case flags |= Object | Stable // encode as a module (this needs to be corrected in bytecode) } if (ctx.owner.isClass) { diff --git a/test/tasty/run/src-2/tastytest/TestNestedEnum.scala b/test/tasty/run/src-2/tastytest/TestNestedEnum.scala new file mode 100644 index 000000000000..fc9ab156eb40 --- /dev/null +++ b/test/tasty/run/src-2/tastytest/TestNestedEnum.scala @@ -0,0 +1,17 @@ +package tastytest + +object TestNestedEnum extends Suite("TestNestedEnum") { + + test("call toString on enum of inner class") { + val n = new NestedEnum() + assert(n.Mode.On.toString == "On") + } + + test("nested enums do not have same type") { + val n1 = new NestedEnum() + val n2 = new NestedEnum() + implicitly[scala.util.NotGiven[n1.Mode.Off.type =:= n2.Mode.Off.type]] + assert(n1.Mode.Off != n2.Mode.Off) + } + +} diff --git a/test/tasty/run/src-3/tastytest/NestedEnum.scala 
b/test/tasty/run/src-3/tastytest/NestedEnum.scala new file mode 100644 index 000000000000..5a89c90fb64e --- /dev/null +++ b/test/tasty/run/src-3/tastytest/NestedEnum.scala @@ -0,0 +1,6 @@ +package tastytest + +class NestedEnum: + + enum Mode: + case On, Off From 92f1948cb746b1afbf71c90bde0f20e577b8a9fd Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Mon, 25 Oct 2021 15:07:56 +0200 Subject: [PATCH 423/769] add existential flag to wildcards --- src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala | 2 +- src/compiler/scala/tools/nsc/tasty/bridge/FlagOps.scala | 6 +++++- src/compiler/scala/tools/nsc/transform/Erasure.scala | 1 - 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala index ce414b67a0f0..42c6e9cc4edd 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala @@ -269,7 +269,7 @@ trait ContextOps { self: TastyUniverse => owner.newTypeParameter( name = u.freshTypeName("_$")(u.currentFreshNameCreator), pos = u.NoPosition, - newFlags = FlagSets.Creation.Default + newFlags = FlagSets.Creation.Wildcard ).setInfo(info) final def newConstructor(owner: Symbol, info: Type): Symbol = unsafeNewSymbol( diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/FlagOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/FlagOps.scala index b7894f726465..28fc84e1657e 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/FlagOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/FlagOps.scala @@ -42,6 +42,7 @@ trait FlagOps { self: TastyUniverse => object Creation { val ObjectDef: TastyFlagSet = Object | Lazy | Final | Stable val ObjectClassDef: TastyFlagSet = Object | Final + val Wildcard: u.FlagSet = newSymbolFlagSetFromEncoded(Flags.EXISTENTIAL) val Default: u.FlagSet = newSymbolFlagSet(EmptyTastyFlags) } def withAccess(flags: TastyFlagSet, inheritedAccess: TastyFlagSet): TastyFlagSet = @@ -56,7 +57,10 @@ trait FlagOps { self: TastyUniverse => /** For purpose of symbol initialisation, encode a `TastyFlagSet` as a `symbolTable.FlagSet`. 
*/ private[bridge] def newSymbolFlagSet(tflags: TastyFlagSet): u.FlagSet = - unsafeEncodeTastyFlagSet(tflags) | ModifierFlags.SCALA3X + newSymbolFlagSetFromEncoded(unsafeEncodeTastyFlagSet(tflags)) + + private[bridge] def newSymbolFlagSetFromEncoded(flags: u.FlagSet): u.FlagSet = + flags | ModifierFlags.SCALA3X implicit final class SymbolFlagOps(val sym: Symbol) { def reset(tflags: TastyFlagSet)(implicit ctx: Context): sym.type = diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index 751134fd6b07..9109bff9945f 100644 --- a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -1297,7 +1297,6 @@ abstract class Erasure extends InfoTransform if (ct.tag == ClazzTag && ct.typeValue.typeSymbol != definitions.UnitClass) { val typeValue = ct.typeValue.dealiasWiden val erased = erasure(typeValue.typeSymbol) applyInArray typeValue - treeCopy.Literal(cleanLiteral, Constant(erased)) } else cleanLiteral From 783323246f58c9e0fc4a587eeda9414216caf7c5 Mon Sep 17 00:00:00 2001 From: Jamie Thompson Date: Mon, 25 Oct 2021 16:00:17 +0200 Subject: [PATCH 424/769] remove force of annotation constructor --- .../scala/tools/nsc/tasty/ForceKinds.scala | 3 --- .../scala/tools/nsc/tasty/TastyModes.scala | 5 +++++ .../scala/tools/nsc/tasty/TreeUnpickler.scala | 9 +++++---- .../scala/tools/nsc/tasty/bridge/ContextOps.scala | 3 +++ .../scala/tools/nsc/tasty/bridge/TreeOps.scala | 15 ++++----------- test/tasty/pos/src-3/tastytest/Annotated.scala | 6 ++++++ .../pos/src-3/tastytest/overloadedAnnot.scala | 6 ++++++ 7 files changed, 29 insertions(+), 18 deletions(-) create mode 100644 test/tasty/pos/src-3/tastytest/overloadedAnnot.scala diff --git a/src/compiler/scala/tools/nsc/tasty/ForceKinds.scala b/src/compiler/scala/tools/nsc/tasty/ForceKinds.scala index a0577f9eb5f9..137bbfe854bc 100644 --- a/src/compiler/scala/tools/nsc/tasty/ForceKinds.scala +++ b/src/compiler/scala/tools/nsc/tasty/ForceKinds.scala @@ -18,8 +18,6 @@ import ForceKinds._ object ForceKinds { - /** When forcing the constructor of an annotation */ - final val AnnotCtor: ForceKinds.Single = of(1 << 0) /** When forcing the companion of a module */ final val DeepForce: ForceKinds.Single = of(1 << 1) /** When forcing the owner of a symbol */ @@ -51,7 +49,6 @@ class ForceKinds(val toInt: Int) extends AnyVal { def describe: List[String] = { var xs = List.empty[String] - if (is(AnnotCtor)) xs ::= "reading annotation constructor" if (is(DeepForce)) xs ::= "deep" if (is(CompleteOwner)) xs ::= "class owner is required" if (is(OverloadedSym)) xs ::= "overload resolution" diff --git a/src/compiler/scala/tools/nsc/tasty/TastyModes.scala b/src/compiler/scala/tools/nsc/tasty/TastyModes.scala index d826e367db7d..a8e5e8454599 100644 --- a/src/compiler/scala/tools/nsc/tasty/TastyModes.scala +++ b/src/compiler/scala/tools/nsc/tasty/TastyModes.scala @@ -34,10 +34,14 @@ object TastyModes { final val InnerScope: TastyMode = TastyMode(1 << 5) /** When reading the tree of an Opaque type */ final val OpaqueTypeDef: TastyMode = TastyMode(1 << 6) + /** When reading trees of an annotation */ + final val ReadAnnotationCtor: TastyMode = TastyMode(1 << 7) /** The union of `IndexStats` and `InnerScope` */ final val IndexScopedStats: TastyMode = IndexStats | InnerScope + final val ReadAnnotTopLevel: TastyMode = ReadAnnotation | ReadAnnotationCtor + case class TastyMode(val toInt: Int) extends AnyVal { mode => def |(other: TastyMode): TastyMode = 
TastyMode(toInt | other.toInt) @@ -58,6 +62,7 @@ object TastyModes { if (mode.is(ReadMacro)) sb += "ReadMacro" if (mode.is(InnerScope)) sb += "InnerScope" if (mode.is(OpaqueTypeDef)) sb += "OpaqueTypeDef" + if (mode.is(ReadAnnotationCtor)) sb += "ReadAnnotationCtor" sb.mkString(" | ") } } diff --git a/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala b/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala index 498402926185..4f38b9dd6d86 100644 --- a/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala +++ b/src/compiler/scala/tools/nsc/tasty/TreeUnpickler.scala @@ -396,7 +396,7 @@ class TreeUnpickler[Tasty <: TastyUniverse]( val lo = readType() if (nothingButMods(end)) readVariances(lo) else defn.TypeBounds(lo, readVariances(readType())) - case ANNOTATEDtype => defn.AnnotatedType(readType(), readTerm()(ctx.addMode(ReadAnnotation))) + case ANNOTATEDtype => defn.AnnotatedType(readType(), readTerm()(ctx.addMode(ReadAnnotTopLevel))) case ANDtype => defn.IntersectionType(readType(), readType()) case ORtype => unionIsUnsupported case SUPERtype => defn.SuperType(readType(), readType()) @@ -694,7 +694,7 @@ class TreeUnpickler[Tasty <: TastyUniverse]( private val readTypedWithin: Context => Symbol = implicit ctx => readType().typeSymbolDirect private val readTypedAnnot: Context => DeferredAnnotation = { implicit ctx => - val annotCtx = ctx.addMode(ReadAnnotation) + val annotCtx = ctx.addMode(ReadAnnotTopLevel) val start = currentAddr readByte() // tag val end = readEnd() @@ -1134,7 +1134,8 @@ class TreeUnpickler[Tasty <: TastyUniverse]( until(end)(skipTree()) tpd.TypeTree(fnResult(fn.tpe)) } else { - tpd.Apply(fn, until(end)(readTerm())) + val argsCtx = ctx.argumentCtx(fn) + tpd.Apply(fn, until(end)(readTerm()(argsCtx))) } case TYPEAPPLY => tpd.TypeApply(readTerm(), until(end)(readTpt())) case TYPED => tpd.Typed(readTerm(), readTpt()) @@ -1158,7 +1159,7 @@ class TreeUnpickler[Tasty <: TastyUniverse]( // wrong number of arguments in some scenarios reading F-bounded // types. This came up in #137 of collection strawman. 
tpd.AppliedTypeTree(readTpt(), until(end)(readTpt())) - case ANNOTATEDtpt => tpd.Annotated(readTpt(), readTerm()(ctx.addMode(ReadAnnotation))) + case ANNOTATEDtpt => tpd.Annotated(readTpt(), readTerm()(ctx.addMode(ReadAnnotTopLevel))) case LAMBDAtpt => tpd.LambdaTypeTree(readParams[NoCycle](TYPEPARAM).map(symFromNoCycle), readTpt()) case MATCHtpt => matchTypeIsUnsupported case TYPEBOUNDStpt => diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala index 42c6e9cc4edd..c4f5aeec6fc9 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/ContextOps.scala @@ -557,6 +557,9 @@ trait ContextOps { self: TastyUniverse => final def newRefinementClassSymbol: Symbol = owner.newRefinementClass(u.NoPosition) + final def argumentCtx(fn: Tree): Context = + if (fn.symbol.isPrimaryConstructor) retractMode(ReadAnnotationCtor) else thisCtx + final def setInfo(sym: Symbol, info: Type): Unit = sym.info = info final def markAsEnumSingleton(sym: Symbol): Unit = diff --git a/src/compiler/scala/tools/nsc/tasty/bridge/TreeOps.scala b/src/compiler/scala/tools/nsc/tasty/bridge/TreeOps.scala index 7faac4e3c313..82d82af03a44 100644 --- a/src/compiler/scala/tools/nsc/tasty/bridge/TreeOps.scala +++ b/src/compiler/scala/tools/nsc/tasty/bridge/TreeOps.scala @@ -12,7 +12,7 @@ package scala.tools.nsc.tasty.bridge -import scala.tools.nsc.tasty.{TastyUniverse, TastyModes, ForceKinds}, TastyModes._, ForceKinds._ +import scala.tools.nsc.tasty.{TastyUniverse, TastyModes}, TastyModes._ import scala.tools.tasty.TastyName import scala.reflect.internal.Flags @@ -70,17 +70,10 @@ trait TreeOps { self: TastyUniverse => def selectCtor(qual: Tree) = u.Select(qual, u.nme.CONSTRUCTOR).setType(qual.tpe.typeSymbol.primaryConstructor.tpe) - if (ctx.mode.is(ReadAnnotation) && name.isSignedConstructor) { - val cls = qual.tpe.typeSymbol - cls.ensureCompleted(AnnotCtor) - if (cls.isJavaAnnotation) - selectCtor(qual) - else - selectName(qual, name)(lookup) - } - else { + if (ctx.mode.is(ReadAnnotationCtor) && name.isSignedConstructor) + selectCtor(qual) + else selectName(qual, name)(lookup) - } } diff --git a/test/tasty/pos/src-3/tastytest/Annotated.scala b/test/tasty/pos/src-3/tastytest/Annotated.scala index 6a5a8d43fe75..a9fe6ed2bfde 100644 --- a/test/tasty/pos/src-3/tastytest/Annotated.scala +++ b/test/tasty/pos/src-3/tastytest/Annotated.scala @@ -6,6 +6,12 @@ trait Annotated @rootAnnot(1) trait RootAnnotated +@overloadedAnnot(123) +trait OverloadedAnnotated1 + +@overloadedAnnot(false, "hello") +trait OverloadedAnnotated2 + trait OuterClassAnnotated extends OuterClass { @basicAnnot(xyz) def foo = 1 diff --git a/test/tasty/pos/src-3/tastytest/overloadedAnnot.scala b/test/tasty/pos/src-3/tastytest/overloadedAnnot.scala new file mode 100644 index 000000000000..05179494cbc6 --- /dev/null +++ b/test/tasty/pos/src-3/tastytest/overloadedAnnot.scala @@ -0,0 +1,6 @@ +package tastytest + +final class overloadedAnnot(str: String, int: Int, boolean: Boolean) extends scala.annotation.StaticAnnotation { + def this(int: Int) = this("abc", int, false) + def this(boolean: Boolean, str: String) = this(str, 123, boolean) +} From de20527eeb9370f680d6af31f9a1c3f7451db323 Mon Sep 17 00:00:00 2001 From: Chris Kipp Date: Wed, 27 Oct 2021 22:59:32 +0200 Subject: [PATCH 425/769] Add -h abbreviation to scaladoc help. 
MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit I've never used scaladoc cli before so tonight I downloaded it and the first thing I tried was: ``` ❯ scaladoc -h scaladoc error: bad option: '-h' scaladoc -help gives more information error: IO error while decoding -h with UTF-8: -h (No such file or directory) Please try specifying another one using the -encoding option ``` This is great that it then tells you to use `-help`, but I'm always slightly annoyed when cli tools don't just default all `-help`, `--help`, and `-h` to help. So if this doesn't conflict with anything else (and looking I didn't see that it does), would it be alright to also have `-h` as an abbreviation for `-help`? --- .../scala/tools/nsc/settings/StandardScalaSettings.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala index 7da06bb6c7bd..15527257a3b5 100644 --- a/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala @@ -48,7 +48,7 @@ trait StandardScalaSettings { _: MutableSettings => else Wconf.tryToSet(List(s"cat=feature:s")) } val g = ChoiceSetting ("-g", "level", "Set level of generated debugging info.", List("none", "source", "line", "vars", "notailcalls"), "vars") - val help = BooleanSetting ("-help", "Print a synopsis of standard options") withAbbreviation "--help" + val help = BooleanSetting ("-help", "Print a synopsis of standard options") withAbbreviation "--help" withAbbreviation("-h") val nowarn = BooleanSetting ("-nowarn", "Generate no warnings.") withAbbreviation "--no-warnings" withPostSetHook { s => if (s) maxwarns.value = 0 } val optimise: BooleanSetting // depends on post hook which mutates other settings val print = BooleanSetting ("-print", "Print program with Scala-specific features removed.") withAbbreviation "--print" From 8232bb231492685fac4d897f71858bcaf17e313d Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Wed, 27 Oct 2021 13:48:13 -0700 Subject: [PATCH 426/769] Template with empty body/parents has good position The position of those absent elements was incorrectly taken as following on the next line. 
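
A rough way to observe the fix (a sketch only; the input mirrors the BytecodeTest case added below, and the javap invocation is merely one convenient way to read the emitted LineNumberTable):

```
// hypothetical probe: compile this file, then e.g. `javap -l A B 'D$' C`
// After this change, each synthetic <init> is attributed to the line of its
// own class/object header rather than to the following definition.
class A      // expected line 1
class B      // expected line 2
object D     // expected line 3
class C      // expected line 4
```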
--- .../scala/tools/nsc/ast/parser/Parsers.scala | 4 ++-- .../nsc/backend/jvm/BCodeSkelBuilder.scala | 3 +-- .../scala/tools/testkit/BytecodeTesting.scala | 2 +- test/files/run/dynamic-applyDynamic.check | 6 ++--- .../files/run/dynamic-applyDynamicNamed.check | 6 ++--- test/files/run/dynamic-selectDynamic.check | 6 ++--- test/files/run/dynamic-updateDynamic.check | 6 ++--- test/files/run/existential-rangepos.check | 6 ++--- test/files/run/literals-parsing.check | 2 +- test/files/run/sd187.check | 6 ++--- test/files/run/string-switch-pos.check | 10 ++++---- test/files/run/t10203.check | 6 ++--- test/files/run/t10751.check | 6 ++--- test/files/run/t12062.check | 16 ++++++------- test/files/run/t5064.check | 6 ++--- test/files/run/t5603.check | 2 +- test/files/run/t6288.check | 24 +++++++++---------- test/files/run/t7271.check | 2 +- test/files/run/t7569.check | 6 ++--- .../tools/nsc/backend/jvm/BytecodeTest.scala | 22 ++++++++++++++--- 20 files changed, 81 insertions(+), 66 deletions(-) diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 7de107517da2..2be3a1c81722 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -3246,8 +3246,8 @@ self => // regarding AnyVal constructor in AddInterfaces. DefDef(NoMods, nme.CONSTRUCTOR, Nil, ListOfNil, TypeTree(), Block(Nil, literalUnit)) ) - val parentPos = o2p(in.offset) val tstart1 = if (body.isEmpty && in.lastOffset < tstart) in.lastOffset else tstart + val parentPos = if (parents.isEmpty) o2p(tstart1) else o2p(in.offset) // we can't easily check this later, because `gen.mkParents` adds the default AnyRef parent, and we need to warn based on what the user wrote if (name == nme.PACKAGEkw && parents.nonEmpty && settings.isScala3) @@ -3260,7 +3260,7 @@ self => Template(parents, self, anyvalConstructor() :: body) else gen.mkTemplate(gen.mkParents(mods, parents, parentPos), - self, constrMods, vparamss, body, o2p(tstart)) + self, constrMods, vparamss, body, o2p(tstart1)) } } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala index 48d8290535d0..952a33fc6554 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala @@ -478,8 +478,7 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { def isAtProgramPoint(lbl: asm.Label): Boolean = { (lastInsn match { case labnode: asm.tree.LabelNode => (labnode.getLabel == lbl); case _ => false } ) } - def lineNumber(tree: Tree): Unit = { - if (!emitLines || !tree.pos.isDefined) return + def lineNumber(tree: Tree): Unit = if (emitLines && tree.pos.isDefined) { val nr = tree.pos.finalPosition.line if (nr != lastEmittedLineNr) { lastEmittedLineNr = nr diff --git a/src/testkit/scala/tools/testkit/BytecodeTesting.scala b/src/testkit/scala/tools/testkit/BytecodeTesting.scala index 21ca25c629aa..1f3b370f8be2 100644 --- a/src/testkit/scala/tools/testkit/BytecodeTesting.scala +++ b/src/testkit/scala/tools/testkit/BytecodeTesting.scala @@ -32,7 +32,7 @@ import scala.tools.testkit.ASMConverters._ trait BytecodeTesting extends ClearAfterClass { /** - * Overwrite to set additional compiler flags + * Override to set additional compiler flags. 
*/ def compilerArgs = "" diff --git a/test/files/run/dynamic-applyDynamic.check b/test/files/run/dynamic-applyDynamic.check index a496e6259626..0631de014b1c 100644 --- a/test/files/run/dynamic-applyDynamic.check +++ b/test/files/run/dynamic-applyDynamic.check @@ -1,8 +1,8 @@ [[syntax trees at end of typer]] // newSource1.scala [0:67]package [0:0] { - [0:67]object X extends [9:67][67]scala.AnyRef { - [67]def (): [9]X.type = [67]{ - [67][67][67]X.super.(); + [0:67]object X extends [9:67][9]scala.AnyRef { + [9]def (): [9]X.type = [9]{ + [9][9][9]X.super.(); [9]() }; [17:30]private[this] val d: [21]D = [25:30][25:30][25:30]new [29:30]D(); diff --git a/test/files/run/dynamic-applyDynamicNamed.check b/test/files/run/dynamic-applyDynamicNamed.check index 09ddf2cf7ad5..20fb5e870477 100644 --- a/test/files/run/dynamic-applyDynamicNamed.check +++ b/test/files/run/dynamic-applyDynamicNamed.check @@ -1,8 +1,8 @@ [[syntax trees at end of typer]] // newSource1.scala [0:97]package [0:0] { - [0:97]object X extends [9:97][97]scala.AnyRef { - [97]def (): [9]X.type = [97]{ - [97][97][97]X.super.(); + [0:97]object X extends [9:97][9]scala.AnyRef { + [9]def (): [9]X.type = [9]{ + [9][9][9]X.super.(); [9]() }; [17:30]private[this] val d: [21]D = [25:30][25:30][25:30]new [29:30]D(); diff --git a/test/files/run/dynamic-selectDynamic.check b/test/files/run/dynamic-selectDynamic.check index 29a2a1a3e06c..82cd656e6602 100644 --- a/test/files/run/dynamic-selectDynamic.check +++ b/test/files/run/dynamic-selectDynamic.check @@ -1,8 +1,8 @@ [[syntax trees at end of typer]] // newSource1.scala [0:50]package [0:0] { - [0:50]object X extends [9:50][50]scala.AnyRef { - [50]def (): [9]X.type = [50]{ - [50][50][50]X.super.(); + [0:50]object X extends [9:50][9]scala.AnyRef { + [9]def (): [9]X.type = [9]{ + [9][9][9]X.super.(); [9]() }; [17:30]private[this] val d: [21]D = [25:30][25:30][25:30]new [29:30]D(); diff --git a/test/files/run/dynamic-updateDynamic.check b/test/files/run/dynamic-updateDynamic.check index b320ab129312..5180f3e7bfd8 100644 --- a/test/files/run/dynamic-updateDynamic.check +++ b/test/files/run/dynamic-updateDynamic.check @@ -1,8 +1,8 @@ [[syntax trees at end of typer]] // newSource1.scala [0:69]package [0:0] { - [0:69]object X extends [9:69][69]scala.AnyRef { - [69]def (): [9]X.type = [69]{ - [69][69][69]X.super.(); + [0:69]object X extends [9:69][9]scala.AnyRef { + [9]def (): [9]X.type = [9]{ + [9][9][9]X.super.(); [9]() }; [17:30]private[this] val d: [21]D = [25:30][25:30][25:30]new [29:30]D(); diff --git a/test/files/run/existential-rangepos.check b/test/files/run/existential-rangepos.check index 984baeaaf8e3..39efe241688b 100644 --- a/test/files/run/existential-rangepos.check +++ b/test/files/run/existential-rangepos.check @@ -1,8 +1,8 @@ [[syntax trees at end of patmat]] // newSource1.scala [0:76]package [0:0] { - [0:76]abstract class A[[17:18]T[17:18]] extends [20:76][76]scala.AnyRef { - [76]def (): [20]A[T] = [76]{ - [76][76][76]A.super.(); + [0:76]abstract class A[[17:18]T[17:18]] extends [20:76][20]scala.AnyRef { + [20]def (): [20]A[T] = [20]{ + [20][20][20]A.super.(); [20]() }; [24:51]private[this] val foo: [28]Set[_ <: T] = [47:51]null; diff --git a/test/files/run/literals-parsing.check b/test/files/run/literals-parsing.check index 25a57dd41d27..e1b3cac77718 100644 --- a/test/files/run/literals-parsing.check +++ b/test/files/run/literals-parsing.check @@ -1,6 +1,6 @@ [[syntax trees at end of parser]] // newSource1.scala [0:161]package [0:0] { - [0:161]abstract trait T extends 
[8:161][161]scala.AnyRef { + [0:161]abstract trait T extends [8:161][8]scala.AnyRef { [8]def $init$() = [8]{ [8]() }; diff --git a/test/files/run/sd187.check b/test/files/run/sd187.check index c8fcab58239d..2c97874a2a65 100644 --- a/test/files/run/sd187.check +++ b/test/files/run/sd187.check @@ -1,8 +1,8 @@ [[syntax trees at end of patmat]] // newSource1.scala [1:2302]package [1:1] { - [1:2302]class C extends [9:2302][2302]scala.AnyRef { - [2302]def (): [9]C = [2302]{ - [2302][2302][2302]C.super.(); + [1:2302]class C extends [9:2302][9]scala.AnyRef { + [9]def (): [9]C = [9]{ + [9][9][9]C.super.(); [9]() }; [103:904]def commonSubPattern([124:130]x: [127:130]): [107]AnyVal = [206:220]{ diff --git a/test/files/run/string-switch-pos.check b/test/files/run/string-switch-pos.check index 805f5a3143bd..27ea7da767af 100644 --- a/test/files/run/string-switch-pos.check +++ b/test/files/run/string-switch-pos.check @@ -1,8 +1,8 @@ [[syntax trees at end of patmat]] // newSource1.scala [0:216]package [0:0] { - [0:216]class Switch extends [13:216][216]scala.AnyRef { - [216]def (): [13]Switch = [216]{ - [216][216][216]Switch.super.(); + [0:216]class Switch extends [13:216][13]scala.AnyRef { + [13]def (): [13]Switch = [13]{ + [13][13][13]Switch.super.(); [13]() }; [17:214]def switch([28:37]s: [31:37], [39:52]cond: [45:52]): [21]Int = [56:57]{ @@ -67,8 +67,8 @@ } } }; - [216]def (): [13]Switch = [216]{ - [216][216][216]Switch.super.(); + [13]def (): [13]Switch = [13]{ + [13][13][13]Switch.super.(); [13]() } } diff --git a/test/files/run/t10203.check b/test/files/run/t10203.check index d7fa5ca5de37..c97fe36a70b7 100644 --- a/test/files/run/t10203.check +++ b/test/files/run/t10203.check @@ -1,8 +1,8 @@ [[syntax trees at end of typer]] // newSource1.scala [0:88]package [0:0] { - [0:88]object X extends [9:88][88]scala.AnyRef { - [88]def (): [9]X.type = [88]{ - [88][88][88]X.super.(); + [0:88]object X extends [9:88][9]scala.AnyRef { + [9]def (): [9]X.type = [9]{ + [9][9][9]X.super.(); [9]() }; [17:24][17:18][17:18]D.selectDynamic[[17]Nothing](<19:24>"aaaaa"); diff --git a/test/files/run/t10751.check b/test/files/run/t10751.check index 41c811ac2537..0142b6896a14 100644 --- a/test/files/run/t10751.check +++ b/test/files/run/t10751.check @@ -1,8 +1,8 @@ [[syntax trees at end of typer]] // newSource1.scala [0:201]package [0:0] { - [0:201]object Test extends [12:201][201]scala.AnyRef { - [201]def (): [12]Test.type = [201]{ - [201][201][201]Test.super.(); + [0:201]object Test extends [12:201][12]scala.AnyRef { + [12]def (): [12]Test.type = [12]{ + [12][12][12]Test.super.(); [12]() }; [20:43]private[this] val n: [38]Int = [42:43]1; diff --git a/test/files/run/t12062.check b/test/files/run/t12062.check index c578003008d7..c0456326b804 100644 --- a/test/files/run/t12062.check +++ b/test/files/run/t12062.check @@ -1,7 +1,7 @@ warning: 6 deprecations (since 2.13.0); re-run with -deprecation for details class TestByte -source-newSource1.scala,line-28 TestByte.super.() +source-newSource1.scala,line-2 TestByte.super.() source-newSource1.scala,line-3 1.toByte() source-newSource1.scala,line-6 java.lang.Byte.toString(TestByte.this.value()) source-newSource1.scala,line-6 TestByte.this.value() @@ -51,7 +51,7 @@ source-newSource1.scala,line-27 TestByte.this.value() class TestShort -source-newSource2.scala,line-28 TestShort.super.() +source-newSource2.scala,line-2 TestShort.super.() source-newSource2.scala,line-3 1.toShort() source-newSource2.scala,line-6 java.lang.Short.toString(TestShort.this.value()) source-newSource2.scala,line-6 
TestShort.this.value() @@ -101,7 +101,7 @@ source-newSource2.scala,line-27 TestShort.this.value() class TestInt -source-newSource3.scala,line-28 TestInt.super.() +source-newSource3.scala,line-2 TestInt.super.() source-newSource3.scala,line-3 1.toInt() source-newSource3.scala,line-6 java.lang.Integer.toString(TestInt.this.value()) source-newSource3.scala,line-6 TestInt.this.value() @@ -150,7 +150,7 @@ source-newSource3.scala,line-27 TestInt.this.value() class TestLong -source-newSource4.scala,line-28 TestLong.super.() +source-newSource4.scala,line-2 TestLong.super.() source-newSource4.scala,line-3 1.toLong() source-newSource4.scala,line-6 java.lang.Long.toString(TestLong.this.value()) source-newSource4.scala,line-6 TestLong.this.value() @@ -200,7 +200,7 @@ source-newSource4.scala,line-27 TestLong.this.value() class TestBoolean -source-newSource5.scala,line-9 TestBoolean.super.() +source-newSource5.scala,line-2 TestBoolean.super.() source-newSource5.scala,line-6 java.lang.Boolean.toString(TestBoolean.this.value()) source-newSource5.scala,line-6 TestBoolean.this.value() source-newSource5.scala,line-7 java.lang.Boolean.hashCode(TestBoolean.this.value()) @@ -209,7 +209,7 @@ source-newSource5.scala,line-8 TestBoolean.this.value() class TestChar -source-newSource6.scala,line-9 TestChar.super.() +source-newSource6.scala,line-2 TestChar.super.() source-newSource6.scala,line-6 java.lang.Character.toString(TestChar.this.value()) source-newSource6.scala,line-6 TestChar.this.value() source-newSource6.scala,line-7 java.lang.Character.hashCode(TestChar.this.value()) @@ -219,7 +219,7 @@ source-newSource6.scala,line-8 TestChar.this.value() class TestFloat -source-newSource7.scala,line-39 TestFloat.super.() +source-newSource7.scala,line-2 TestFloat.super.() source-newSource7.scala,line-3 1.toFloat() source-newSource7.scala,line-6 java.lang.Float.toString(TestFloat.this.value()) source-newSource7.scala,line-6 TestFloat.this.value() @@ -296,7 +296,7 @@ source-newSource7.scala,line-38 TestFloat.this.value() class TestDouble -source-newSource8.scala,line-39 TestDouble.super.() +source-newSource8.scala,line-2 TestDouble.super.() source-newSource8.scala,line-3 1.toDouble() source-newSource8.scala,line-6 java.lang.Double.toString(TestDouble.this.value()) source-newSource8.scala,line-6 TestDouble.this.value() diff --git a/test/files/run/t5064.check b/test/files/run/t5064.check index 9d2c9a3bdec8..78ab21e65f5c 100644 --- a/test/files/run/t5064.check +++ b/test/files/run/t5064.check @@ -7,9 +7,9 @@ newSource1.scala:5: warning: a pure expression does nothing in statement positio newSource1.scala:6: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses Nil ^ -[53] T5064.super.() -[53] T5064.super. -[53] this +[12] T5064.super.() +[12] T5064.super. 
+[12] this [16:23] scala.`package`.List().apply(scala.runtime.ScalaRunTime.wrapIntArray(Array[Int]{1})) [16:20] scala.`package`.List().apply <16:20> scala.`package`.List() diff --git a/test/files/run/t5603.check b/test/files/run/t5603.check index c9ebb69ececb..14ee478343c4 100644 --- a/test/files/run/t5603.check +++ b/test/files/run/t5603.check @@ -1,6 +1,6 @@ [[syntax trees at end of parser]] // newSource1.scala [0:241]package [0:0] { - [0:82]abstract trait Greeting extends [15:82][83]scala.AnyRef { + [0:82]abstract trait Greeting extends [15:82][15]scala.AnyRef { [15]def $init$() = [15]{ [15]() }; diff --git a/test/files/run/t6288.check b/test/files/run/t6288.check index eb1ef1105679..a4ad1fd15e49 100644 --- a/test/files/run/t6288.check +++ b/test/files/run/t6288.check @@ -1,8 +1,8 @@ [[syntax trees at end of patmat]] // newSource1.scala [0:553]package [0:0] { - [0:151]object Case3 extends [13:151][152]scala.AnyRef { - [152]def (): [13]Case3.type = [152]{ - [152][152][152]Case3.super.(); + [0:151]object Case3 extends [13:151][13]scala.AnyRef { + [13]def (): [13]Case3.type = [13]{ + [13][13][13]Case3.super.(); [13]() }; [17:60]def unapply([29:35]z: [32:35]): [21]Option[Int] = [52:60][52:56][52:56]new [52:56]Some[Int]([57:59]-1); @@ -28,9 +28,9 @@ } } }; - [152:308]object Case4 extends [165:308][309]scala.AnyRef { - [309]def (): [165]Case4.type = [309]{ - [309][309][309]Case4.super.(); + [152:308]object Case4 extends [165:308][165]scala.AnyRef { + [165]def (): [165]Case4.type = [165]{ + [165][165][165]Case4.super.(); [165]() }; [169:217]def unapplySeq([184:190]z: [187:190]): [173]Option[List[Int]] = [213:217]scala.None; @@ -56,9 +56,9 @@ } } }; - [309:448]object Case5 extends [322:448][449]scala.AnyRef { - [449]def (): [322]Case5.type = [449]{ - [449][449][449]Case5.super.(); + [309:448]object Case5 extends [322:448][322]scala.AnyRef { + [322]def (): [322]Case5.type = [322]{ + [322][322][322]Case5.super.(); [322]() }; [326:361]def unapply([338:344]z: [341:344]): [330]Boolean = [357:361]true; @@ -84,9 +84,9 @@ } } }; - [449:553]object Case6 extends [462:553][553]scala.AnyRef { - [553]def (): [462]Case6.type = [553]{ - [553][553][553]Case6.super.(); + [449:553]object Case6 extends [462:553][462]scala.AnyRef { + [462]def (): [462]Case6.type = [462]{ + [462][462][462]Case6.super.(); [462]() }; [466:509]def unapply([478:484]z: [481:484]): [470]Option[Int] = [501:509][501:505][501:505]new [501:505]Some[Int]([506:508]-1); diff --git a/test/files/run/t7271.check b/test/files/run/t7271.check index 6db301c21ddb..ddfc0d560a8e 100644 --- a/test/files/run/t7271.check +++ b/test/files/run/t7271.check @@ -1,6 +1,6 @@ [[syntax trees at end of parser]] // newSource1.scala [0:91]package [0:0] { - [0:91]class C extends [8:91][91]scala.AnyRef { + [0:91]class C extends [8:91][8]scala.AnyRef { [8]def () = [8]{ [NoPosition][NoPosition][NoPosition]super.(); [8]() diff --git a/test/files/run/t7569.check b/test/files/run/t7569.check index 5153e9d6a4e9..0f6b70f96d9f 100644 --- a/test/files/run/t7569.check +++ b/test/files/run/t7569.check @@ -1,8 +1,8 @@ source-newSource1.scala,line-3,offset=49 A.this.one source-newSource1.scala,line-3,offset=49 A.this -source-newSource1.scala,line-4,offset=67 A.super.() -source-newSource1.scala,line-4,offset=67 A.super. -source-newSource1.scala,line-4,offset=67 this +source-newSource1.scala,line-2,offset=41 A.super.() +source-newSource1.scala,line-2,offset=41 A.super. 
+source-newSource1.scala,line-2,offset=41 this source-newSource1.scala,line-3,offset=49 A.this.one source-newSource1.scala,line-3,offset=49 A.this RangePosition(newSource1.scala, 55, 57, 65) java.lang.Integer.toString(1) diff --git a/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala b/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala index 8e5cdd220c56..f7a0de1c5372 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala @@ -17,12 +17,12 @@ class BytecodeTest extends BytecodeTesting { @Test def t10812(): Unit = { - val code = - """ A { def f: Object = null } + def code(prefix: String) = + s"""$prefix A { def f: Object = null } |object B extends A { override def f: String = "b" } """.stripMargin for (base <- List("trait", "class")) { - val List(a, bMirror, bModule) = compileClasses(base + code) + val List(a, bMirror, bModule) = compileClasses(code(base)) assertEquals(bMirror.name, "B") assertEquals(bMirror.methods.asScala.filter(_.name == "f").map(m => m.name + m.desc).toList, List("f()Ljava/lang/String;")) } @@ -202,6 +202,22 @@ class BytecodeTest extends BytecodeTesting { ) } + @Test def `class constructor has correct line numbers (12470)`: Unit = { + val code = + """class A + |class B + |object D + |class C + """.stripMargin + val lines = Map("A" -> 1, "B" -> 2, "D$" -> 3, "C" -> 4) + compileClasses(code).foreach { c => + c.methods.asScala.foreach(m => convertMethod(m).instructions.foreach { + case LineNumber(n, _) => assertEquals(s"class ${c.name} method ${m.name}", lines(c.name), n) + case _ => + }) + } + } + @Test def sd233(): Unit = { val code = "def f = { println(1); synchronized(println(2)) }" From 1234506ca6794f116d72e97f6513fa593032dda6 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Thu, 28 Oct 2021 13:30:54 -0700 Subject: [PATCH 427/769] Warn on bracket after newline --- src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 2be3a1c81722..3b0554c4b11a 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -3078,10 +3078,13 @@ self => * }}} */ def classDef(start: Offset, mods: Modifiers): ClassDef = { + def isAfterLineEnd: Boolean = in.lastOffset < in.lineStartOffset && (in.lineStartOffset <= in.offset || in.lastOffset < in.lastLineStartOffset && in.lastLineStartOffset <= in.offset) in.nextToken() checkKeywordDefinition() val nameOffset = in.offset val name = identForType() + if (currentRun.isScala3 && in.token == LBRACKET && isAfterLineEnd) + deprecationWarning(in.offset, "type parameters should not follow newline", "2.13.7") atPos(start, if (name == tpnme.ERROR) start else nameOffset) { savingClassContextBounds { val contextBoundBuf = new ListBuffer[Tree] From 06d7dcf2381d9f29b635da50bddecbf6a388d8d9 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Thu, 28 Oct 2021 17:03:52 -0700 Subject: [PATCH 428/769] Add comments and meaningful names per review --- .../scala/tools/nsc/ast/parser/Parsers.scala | 30 +++++++++---------- 1 file changed, 14 insertions(+), 16 deletions(-) diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 3b0554c4b11a..fe835239bf71 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala 
@@ -3233,7 +3233,7 @@ self => deprecationWarning(in.offset, "Using `<:` for `extends` is deprecated", since = "2.12.5") true } - val (parents, self, body) = ( + val (parents, self, body) = if (in.token == EXTENDS || in.token == SUBTYPE && mods.isTrait && deprecatedUsage()) { in.nextToken() template() @@ -3243,27 +3243,25 @@ self => val (self, body) = templateBodyOpt(parenMeansSyntaxError = mods.isTrait || name.isTermName) (List(), self, body) } - ) - def anyvalConstructor() = ( - // Not a well-formed constructor, has to be finished later - see note - // regarding AnyVal constructor in AddInterfaces. - DefDef(NoMods, nme.CONSTRUCTOR, Nil, ListOfNil, TypeTree(), Block(Nil, literalUnit)) - ) - val tstart1 = if (body.isEmpty && in.lastOffset < tstart) in.lastOffset else tstart - val parentPos = if (parents.isEmpty) o2p(tstart1) else o2p(in.offset) + // Not a well-formed constructor, has to be finished later - see note + // regarding AnyVal constructor in AddInterfaces. + def anyvalConstructor() = DefDef(NoMods, nme.CONSTRUCTOR, Nil, ListOfNil, TypeTree(), Block(Nil, literalUnit)) + // tstart is the offset of the token after `class C[A]` (which may be LPAREN, EXTENDS, LBRACE). + // if there is no template body, then tstart may be in the next program element, so back up to just after the `class C[A]`. + val templateOffset = if (body.isEmpty && in.lastOffset < tstart) in.lastOffset else tstart + val templatePos = o2p(templateOffset) - // we can't easily check this later, because `gen.mkParents` adds the default AnyRef parent, and we need to warn based on what the user wrote - if (name == nme.PACKAGEkw && parents.nonEmpty && settings.isScala3) - deprecationWarning(tstart, s"package object inheritance is deprecated (https://github.com/scala/scala-dev/issues/441);\n" + - s"drop the `extends` clause or use a regular object instead", "3.0.0") + // warn now if user wrote parents for package object; `gen.mkParents` adds AnyRef to parents + if (currentRun.isScala3 && name == nme.PACKAGEkw && !parents.isEmpty) + deprecationWarning(tstart, """package object inheritance is deprecated (https://github.com/scala/scala-dev/issues/441); + |drop the `extends` clause or use a regular object instead""".stripMargin, "3.0.0") - atPos(tstart1) { + atPos(templateOffset) { // Exclude only the 9 primitives plus AnyVal. if (inScalaRootPackage && ScalaValueClassNames.contains(name)) Template(parents, self, anyvalConstructor() :: body) else - gen.mkTemplate(gen.mkParents(mods, parents, parentPos), - self, constrMods, vparamss, body, o2p(tstart1)) + gen.mkTemplate(gen.mkParents(mods, parents, templatePos), self, constrMods, vparamss, body, templatePos) } } From ca605812ff51054c7bd0ab6526d265ea29d94aad Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Thu, 28 Oct 2021 19:11:12 -0700 Subject: [PATCH 429/769] Remove unused code for NL before LBRACKET If there is a newline between the identifier and type parameter section, it is consumed on next token. NL is not inserted because LBRACKET can no longer start a statement, although this behavior has reverted in Scala 3. 
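
The shape in question, for reference (hypothetical example, not taken from the test suite): the type parameter clause follows a newline after the class name. Scala 2 still parses this as a single definition because no NL token is inserted before `[`, and under -Xsource:3 the parser now issues the deprecation added a few commits earlier ("type parameters should not follow newline").

```
// hypothetical example; accepted by the Scala 2 parser,
// deprecation-warned under -Xsource:3
class Widget
[A](val a: A) {
  def get: A = a
}
```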
--- src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 1 - 1 file changed, 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index fe835239bf71..a012521ccb3e 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -2607,7 +2607,6 @@ self => } param } - newLineOptWhenFollowedBy(LBRACKET) if (in.token == LBRACKET) inBrackets(commaSeparated(typeParam(NoMods withAnnotations annotations(skipNewLines = true)))) else Nil } From 2d36e7183d2ca2e8a98657630b6d421a27d64000 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Thu, 28 Oct 2021 22:43:04 -0700 Subject: [PATCH 430/769] BigInt delegates for inputs not in domain It errors identically as the underlying BigInteger. --- src/library/scala/math/BigInt.scala | 16 ++++++------- test/junit/scala/math/BigIntTest.scala | 32 +++++++++++++++++++++++--- 2 files changed, 37 insertions(+), 11 deletions(-) diff --git a/src/library/scala/math/BigInt.scala b/src/library/scala/math/BigInt.scala index 6ea371328d9e..c48bab4445ee 100644 --- a/src/library/scala/math/BigInt.scala +++ b/src/library/scala/math/BigInt.scala @@ -56,9 +56,9 @@ object BigInt { * @return the constructed `BigInt` */ def apply(l: Long): BigInt = - if (minCached <= l && l <= maxCached) getCached(l.toInt) else { - if (l == Long.MinValue) longMinValue else new BigInt(null, l) - } + if (minCached <= l && l <= maxCached) getCached(l.toInt) + else if (l == Long.MinValue) longMinValue + else new BigInt(null, l) /** Translates a byte array containing the two's-complement binary * representation of a BigInt into a BigInt. @@ -436,7 +436,7 @@ final class BigInt private (private var _bigInteger: BigInteger, private val _lo * @param that A positive number */ def mod(that: BigInt): BigInt = - if (this.longEncoding && that.longEncoding) { + if (this.longEncoding && that.longEncoding && that._long > 0) { val res = this._long % that._long if (res >= 0) BigInt(res) else BigInt(res + that._long) } else BigInt(this.bigInteger.mod(that.bigInteger)) @@ -495,7 +495,7 @@ final class BigInt private (private var _bigInteger: BigInteger, private val _lo /** Returns true if and only if the designated bit is set. */ def testBit(n: Int): Boolean = - if (longEncoding) { + if (longEncoding && n >= 0) { if (n <= 63) (_long & (1L << n)) != 0 else @@ -505,17 +505,17 @@ final class BigInt private (private var _bigInteger: BigInteger, private val _lo /** Returns a BigInt whose value is equivalent to this BigInt with the designated bit set. */ def setBit(n: Int): BigInt = // note that we do not operate on the Long sign bit #63 - if (longEncoding && n <= 62) BigInt(_long | (1L << n)) else BigInt(this.bigInteger.setBit(n)) + if (longEncoding && n <= 62 && n >= 0) BigInt(_long | (1L << n)) else BigInt(this.bigInteger.setBit(n)) /** Returns a BigInt whose value is equivalent to this BigInt with the designated bit cleared. */ def clearBit(n: Int): BigInt = // note that we do not operate on the Long sign bit #63 - if (longEncoding && n <= 62) BigInt(_long & ~(1L << n)) else BigInt(this.bigInteger.clearBit(n)) + if (longEncoding && n <= 62 && n >= 0) BigInt(_long & ~(1L << n)) else BigInt(this.bigInteger.clearBit(n)) /** Returns a BigInt whose value is equivalent to this BigInt with the designated bit flipped. 
*/ def flipBit(n: Int): BigInt = // note that we do not operate on the Long sign bit #63 - if (longEncoding && n <= 62) BigInt(_long ^ (1L << n)) else BigInt(this.bigInteger.flipBit(n)) + if (longEncoding && n <= 62 && n >= 0) BigInt(_long ^ (1L << n)) else BigInt(this.bigInteger.flipBit(n)) /** Returns the index of the rightmost (lowest-order) one bit in this BigInt * (the number of zero bits to the right of the rightmost one bit). diff --git a/test/junit/scala/math/BigIntTest.scala b/test/junit/scala/math/BigIntTest.scala index 5135cac6d28b..6a8c46e5704b 100644 --- a/test/junit/scala/math/BigIntTest.scala +++ b/test/junit/scala/math/BigIntTest.scala @@ -1,10 +1,36 @@ package scala.math import org.junit.Test +import org.junit.Assert.{assertFalse, assertTrue} +import scala.tools.testkit.AssertUtil.assertThrows class BigIntTest { - @Test - def testIsComparable(): Unit = - assert(BigInt(1).isInstanceOf[java.lang.Comparable[_]]) + private val bigint = BigInt(42) + + @Test def testIsComparable: Unit = assertTrue(BigInt(42).isInstanceOf[java.lang.Comparable[_]]) + + @Test def `mod respects BigInteger`: Unit = assertThrows[ArithmeticException](bigint mod BigInt(-3), _.contains("modulus not positive")) + + @Test def `modPow respects BigInteger`: Unit = assertThrows[ArithmeticException](bigint.modPow(BigInt(1), BigInt(-3)), _.contains("modulus not positive")) + + @Test def `modInverse respects BigInteger`: Unit = assertThrows[ArithmeticException](bigint.modInverse(BigInt(-3)), _.contains("modulus not positive")) + + @Test def `pow respects BigInteger`: Unit = assertThrows[ArithmeticException](bigint pow -2, _.contains("Negative exponent")) + + @Test def `% respects BigInteger`: Unit = assertThrows[ArithmeticException](bigint % 0, _.contains("/ by zero")) + + @Test def `setBit respects BigInteger`: Unit = assertThrows[ArithmeticException](bigint setBit -1, _.contains("Negative bit address")) + + @Test def `clearBit respects BigInteger`: Unit = assertThrows[ArithmeticException](bigint clearBit -1, _.contains("Negative bit address")) + + @Test def `flipBit respects BigInteger`: Unit = assertThrows[ArithmeticException](bigint flipBit -1, _.contains("Negative bit address")) + + @Test def `/ respects BigInteger`: Unit = assertThrows[ArithmeticException](bigint / BigInt(0), _.contains("/ by zero")) + + @Test def `/% respects BigInteger`: Unit = assertThrows[ArithmeticException](bigint /% BigInt(0), _.contains("/ by zero")) + + @Test def `testBit respects BigInteger`: Unit = assertThrows[ArithmeticException](bigint.testBit(-3), _.contains("Negative bit address")) + + @Test def `testBit 0`: Unit = assertFalse(bigint.testBit(0)) } From d8b6357f79da23686ab6a389d03fa5de4b1e6a27 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Mon, 1 Nov 2021 09:15:21 -0700 Subject: [PATCH 431/769] re-STARR onto 2.13.7 --- build.sbt | 2 +- project/MimaFilters.scala | 60 +-------------------------------------- versions.properties | 2 +- 3 files changed, 3 insertions(+), 61 deletions(-) diff --git a/build.sbt b/build.sbt index 71c745ce596a..d8402b97e616 100644 --- a/build.sbt +++ b/build.sbt @@ -73,7 +73,7 @@ lazy val publishSettings : Seq[Setting[_]] = Seq( // should not be set directly. It is the same as the Maven version and derived automatically from `baseVersion` and // `baseVersionSuffix`. 
globalVersionSettings -Global / baseVersion := "2.13.7" +Global / baseVersion := "2.13.8" Global / baseVersionSuffix := "SNAPSHOT" ThisBuild / organization := "org.scala-lang" ThisBuild / homepage := Some(url("https://www.scala-lang.org")) diff --git a/project/MimaFilters.scala b/project/MimaFilters.scala index c29288cb2467..c263e18c278c 100644 --- a/project/MimaFilters.scala +++ b/project/MimaFilters.scala @@ -13,7 +13,7 @@ object MimaFilters extends AutoPlugin { import autoImport._ override val globalSettings = Seq( - mimaReferenceVersion := Some("2.13.6"), + mimaReferenceVersion := Some("2.13.7"), ) val mimaFilters: Seq[ProblemFilter] = Seq[ProblemFilter]( @@ -30,64 +30,6 @@ object MimaFilters extends AutoPlugin { ProblemFilters.exclude[DirectMissingMethodProblem]("scala.Predef#SeqCharSequence.isEmpty"), ProblemFilters.exclude[DirectMissingMethodProblem]("scala.Predef#ArrayCharSequence.isEmpty"), ProblemFilters.exclude[DirectMissingMethodProblem]("scala.runtime.ArrayCharSequence.isEmpty"), - - // #9425 Node is private[collection] - ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.mutable.HashMap#Node.foreachEntry"), - - // Fixes for scala/bug#12009 - ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.mutable.ArrayBufferView.this"), // private[mutable] - ProblemFilters.exclude[FinalClassProblem]("scala.collection.IndexedSeqView$IndexedSeqViewIterator"), // private[collection] - ProblemFilters.exclude[FinalClassProblem]("scala.collection.IndexedSeqView$IndexedSeqViewReverseIterator"), // private[collection] - ProblemFilters.exclude[MissingClassProblem]("scala.collection.mutable.CheckedIndexedSeqView"), // private[mutable] - ProblemFilters.exclude[MissingClassProblem]("scala.collection.mutable.CheckedIndexedSeqView$"), // private[mutable] - ProblemFilters.exclude[MissingClassProblem]("scala.collection.mutable.CheckedIndexedSeqView$CheckedIterator"), // private[mutable] - ProblemFilters.exclude[MissingClassProblem]("scala.collection.mutable.CheckedIndexedSeqView$CheckedReverseIterator"), // private[mutable] - ProblemFilters.exclude[MissingClassProblem]("scala.collection.mutable.CheckedIndexedSeqView$Id"), // private[mutable] - ProblemFilters.exclude[MissingClassProblem]("scala.collection.mutable.CheckedIndexedSeqView$Appended"), // private[mutable] - ProblemFilters.exclude[MissingClassProblem]("scala.collection.mutable.CheckedIndexedSeqView$Prepended"), // private[mutable] - ProblemFilters.exclude[MissingClassProblem]("scala.collection.mutable.CheckedIndexedSeqView$Concat"), // private[mutable] - ProblemFilters.exclude[MissingClassProblem]("scala.collection.mutable.CheckedIndexedSeqView$Take"), // private[mutable] - ProblemFilters.exclude[MissingClassProblem]("scala.collection.mutable.CheckedIndexedSeqView$TakeRight"), // private[mutable] - ProblemFilters.exclude[MissingClassProblem]("scala.collection.mutable.CheckedIndexedSeqView$Drop"), // private[mutable] - ProblemFilters.exclude[MissingClassProblem]("scala.collection.mutable.CheckedIndexedSeqView$DropRight"), // private[mutable] - ProblemFilters.exclude[MissingClassProblem](s"scala.collection.mutable.CheckedIndexedSeqView$$Map"), // private[mutable] - ProblemFilters.exclude[MissingClassProblem]("scala.collection.mutable.CheckedIndexedSeqView$Reverse"), // private[mutable] - ProblemFilters.exclude[MissingClassProblem]("scala.collection.mutable.CheckedIndexedSeqView$Slice"), // private[mutable] - - // #8835 - 
ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.reflect.runtime.SynchronizedOps#SynchronizedBaseTypeSeq.scala$reflect$runtime$SynchronizedOps$SynchronizedBaseTypeSeq$$super$maxDepthOfElems"), - - // this is an internal class and adding a final override here should not be a problem - ProblemFilters.exclude[FinalMethodProblem]("scala.concurrent.impl.Promise#DefaultPromise.zipWith"), - - // private[scala] Internal API - ProblemFilters.exclude[IncompatibleMethTypeProblem]("scala.reflect.io.FileZipArchive#LeakyEntry.this"), - ProblemFilters.exclude[IncompatibleMethTypeProblem]("scala.reflect.io.FileZipArchive#LeakyEntry.this"), - ProblemFilters.exclude[MissingClassProblem]("scala.reflect.io.FileZipArchive$zipFilePool$"), - - // #9727 - ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.concurrent.TrieMap.filterInPlaceImpl"), // private[collection] - ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.concurrent.TrieMap.mapValuesInPlaceImpl"), // private[collection] - ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.convert.JavaCollectionWrappers#JConcurrentMapWrapper.filterInPlaceImpl"), // private[collection] - ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.convert.JavaCollectionWrappers#JConcurrentMapWrapper.mapValuesInPlaceImpl"), // private[collection] - - // #9733 - ProblemFilters.exclude[MissingClassProblem]("scala.collection.concurrent.TrieMap$RemovalPolicy$"), // private[concurrent] - ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.convert.JavaCollectionWrappers#JConcurrentMapWrapper.removeRefEq"), // private[collection] - ProblemFilters.exclude[DirectMissingMethodProblem]("scala.collection.convert.JavaCollectionWrappers#JConcurrentMapWrapper.replaceRefEq"), // private[collection] - - // #9741 - ProblemFilters.exclude[MissingClassProblem]("scala.collection.immutable.SeqMap$SeqMapBuilderImpl"), // private[SeqMap] - - // #9752 - ProblemFilters.exclude[MissingTypesProblem]("scala.reflect.ClassTag$cache$"), - ProblemFilters.exclude[MissingClassProblem]("scala.runtime.ModuleSerializationProxy$"), - ProblemFilters.exclude[MissingTypesProblem]("scala.reflect.runtime.JavaMirrors$JavaMirror$typeTagCache$"), - ProblemFilters.exclude[MissingClassProblem]("scala.runtime.ClassValueCompat"), - ProblemFilters.exclude[MissingClassProblem]("scala.runtime.ClassValueCompat$"), - ProblemFilters.exclude[MissingClassProblem]("scala.runtime.ClassValueCompat$ClassValueInterface"), - ProblemFilters.exclude[MissingClassProblem]("scala.runtime.ClassValueCompat$JavaClassValue"), - ProblemFilters.exclude[MissingClassProblem]("scala.runtime.ClassValueCompat$FallbackClassValue"), ) override val buildSettings = Seq( diff --git a/versions.properties b/versions.properties index d2856613410d..7e64c1f39d9d 100644 --- a/versions.properties +++ b/versions.properties @@ -1,5 +1,5 @@ # Scala version used for bootstrapping (see README.md) -starr.version=2.13.6 +starr.version=2.13.7 # These are the versions of the modules that go with this release. 
# Artifact dependencies: From 5035461f0f524f59034bb81271a292f7c4f46976 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Mon, 1 Nov 2021 17:01:15 -0700 Subject: [PATCH 432/769] Fix slice of seq view --- src/library/scala/collection/IndexedSeqView.scala | 14 ++++++++------ .../scala/collection/IndexedSeqViewTest.scala | 5 +++++ 2 files changed, 13 insertions(+), 6 deletions(-) diff --git a/src/library/scala/collection/IndexedSeqView.scala b/src/library/scala/collection/IndexedSeqView.scala index 692486b1e088..f3bc4b074b5e 100644 --- a/src/library/scala/collection/IndexedSeqView.scala +++ b/src/library/scala/collection/IndexedSeqView.scala @@ -105,12 +105,14 @@ object IndexedSeqView { } override def sliceIterator(from: Int, until: Int): Iterator[A] = { - val startCutoff = pos - val untilCutoff = startCutoff - remainder + 1 - val nextStartCutoff = if (from < 0) startCutoff else if (startCutoff - from < 0) 0 else startCutoff - from - val nextUntilCutoff = if (until < 0) startCutoff else if (startCutoff - until < untilCutoff) untilCutoff else startCutoff - until + 1 - remainder = Math.max(0, nextStartCutoff - nextUntilCutoff + 1) - pos = nextStartCutoff + if (_hasNext) { + val startCutoff = pos + val untilCutoff = startCutoff - remainder + 1 + val nextStartCutoff = if (from < 0) startCutoff else if (startCutoff - from < 0) 0 else startCutoff - from + val nextUntilCutoff = if (until < 0) startCutoff else if (startCutoff - until < untilCutoff) untilCutoff else startCutoff - until + 1 + remainder = Math.max(0, nextStartCutoff - nextUntilCutoff + 1) + pos = nextStartCutoff + } this } } diff --git a/test/junit/scala/collection/IndexedSeqViewTest.scala b/test/junit/scala/collection/IndexedSeqViewTest.scala index 01858a17acb1..9f6485482034 100644 --- a/test/junit/scala/collection/IndexedSeqViewTest.scala +++ b/test/junit/scala/collection/IndexedSeqViewTest.scala @@ -18,4 +18,9 @@ class IndexedSeqViewTest { assertEquals(2, IndexedSeq(1, 2, 3, 4, 5).view.iterator.take(2).knownSize) assertEquals(2, IndexedSeq(1, 2, 3, 4, 5).view.iterator.slice(2, 4).knownSize) } + + @Test + def reverseEmptyIterator(): Unit = { + assertEquals(0, Vector.empty[Int].reverseIterator.take(1).toList.size) + } } From 4ece43311c66c13920493b87b56a9f8162f19f85 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Mon, 1 Nov 2021 23:20:58 -0700 Subject: [PATCH 433/769] Restore lazy iterator.drop Fix incorrect internal usage of drop. Optimize slice of empty and single Iterator. Simplify reverse view iterator slice. 
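A REPL-style usage sketch of the laziness being restored (illustration only, not part of the patch): with `drop` delegating to `sliceIterator` as below, no elements are consumed at the call site, only once the returned iterator is advanced; the eager loop being removed consumed them immediately.

    val source  = Iterator.from(1).map { i => println(s"producing $i"); i }
    val dropped = source.drop(2)   // lazy: prints nothing yet
    dropped.next()                 // prints "producing 1" through "producing 3" and returns 3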
--- .../scala/collection/IndexedSeqView.scala | 27 ++++++++----------- src/library/scala/collection/Iterator.scala | 19 ++++++------- .../collection/immutable/SmallMapTest.scala | 14 +++++----- 3 files changed, 26 insertions(+), 34 deletions(-) diff --git a/src/library/scala/collection/IndexedSeqView.scala b/src/library/scala/collection/IndexedSeqView.scala index f3bc4b074b5e..5be6388536b7 100644 --- a/src/library/scala/collection/IndexedSeqView.scala +++ b/src/library/scala/collection/IndexedSeqView.scala @@ -84,8 +84,8 @@ object IndexedSeqView { } @SerialVersionUID(3L) private[collection] class IndexedSeqViewReverseIterator[A](self: IndexedSeqView[A]) extends AbstractIterator[A] with Serializable { - private[this] var pos = self.length - 1 private[this] var remainder = self.length + private[this] var pos = remainder - 1 @inline private[this] def _hasNext: Boolean = remainder > 0 def hasNext: Boolean = _hasNext def next(): A = @@ -96,22 +96,17 @@ object IndexedSeqView { r } else Iterator.empty.next() - override def drop(n: Int): Iterator[A] = { - if (n > 0) { - pos -= n - remainder = Math.max(0, remainder - n) - } - this - } - - override def sliceIterator(from: Int, until: Int): Iterator[A] = { + override protected def sliceIterator(from: Int, until: Int): Iterator[A] = { if (_hasNext) { - val startCutoff = pos - val untilCutoff = startCutoff - remainder + 1 - val nextStartCutoff = if (from < 0) startCutoff else if (startCutoff - from < 0) 0 else startCutoff - from - val nextUntilCutoff = if (until < 0) startCutoff else if (startCutoff - until < untilCutoff) untilCutoff else startCutoff - until + 1 - remainder = Math.max(0, nextStartCutoff - nextUntilCutoff + 1) - pos = nextStartCutoff + if (remainder <= from) remainder = 0 + else if (from <= 0) { + if (until >= 0 && until < remainder) remainder = until + } + else { + pos = pos - from + if (until >= 0 && until < remainder) remainder = until - from + else remainder -= from + } } this } diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala index 911ff34f1912..1970d3babb62 100644 --- a/src/library/scala/collection/Iterator.scala +++ b/src/library/scala/collection/Iterator.scala @@ -409,9 +409,9 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite def indexWhere(p: A => Boolean, from: Int = 0): Int = { var i = math.max(from, 0) - drop(from) - while (hasNext) { - if (p(next())) return i + val dropped = drop(from) + while (dropped.hasNext) { + if (p(dropped.next())) return i i += 1 } -1 @@ -635,14 +635,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite def next() = if (hasNext) { hdDefined = false; hd } else Iterator.empty.next() } - def drop(n: Int): Iterator[A] = { - var i = 0 - while (i < n && hasNext) { - next() - i += 1 - } - this - } + def drop(n: Int): Iterator[A] = sliceIterator(n, -1) def dropWhile(p: A => Boolean): Iterator[A] = new AbstractIterator[A] { // Magic value: -1 = hasn't dropped, 0 = found first, 1 = defer to parent iterator @@ -972,6 +965,7 @@ object Iterator extends IterableFactory[Iterator] { def hasNext = false def next() = throw new NoSuchElementException("next on empty iterator") override def knownSize: Int = 0 + override protected def sliceIterator(from: Int, until: Int) = this } /** Creates a target $coll from an existing source collection @@ -989,6 +983,9 @@ object Iterator extends IterableFactory[Iterator] { private[this] var consumed: Boolean = false def hasNext = !consumed def next() = if (consumed) 
empty.next() else { consumed = true; a } + override protected def sliceIterator(from: Int, until: Int) = + if (consumed || from > 0 || until == 0) empty + else this } override def apply[A](xs: A*): Iterator[A] = xs.iterator diff --git a/test/junit/scala/collection/immutable/SmallMapTest.scala b/test/junit/scala/collection/immutable/SmallMapTest.scala index 1c182276b29d..c6c278676f34 100644 --- a/test/junit/scala/collection/immutable/SmallMapTest.scala +++ b/test/junit/scala/collection/immutable/SmallMapTest.scala @@ -6,14 +6,14 @@ import org.junit._ import scala.tools.testkit.AllocationTest class SmallMapTest extends AllocationTest { - def iterator(m:Map[_,_]) = m.iterator - def keysIterator(m:Map[_,_]) = m.keysIterator - def valuesIterator(m:Map[_,_]) = m.valuesIterator + def iterator(m: Map[_,_]) = m.iterator + def keysIterator(m: Map[_,_]) = m.keysIterator + def valuesIterator(m: Map[_,_]) = m.valuesIterator - //we use this side effect to avoid the git optimising away the tuples - //but without affecting the allocations + // we use this side effect to avoid the jit optimising away the tuples + // but without affecting the allocations val nonAllocationResult = new Array[Any](5) - def consume(it:Iterator[_]): Int = { + def consume(it: Iterator[_]): Int = { var size = 0 nonAllocationResult(0) = it while (it.hasNext) { @@ -22,7 +22,7 @@ class SmallMapTest extends AllocationTest { } size } - def consume1(it:Iterator[_]): Int = { + def consume1(it: Iterator[_]): Int = { nonAllocationResult(0) = it nonAllocationResult(1) = it.next() 1 From efd93964c37f66462deced9c29ad00036102cb59 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Tue, 2 Nov 2021 16:17:57 -0700 Subject: [PATCH 434/769] Add comments to sliceIterator --- src/library/scala/collection/IndexedSeqView.scala | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/src/library/scala/collection/IndexedSeqView.scala b/src/library/scala/collection/IndexedSeqView.scala index 5be6388536b7..0cedb283a6fc 100644 --- a/src/library/scala/collection/IndexedSeqView.scala +++ b/src/library/scala/collection/IndexedSeqView.scala @@ -96,16 +96,18 @@ object IndexedSeqView { r } else Iterator.empty.next() + // from < 0 means don't move pos, until < 0 means don't limit remainder + // override protected def sliceIterator(from: Int, until: Int): Iterator[A] = { if (_hasNext) { - if (remainder <= from) remainder = 0 - else if (from <= 0) { - if (until >= 0 && until < remainder) remainder = until + if (remainder <= from) remainder = 0 // exhausted by big skip + else if (from <= 0) { // no skip, pos is same + if (until >= 0 && until < remainder) remainder = until // ...limited by until } else { - pos = pos - from - if (until >= 0 && until < remainder) remainder = until - from - else remainder -= from + pos -= from // skip ahead + if (until >= 0 && until < remainder) remainder = until - from // ...limited by until, less the skip + else remainder -= from // ...otherwise just less the skip } } this From 60144fc247c42f7f6268403e4eec3896053b3eb4 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Tue, 2 Nov 2021 16:42:10 -0700 Subject: [PATCH 435/769] Avoid negative remainder --- src/library/scala/collection/IndexedSeqView.scala | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/library/scala/collection/IndexedSeqView.scala b/src/library/scala/collection/IndexedSeqView.scala index 0cedb283a6fc..737f032d2060 100644 --- a/src/library/scala/collection/IndexedSeqView.scala +++ b/src/library/scala/collection/IndexedSeqView.scala @@ 
-106,7 +106,10 @@ object IndexedSeqView { } else { pos -= from // skip ahead - if (until >= 0 && until < remainder) remainder = until - from // ...limited by until, less the skip + if (until >= 0 && until < remainder) { // ...limited by until + if (until <= from) remainder = 0 // ...exhausted if limit is smaller than skip + else remainder = until - from // ...limited by until, less the skip + } else remainder -= from // ...otherwise just less the skip } } From 27213177242af4ad354f0aab499b2969cc0b4487 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Fri, 5 Nov 2021 09:31:59 -0700 Subject: [PATCH 436/769] Stack popAll preserves order --- src/library/scala/collection/mutable/Stack.scala | 2 +- test/junit/scala/collection/mutable/StackTest.scala | 7 +++++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/src/library/scala/collection/mutable/Stack.scala b/src/library/scala/collection/mutable/Stack.scala index 1ef701fa2b61..008822871a72 100644 --- a/src/library/scala/collection/mutable/Stack.scala +++ b/src/library/scala/collection/mutable/Stack.scala @@ -91,7 +91,7 @@ class Stack[A] protected (array: Array[AnyRef], start: Int, end: Int) * * @return The removed elements */ - def popAll(): scala.collection.Seq[A] = removeAllReverse() + def popAll(): scala.collection.Seq[A] = removeAll() /** * Returns and removes all elements from the top of this stack which satisfy the given predicate diff --git a/test/junit/scala/collection/mutable/StackTest.scala b/test/junit/scala/collection/mutable/StackTest.scala index 5576a569b37d..af2e2ecdb38a 100644 --- a/test/junit/scala/collection/mutable/StackTest.scala +++ b/test/junit/scala/collection/mutable/StackTest.scala @@ -24,4 +24,11 @@ class StackTest { @Test def sliding(): Unit = ArrayDequeTest.genericSlidingTest(Stack, "Stack") + + @Test def `popAll preserves iteration order`: Unit = { + val stack = Stack.from(1 to 10) + val list = stack.toList + assertEquals(list, stack.popAll()) + assertTrue(stack.isEmpty) + } } From 72d175bd5d7d1ebe9fc34a90ff718dfff09c7569 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Fri, 5 Nov 2021 09:59:46 -0700 Subject: [PATCH 437/769] Document stack preserves iteration order --- .../scala/collection/mutable/Stack.scala | 26 +++++++++++-------- .../scala/collection/mutable/StackTest.scala | 7 +++++ 2 files changed, 22 insertions(+), 11 deletions(-) diff --git a/src/library/scala/collection/mutable/Stack.scala b/src/library/scala/collection/mutable/Stack.scala index 008822871a72..21e442ac9f92 100644 --- a/src/library/scala/collection/mutable/Stack.scala +++ b/src/library/scala/collection/mutable/Stack.scala @@ -17,17 +17,21 @@ import scala.collection.generic.DefaultSerializable import scala.collection.{IterableFactoryDefaults, IterableOnce, SeqFactory, StrictOptimizedSeqFactory, StrictOptimizedSeqOps} /** A stack implements a data structure which allows to store and retrieve - * objects in a last-in-first-out (LIFO) fashion. - * - * @tparam A type of the elements contained in this stack. - * - * @define Coll `Stack` - * @define coll stack - * @define orderDependent - * @define orderDependentFold - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ + * objects in a last-in-first-out (LIFO) fashion. + * + * Note that operations which consume and produce iterables preserve order, + * rather than reversing it (as would be expected from building a new stack + * by pushing an element at a time). + * + * @tparam A type of the elements contained in this stack. 
+ * + * @define Coll `Stack` + * @define coll stack + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ @migration("Stack is now based on an ArrayDeque instead of a linked list", "2.13.0") class Stack[A] protected (array: Array[AnyRef], start: Int, end: Int) extends ArrayDeque[A](array, start, end) diff --git a/test/junit/scala/collection/mutable/StackTest.scala b/test/junit/scala/collection/mutable/StackTest.scala index af2e2ecdb38a..a5352c85e82e 100644 --- a/test/junit/scala/collection/mutable/StackTest.scala +++ b/test/junit/scala/collection/mutable/StackTest.scala @@ -31,4 +31,11 @@ class StackTest { assertEquals(list, stack.popAll()) assertTrue(stack.isEmpty) } + + @Test def `popWhile preserves iteration order`: Unit = { + val stack = Stack.tabulate(10)(_ * 10) + val list = stack.toList.take(5) + assertEquals(list, stack.popWhile(_ < 50)) + assertEquals(5, stack.size) + } } From 8f909f5ec3293749870d293f6c94847e308b7a35 Mon Sep 17 00:00:00 2001 From: Martijn Hoekstra Date: Sat, 6 Nov 2021 20:02:24 +0100 Subject: [PATCH 438/769] cleanup documentation for ordering link and format the code --- src/library/scala/math/Ordering.scala | 25 +++++++++++++------------ 1 file changed, 13 insertions(+), 12 deletions(-) diff --git a/src/library/scala/math/Ordering.scala b/src/library/scala/math/Ordering.scala index a7756b9f8638..8333cc52cf77 100644 --- a/src/library/scala/math/Ordering.scala +++ b/src/library/scala/math/Ordering.scala @@ -22,10 +22,10 @@ import scala.annotation.migration * instances of a type. * * Ordering's companion object defines many implicit objects to deal with - * subtypes of AnyVal (e.g. Int, Double), String, and others. + * subtypes of [[AnyVal]] (e.g. `Int`, `Double`), `String`, and others. * * To sort instances by one or more member variables, you can take advantage - * of these built-in orderings using Ordering.by and Ordering.on: + * of these built-in orderings using [[Ordering.by]] and [[Ordering.on]]: * * {{{ * import scala.util.Sorting @@ -38,9 +38,10 @@ import scala.annotation.migration * Sorting.quickSort(pairs)(Ordering[(Int, String)].on(x => (x._3, x._1))) * }}} * - * An Ordering[T] is implemented by specifying compare(a:T, b:T), which - * decides how to order two instances a and b. Instances of Ordering[T] can be - * used by things like scala.util.Sorting to sort collections like Array[T]. + * An `Ordering[T]` is implemented by specifying the [[compare]] method, + * `compare(a: T, b: T): Int`, which decides how to order two instances + * `a` and `b`. Instances of `Ordering[T]` can be used by things like + * `scala.util.Sorting` to sort collections like `Array[T]`. * * For example: * @@ -52,21 +53,21 @@ import scala.annotation.migration * * // sort by age * object AgeOrdering extends Ordering[Person] { - * def compare(a:Person, b:Person) = a.age compare b.age + * def compare(a:Person, b:Person) = a.age.compare(b.age) * } * Sorting.quickSort(people)(AgeOrdering) * }}} * - * This trait and scala.math.Ordered both provide this same functionality, but - * in different ways. A type T can be given a single way to order itself by - * extending Ordered. Using Ordering, this same type may be sorted in many - * other ways. Ordered and Ordering both provide implicits allowing them to be + * This trait and [[scala.math.Ordered]] both provide this same functionality, but + * in different ways. A type `T` can be given a single way to order itself by + * extending `Ordered`. 
Using `Ordering`, this same type may be sorted in many + * other ways. `Ordered` and `Ordering` both provide implicits allowing them to be * used interchangeably. * - * You can import scala.math.Ordering.Implicits to gain access to other + * You can `import scala.math.Ordering.Implicits._` to gain access to other * implicit orderings. * - * @see [[scala.math.Ordered]], [[scala.util.Sorting]] + * @see [[scala.math.Ordered]], [[scala.util.Sorting]], [[scala.math.Ordering.Implicits]] */ @annotation.implicitNotFound(msg = "No implicit Ordering defined for ${T}.") trait Ordering[T] extends Comparator[T] with PartialOrdering[T] with Serializable { From d97a0a336be2981b46543633490a9e467ebf489c Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Fri, 5 Nov 2021 09:28:14 -0700 Subject: [PATCH 439/769] Improve supplementary char support Precedence uses codepoint when probing lead char. Scanner accepts supplementary chars in more places, such as op_Supple, Supple"interp", s"$Supple". --- spec/01-lexical-syntax.md | 8 +- .../scala/tools/nsc/ast/parser/Scanners.scala | 107 +++++++++--------- .../scala/reflect/internal/Chars.scala | 46 +++++--- .../scala/reflect/internal/Precedence.scala | 17 ++- test/files/run/t1406.scala | 40 ++++++- test/files/run/t1406b.check | 15 ++- test/files/run/t1406b.scala | 39 +++---- 7 files changed, 163 insertions(+), 109 deletions(-) diff --git a/spec/01-lexical-syntax.md b/spec/01-lexical-syntax.md index 3dbed39d6806..005756b9cd1f 100644 --- a/spec/01-lexical-syntax.md +++ b/spec/01-lexical-syntax.md @@ -506,7 +506,7 @@ interpolatedString ::= alphaid ‘"’ {[‘\’] interpolatedStringPart | interpolatedStringPart ::= printableChar \ (‘"’ | ‘$’ | ‘\’) | escape escape ::= ‘$$’ | ‘$"’ - | ‘$’ id + | ‘$’ alphaid | ‘$’ BlockExpr alphaid ::= upper idrest | varid @@ -533,9 +533,9 @@ in an interpolated string. A single ‘$’-sign can still be obtained by doubli character: ‘$$’. A single ‘"’-sign can be obtained by the sequence ‘\$"’. The simpler form consists of a ‘$’-sign followed by an identifier starting with -a letter and followed only by letters, digits, and underscore characters, -e.g `$id`. The simpler form is expanded by putting braces around the identifier, -e.g `$id` is equivalent to `${id}`. In the following, unless we explicitly state otherwise, +a letter and followed only by letters, digits, and underscore characters, e.g., `$id`. +The simpler form is expanded by putting braces around the identifier, +e.g., `$id` is equivalent to `${id}`. In the following, unless we explicitly state otherwise, we assume that this expansion has already been performed. The expanded expression is type checked normally. 
Usually, `StringContext` will resolve to diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala index 8010fd2756a0..a55e39f70608 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala @@ -182,22 +182,26 @@ trait Scanners extends ScannersCommon { private def isSupplementary(high: Char, test: Int => Boolean, strict: Boolean = true): Boolean = isHighSurrogate(high) && { var res = false - nextChar() - val low = ch + val low = lookaheadReader.getc() if (isLowSurrogate(low)) { - nextChar() - val codepoint = toCodePoint(high, low) - if (isValidCodePoint(codepoint) && test(codepoint)) { - putChar(high) - putChar(low) - res = true - } else - syntaxError(f"illegal character '\\u$high%04x\\u$low%04x'") - } else if (!strict) { + val codePoint = toCodePoint(high, low) + if (isValidCodePoint(codePoint)) { + if (test(codePoint)) { + putChar(high) + putChar(low) + nextChar() + nextChar() + res = true + } + } + else syntaxError(f"illegal character '\\u$high%04x\\u$low%04x'") + } + else if (!strict) { putChar(high) + nextChar() res = true - } else - syntaxError(f"illegal character '\\u$high%04x' missing low surrogate") + } + else syntaxError(f"illegal character '\\u$high%04x' missing low surrogate") res } private def atSupplementary(ch: Char, f: Int => Boolean): Boolean = @@ -621,8 +625,7 @@ trait Scanners extends ScannersCommon { putChar(ch) nextChar() getIdentRest() - if (ch == '"' && token == IDENTIFIER) - token = INTERPOLATIONID + if (ch == '"' && token == IDENTIFIER) token = INTERPOLATIONID case '<' => // is XMLSTART? def fetchLT() = { val last = if (charOffset >= 2) buf(charOffset - 2) else ' ' @@ -729,12 +732,31 @@ trait Scanners extends ScannersCommon { } syntaxError(msg) } + /** Either at closing quote of charlit + * or run the op and take it as a (deprecated) Symbol identifier. + */ + def charLitOrSymbolAfter(op: () => Unit): Unit = + if (ch == '\'') { + nextChar() + token = CHARLIT + setStrVal() + } else { + op() + token = SYMBOLLIT + strVal = name.toString + } def fetchSingleQuote() = { nextChar() - if (isIdentifierStart(ch)) - charLitOr(() => getIdentRest()) - else if (isOperatorPart(ch) && (ch != '\\')) - charLitOr(() => getOperatorRest()) + if (isIdentifierStart(ch)) { + putChar(ch) + nextChar() + charLitOrSymbolAfter(() => getIdentRest()) + } + else if (isOperatorPart(ch) && (ch != '\\')) { + putChar(ch) + nextChar() + charLitOrSymbolAfter(() => getOperatorRest()) + } else if (!isAtEnd && (ch != SU && ch != CR && ch != LF)) { val isEmptyCharLit = (ch == '\'') getLitChar() @@ -801,12 +823,16 @@ trait Scanners extends ScannersCommon { putChar(ch) nextChar() getIdentRest() + if (ch == '"' && token == IDENTIFIER) token = INTERPOLATIONID } else if (isSpecial(ch)) { putChar(ch) nextChar() getOperatorRest() } else if (isSupplementary(ch, isUnicodeIdentifierStart)) { getIdentRest() + if (ch == '"' && token == IDENTIFIER) token = INTERPOLATIONID + } else if (isSupplementary(ch, isSpecial)) { + getOperatorRest() } else { syntaxError(f"illegal character '\\u$ch%04x'") nextChar() @@ -872,7 +898,8 @@ trait Scanners extends ScannersCommon { putChar(ch) nextChar() getIdentOrOperatorRest() - case SU => // strangely enough, Character.isUnicodeIdentifierPart(SU) returns true! + case ' ' | LF | // optimize for common whitespace + SU => // strangely enough, Character.isUnicodeIdentifierPart(SU) returns true! 
finishNamed() case _ => if (isUnicodeIdentifierPart(ch)) { @@ -888,6 +915,7 @@ trait Scanners extends ScannersCommon { @tailrec private def getOperatorRest(): Unit = (ch: @switch) match { + case ' ' | LF => finishNamed() // optimize case '~' | '!' | '@' | '#' | '%' | '^' | '*' | '+' | '-' | '<' | '>' | '?' | ':' | '=' | '&' | @@ -899,24 +927,12 @@ trait Scanners extends ScannersCommon { else { putChar('/'); getOperatorRest() } case _ => if (isSpecial(ch)) { putChar(ch); nextChar(); getOperatorRest() } + else if (isSupplementary(ch, isSpecial)) getOperatorRest() else finishNamed() } - private def getIdentOrOperatorRest(): Unit = { - if (isIdentifierPart(ch)) - getIdentRest() - else ch match { - case '~' | '!' | '@' | '#' | '%' | - '^' | '*' | '+' | '-' | '<' | - '>' | '?' | ':' | '=' | '&' | - '|' | '\\' | '/' => - getOperatorRest() - case _ => - if (isSpecial(ch)) getOperatorRest() - else finishNamed() - } - } - + private def getIdentOrOperatorRest(): Unit = + if (isIdentifierPart(ch) || isSupplementary(ch, isIdentifierPart)) getIdentRest() else getOperatorRest() // Literals ----------------------------------------------------------------- @@ -1040,10 +1056,6 @@ trait Scanners extends ScannersCommon { getInterpolatedIdentRest() } else if (atSupplementary(ch, isUnicodeIdentifierStart)) { finishStringPart() - putChar(ch) - nextRawChar() - putChar(ch) - nextRawChar() getInterpolatedIdentRest() } else { val expectations = "$$, $\", $identifier or ${expression}" @@ -1370,23 +1382,6 @@ trait Scanners extends ScannersCommon { if (detectedFloat) restOfNonIntegralNumber() else restOfNumber() } - /** Parse character literal if current character is followed by \', - * or follow with given op and return a symbol literal token - */ - def charLitOr(op: () => Unit): Unit = { - putChar(ch) - nextChar() - if (ch == '\'') { - nextChar() - token = CHARLIT - setStrVal() - } else { - op() - token = SYMBOLLIT - strVal = name.toString - } - } - // Errors ----------------------------------------------------------------- /** generate an error at the given offset */ diff --git a/src/reflect/scala/reflect/internal/Chars.scala b/src/reflect/scala/reflect/internal/Chars.scala index d34651078f4b..19e7722a985b 100644 --- a/src/reflect/scala/reflect/internal/Chars.scala +++ b/src/reflect/scala/reflect/internal/Chars.scala @@ -15,10 +15,10 @@ package reflect package internal import scala.annotation.switch -import java.lang.{ Character => JCharacter } /** Contains constants and classifier methods for characters */ trait Chars { + import Chars.CodePoint // Be very careful touching these. // Apparently trivial changes to the way you write these constants // will cause Scanners.scala to go from a nice efficient switch to @@ -72,28 +72,46 @@ trait Chars { '0' <= c && c <= '9' || 'A' <= c && c <= 'Z' || 'a' <= c && c <= 'z' /** Can character start an alphanumeric Scala identifier? */ - def isIdentifierStart(c: Char): Boolean = - (c == '_') || (c == '$') || Character.isUnicodeIdentifierStart(c) + def isIdentifierStart(c: Char): Boolean = (c == '_') || (c == '$') || Character.isUnicodeIdentifierStart(c) + def isIdentifierStart(c: CodePoint): Boolean = (c == '_') || (c == '$') || Character.isUnicodeIdentifierStart(c) /** Can character form part of an alphanumeric Scala identifier? 
*/ - def isIdentifierPart(c: Char) = - (c == '$') || Character.isUnicodeIdentifierPart(c) + def isIdentifierPart(c: Char) = (c == '$') || Character.isUnicodeIdentifierPart(c) + + def isIdentifierPart(c: CodePoint) = (c == '$') || Character.isUnicodeIdentifierPart(c) /** Is character a math or other symbol in Unicode? */ def isSpecial(c: Char) = { val chtp = Character.getType(c) chtp == Character.MATH_SYMBOL.toInt || chtp == Character.OTHER_SYMBOL.toInt } - - private final val otherLetters = Set[Char]('\u0024', '\u005F') // '$' and '_' - private final val letterGroups = { - import JCharacter._ - Set[Byte](LOWERCASE_LETTER, UPPERCASE_LETTER, OTHER_LETTER, TITLECASE_LETTER, LETTER_NUMBER) + def isSpecial(codePoint: CodePoint) = { + val chtp = Character.getType(codePoint) + chtp == Character.MATH_SYMBOL.toInt || chtp == Character.OTHER_SYMBOL.toInt } - def isScalaLetter(ch: Char) = letterGroups(JCharacter.getType(ch).toByte) || otherLetters(ch) + + // used for precedence + import Character.{LOWERCASE_LETTER, UPPERCASE_LETTER, OTHER_LETTER, TITLECASE_LETTER, LETTER_NUMBER} + def isScalaLetter(c: Char): Boolean = + Character.getType(c) match { + case LOWERCASE_LETTER | UPPERCASE_LETTER | OTHER_LETTER | TITLECASE_LETTER | LETTER_NUMBER => true + case _ => c == '$' || c == '_' + } + def isScalaLetter(c: CodePoint): Boolean = + Character.getType(c) match { + case LOWERCASE_LETTER | UPPERCASE_LETTER | OTHER_LETTER | TITLECASE_LETTER | LETTER_NUMBER => true + case _ => c == '$' || c == '_' + } /** Can character form part of a Scala operator name? */ - def isOperatorPart(c : Char) : Boolean = (c: @switch) match { + def isOperatorPart(c: Char): Boolean = (c: @switch) match { + case '~' | '!' | '@' | '#' | '%' | + '^' | '*' | '+' | '-' | '<' | + '>' | '?' | ':' | '=' | '&' | + '|' | '/' | '\\' => true + case c => isSpecial(c) + } + def isOperatorPart(c: CodePoint): Boolean = (c: @switch) match { case '~' | '!' | '@' | '#' | '%' | '^' | '*' | '+' | '-' | '<' | '>' | '?' | ':' | '=' | '&' | @@ -102,4 +120,6 @@ trait Chars { } } -object Chars extends Chars { } +object Chars extends Chars { + type CodePoint = Int +} diff --git a/src/reflect/scala/reflect/internal/Precedence.scala b/src/reflect/scala/reflect/internal/Precedence.scala index f63abd3d2f8f..0df567a7c3fa 100644 --- a/src/reflect/scala/reflect/internal/Precedence.scala +++ b/src/reflect/scala/reflect/internal/Precedence.scala @@ -10,26 +10,23 @@ * additional information regarding copyright ownership. 
*/ -package scala -package reflect -package internal +package scala.reflect.internal import scala.annotation.switch -import Chars._ +import Chars.{CodePoint, isOperatorPart, isScalaLetter} final class Precedence private (val level: Int) extends AnyVal with Ordered[Precedence] { - def compare(that: Precedence): Int = level compare that.level + def compare(that: Precedence): Int = level.compare(that.level) override def toString = s"Precedence($level)" } - object Precedence extends (Int => Precedence) { private[this] val ErrorName = "" private def isAssignmentOp(name: String) = name match { case "!=" | "<=" | ">=" | "" => false - case _ => name.last == '=' && name.head != '=' && isOperatorPart(name.head) + case _ => name.last == '=' && name.head != '=' && isOperatorPart(name.codePointAt(0)) } - private def firstChar(ch: Char): Precedence = apply((ch: @switch) match { + private def firstChar(c: CodePoint): Precedence = apply((c: @switch) match { case '|' => 2 case '^' => 3 case '&' => 4 @@ -38,13 +35,13 @@ object Precedence extends (Int => Precedence) { case ':' => 7 case '+' | '-' => 8 case '*' | '/' | '%' => 9 - case _ => if (isScalaLetter(ch)) 1 else 10 + case _ => if (isScalaLetter(c)) 1 else 10 }) def apply(level: Int): Precedence = new Precedence(level) def apply(name: String): Precedence = name match { case "" | ErrorName => this(-1) case _ if isAssignmentOp(name) => this(0) - case _ => firstChar(name charAt 0) + case _ => firstChar(name.codePointAt(0)) } } diff --git a/test/files/run/t1406.scala b/test/files/run/t1406.scala index c027771716a8..8089e97bc909 100644 --- a/test/files/run/t1406.scala +++ b/test/files/run/t1406.scala @@ -9,8 +9,25 @@ object Test extends DirectTest { // \u10428 isLetter and isLowerCase def U2 = "\ud801" def U3 = "\udc28" + // symbol operator So with supplementary char + def U4 = "\ud834" + def U5 = "\udd97" + // cyclone 1f300 + def U6 = "\ud83c" + def U7 = "\udf00" + // rocket 1f680 + def U8 = "\ud83d" + def U9 = "\ude80" + // quintessence 1f700 + def UA = "\ud83d" + def UB = "\udf00" + + // 1d4c5 Mathematical Script Small P + def UC = "\ud835" + def UD = "\udcc5" + def code = - s"""class C { + s"""class Identifiers { | def x = "$U0" | def y = "$U1" | def `$U0` = x @@ -23,6 +40,27 @@ object Test extends DirectTest { | def g(x: Any) = x match { | case $U2$U3 @ _ => $U2$U3 | } + |} + |class Ops { + | def $U4$U5 = 42 // was error: illegal character + | def op_$U4$U5 = 42 // was error: illegal character + | def $U6$U7 = 42 + | def op_$U6$U7 = 42 + | def $U8$U9 = 42 + | def op_$U8$U9 = 42 + | def $UA$UB = 42 + | def op_$UA$UB = 42 + | def $UC$UD = 42 + | def op_$UC$UD = 42 + |} + |class Strings { + | implicit class Interps(sc: StringContext) { + | def $UC$UD(parts: Any*) = "done" + | } + | def $UC$UD = 42 + | def interpolated = s"$$$UC$UD" + | def e = "a $UC$UD b" + | def f = $UC$UD"one" |}""".stripMargin def show(): Unit = { diff --git a/test/files/run/t1406b.check b/test/files/run/t1406b.check index 407e44adf89d..50a0e9217169 100644 --- a/test/files/run/t1406b.check +++ b/test/files/run/t1406b.check @@ -1,6 +1,9 @@ -newSource1.scala:4: error: illegal character '\ud801' missing low surrogate - def ? = x - ^ -newSource1.scala:5: error: illegal character '\udc00' - def ? 
= y - ^ +C(84) +C(1764) +C(1764) +C(1806) +C(1806) +C(3528) +C(3528) +C(1806) +C(3528) diff --git a/test/files/run/t1406b.scala b/test/files/run/t1406b.scala index bd1868a642fb..ff16cd296478 100644 --- a/test/files/run/t1406b.scala +++ b/test/files/run/t1406b.scala @@ -1,22 +1,23 @@ -import scala.tools.partest.DirectTest - -object Test extends DirectTest { - // for reference, UTF-8 of U0 - //val data = Array(0xed, 0xa0, 0x81).map(_.asInstanceOf[Byte]) - def U0 = "\ud801" - def U1 = "\udc00" - def code = - s"""class C { - | def x = "$U0" - | def y = "$U1" - | def $U0 = x - | def $U1 = y - |}""".stripMargin - - def show(): Unit = { - assert(U0.length == 1) - assert(!compile()) - } +case class C(n: Int) { + def 𐀀(c: C): C = C(n * c.n) // actually a letter but supplementary 0x10000 + def ☀(c: C): C = C(n * c.n) // just a symbol + def ☀=(c: C): C = C(n * c.n) // just a symbol + def 🌀(c: C): C = C(n * c.n) // cyclone operator is symbol, supplementary + def 🌀=(c: C): C = C(n * c.n) // cyclone operator is symbol, supplementary + def *(c: C): C = C(n * c.n) + def +(c: C): C = C(n + c.n) +} +object Test extends App { + val c, d = C(42) + println(c + d) + println(c * d) + println(c ☀ d) + println(c * d + d) + println(c ☀ d + d) + println(c ☀= d + d) // assignment op is low precedence + println(c 𐀀 d + d) // the first one, letter should be low precedence + println(c 🌀d + d) // the second one, cyclone should be high precedence + println(c 🌀= d + d) // the second one, cyclone should be high precedence } From 950b90eb35cc52a988bfa34ae203eb52356b93f1 Mon Sep 17 00:00:00 2001 From: Georgi Krastev Date: Mon, 8 Nov 2021 20:16:31 +0100 Subject: [PATCH 440/769] RefCheck types uniformly - handle existentials and annotations - All existentially bound skolems are replaced with wildcards - Annotation types are checked deeply - Nesting of `@uncheckedBounds` is handled properly --- .../tools/nsc/typechecker/RefChecks.scala | 93 +++++++++---------- test/files/neg/ref-checks.check | 7 ++ test/files/neg/ref-checks.scala | 10 ++ test/files/run/t12481.check | 2 + test/files/run/t12481.scala | 6 ++ 5 files changed, 71 insertions(+), 47 deletions(-) create mode 100644 test/files/neg/ref-checks.check create mode 100644 test/files/neg/ref-checks.scala create mode 100644 test/files/run/t12481.check create mode 100644 test/files/run/t12481.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index 8d524d8f5d00..023f7b36a173 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -1407,32 +1407,53 @@ abstract class RefChecks extends Transform { false } - private def checkTypeRef(tp: Type, tree: Tree, skipBounds: Boolean): Unit = tp match { - case TypeRef(pre, sym, args) => - tree match { - case tt: TypeTree if tt.original == null => // scala/bug#7783 don't warn about inferred types - // FIXME: reconcile this check with one in resetAttrs - case _ => checkUndesiredProperties(sym, tree.pos) + private object RefCheckTypeMap extends TypeMap { + object ExistentialToWildcard extends TypeMap { + override def apply(tpe: Type): Type = + if (tpe.typeSymbol.isExistential) WildcardType else tpe.mapOver(this) + } + + private[this] var skipBounds = false + private[this] var tree: Tree = EmptyTree + + def check(tpe: Type, tree: Tree): Type = { + this.tree = tree + try apply(tpe) finally { + skipBounds = false + this.tree = EmptyTree } - if (sym.isJavaDefined) - sym.typeParams foreach 
(_.cookJavaRawInfo()) - if (!tp.isHigherKinded && !skipBounds) - checkBounds(tree, pre, sym.owner, sym.typeParams, args) - case _ => - } + } - private def checkTypeRefBounds(tp: Type, tree: Tree) = { - var skipBounds = false - tp match { - case AnnotatedType(ann :: Nil, underlying) if ann.symbol == UncheckedBoundsClass => + // check all bounds, except those that are existential type parameters + // or those within typed annotated with @uncheckedBounds + override def apply(tpe: Type): Type = tpe match { + case tpe: AnnotatedType if tpe.hasAnnotation(UncheckedBoundsClass) => + // scala/bug#7694 Allow code synthesizers to disable checking of bounds for TypeTrees based on inferred LUBs + // which might not conform to the constraints. + val savedSkipBounds = skipBounds skipBounds = true - underlying + try tpe.mapOver(this).filterAnnotations(_.symbol != UncheckedBoundsClass) + finally skipBounds = savedSkipBounds + case tpe: TypeRef => + checkTypeRef(ExistentialToWildcard(tpe)) + tpe.mapOver(this) + case tpe => + tpe.mapOver(this) + } + + private def checkTypeRef(tpe: Type): Unit = tpe match { case TypeRef(pre, sym, args) => - if (!tp.isHigherKinded && !skipBounds) + tree match { + // scala/bug#7783 don't warn about inferred types + // FIXME: reconcile this check with one in resetAttrs + case tree: TypeTree if tree.original == null => + case tree => checkUndesiredProperties(sym, tree.pos) + } + if (sym.isJavaDefined) + sym.typeParams.foreach(_.cookJavaRawInfo()) + if (!tpe.isHigherKinded && !skipBounds) checkBounds(tree, pre, sym.owner, sym.typeParams, args) - tp case _ => - tp } } @@ -1449,8 +1470,7 @@ abstract class RefChecks extends Transform { def applyChecks(annots: List[AnnotationInfo]): List[AnnotationInfo] = if (annots.isEmpty) Nil else { annots.foreach { ann => checkVarArgs(ann.atp, tree) - checkTypeRef(ann.atp, tree, skipBounds = false) - checkTypeRefBounds(ann.atp, tree) + RefCheckTypeMap.check(ann.atp, tree) if (ann.original != null && ann.original.hasExistingSymbol) checkUndesiredProperties(ann.original.symbol, tree.pos) } @@ -1755,29 +1775,8 @@ abstract class RefChecks extends Transform { } } - val existentialParams = new ListBuffer[Symbol] - var skipBounds = false - // check all bounds, except those that are existential type parameters - // or those within typed annotated with @uncheckedBounds - if (!inPattern) tree.tpe foreach { - case tp @ ExistentialType(tparams, tpe) => - existentialParams ++= tparams - case ann: AnnotatedType if ann.hasAnnotation(UncheckedBoundsClass) => - // scala/bug#7694 Allow code synthesizers to disable checking of bounds for TypeTrees based on inferred LUBs - // which might not conform to the constraints. 
- skipBounds = true - case tp: TypeRef => - val tpWithWildcards = deriveTypeWithWildcards(existentialParams.toList)(tp) - checkTypeRef(tpWithWildcards, tree, skipBounds) - case _ => - } - if (skipBounds) { - tree.setType(tree.tpe.map { - _.filterAnnotations(_.symbol != UncheckedBoundsClass) - }) - } - - tree + if (inPattern) tree + else tree.setType(RefCheckTypeMap.check(tree.tpe, tree)) case TypeApply(fn, args) => checkBounds(tree, NoPrefix, NoSymbol, fn.tpe.typeParams, args map (_.tpe)) @@ -1812,8 +1811,8 @@ abstract class RefChecks extends Transform { case x @ Select(_, _) => transformSelect(x) - case Literal(Constant(tp: Type)) => - checkTypeRef(tp, tree, skipBounds = false) + case Literal(Constant(tpe: Type)) => + RefCheckTypeMap.check(tpe, tree) tree case UnApply(fun, args) => diff --git a/test/files/neg/ref-checks.check b/test/files/neg/ref-checks.check new file mode 100644 index 000000000000..8ffa9ff27bf0 --- /dev/null +++ b/test/files/neg/ref-checks.check @@ -0,0 +1,7 @@ +ref-checks.scala:8: error: type arguments [Int] do not conform to trait Chars's type parameter bounds [A <: CharSequence] + @ann[Chars[Int]] val x = 42 + ^ +ref-checks.scala:9: error: type arguments [Double] do not conform to trait Chars's type parameter bounds [A <: CharSequence] + val y: Two[Chars[Long] @uncheckedBounds, Chars[Double]] = null + ^ +2 errors diff --git a/test/files/neg/ref-checks.scala b/test/files/neg/ref-checks.scala new file mode 100644 index 000000000000..58e736ec1b54 --- /dev/null +++ b/test/files/neg/ref-checks.scala @@ -0,0 +1,10 @@ +import scala.annotation.StaticAnnotation +import scala.reflect.internal.annotations.uncheckedBounds + +object Test { + trait Chars[A <: CharSequence] + trait Two[A, B] + class ann[A] extends StaticAnnotation + @ann[Chars[Int]] val x = 42 + val y: Two[Chars[Long] @uncheckedBounds, Chars[Double]] = null +} diff --git a/test/files/run/t12481.check b/test/files/run/t12481.check new file mode 100644 index 000000000000..39d6696135de --- /dev/null +++ b/test/files/run/t12481.check @@ -0,0 +1,2 @@ +Test$Universe[_ <: Any] +Test$Universe[] diff --git a/test/files/run/t12481.scala b/test/files/run/t12481.scala new file mode 100644 index 000000000000..8407c634ef54 --- /dev/null +++ b/test/files/run/t12481.scala @@ -0,0 +1,6 @@ +object Test extends App { + trait Txn[T <: Txn[T]] + trait Universe[T <: Txn[T]] + println(implicitly[Manifest[Universe[_]]]) + println(implicitly[OptManifest[Universe[_]]]) +} From 2152d3851c6ae15bfaf21e3f772b3a3d896916b6 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Thu, 11 Nov 2021 07:07:55 -0800 Subject: [PATCH 441/769] JLine 3.21.0 / JNA 5.9.0 (was 3.20.0 / 5.8.0) fixes scala/bug#12491 (REPL support on M1) --- versions.properties | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/versions.properties b/versions.properties index 7e64c1f39d9d..97b4bc3c3634 100644 --- a/versions.properties +++ b/versions.properties @@ -9,5 +9,5 @@ starr.version=2.13.7 scala-asm.version=9.2.0-scala-1 # jna.version must be updated together with jline-terminal-jna -jline.version=3.20.0 -jna.version=5.8.0 +jline.version=3.21.0 +jna.version=5.9.0 From 5c58e43fa4fed9d78ce7d463d37d5ee47b91c5a9 Mon Sep 17 00:00:00 2001 From: Georgi Krastev Date: Mon, 8 Nov 2021 20:16:31 +0100 Subject: [PATCH 442/769] [backport] RefCheck types uniformly, handle existentials and annotations - All existentially bound skolems are replaced with wildcards - Annotation types are checked deeply - Nesting of `@uncheckedBounds` is handled properly --- 
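To make the "checked deeply" point concrete: the first annotated val below is the case from the ref-checks test added by this patch; the second (`y`, with the bound violation nested one level deeper) is an assumed extension, not in the test, which the recursive type map should presumably flag as well.

    import scala.annotation.StaticAnnotation

    object Example {                       // name made up; mirrors test/files/neg/ref-checks.scala
      trait Chars[A <: CharSequence]
      class ann[A] extends StaticAnnotation

      @ann[Chars[Int]] val x = 42          // error: Int does not conform to A <: CharSequence
      @ann[List[Chars[Int]]] val y = 42    // assumed: also rejected now that annotation types are traversed
    }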
.../tools/nsc/typechecker/RefChecks.scala | 93 +++++++++---------- test/files/neg/ref-checks.check | 7 ++ test/files/neg/ref-checks.scala | 10 ++ test/files/run/t12481.check | 2 + test/files/run/t12481.scala | 6 ++ 5 files changed, 71 insertions(+), 47 deletions(-) create mode 100644 test/files/neg/ref-checks.check create mode 100644 test/files/neg/ref-checks.scala create mode 100644 test/files/run/t12481.check create mode 100644 test/files/run/t12481.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index abbc25953319..1fcfaa8a2a63 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -1411,40 +1411,60 @@ abstract class RefChecks extends Transform { false } - private def checkTypeRef(tp: Type, tree: Tree, skipBounds: Boolean) = tp match { - case TypeRef(pre, sym, args) => - tree match { - case tt: TypeTree if tt.original == null => // scala/bug#7783 don't warn about inferred types - // FIXME: reconcile this check with one in resetAttrs - case _ => checkUndesiredProperties(sym, tree.pos) + private object RefCheckTypeMap extends TypeMap { + object ExistentialToWildcard extends TypeMap { + override def apply(tpe: Type): Type = + if (tpe.typeSymbol.isExistential) WildcardType else mapOver(tpe) + } + + private[this] var skipBounds = false + private[this] var tree: Tree = EmptyTree + + def check(tpe: Type, tree: Tree): Type = { + this.tree = tree + try apply(tpe) finally { + skipBounds = false + this.tree = EmptyTree } - if(sym.isJavaDefined) - sym.typeParams foreach (_.cookJavaRawInfo()) - if (!tp.isHigherKinded && !skipBounds) - checkBounds(tree, pre, sym.owner, sym.typeParams, args) - case _ => - } + } - private def checkTypeRefBounds(tp: Type, tree: Tree) = { - var skipBounds = false - tp match { - case AnnotatedType(ann :: Nil, underlying) if ann.symbol == UncheckedBoundsClass => + // check all bounds, except those that are existential type parameters + // or those within typed annotated with @uncheckedBounds + override def apply(tpe: Type): Type = tpe match { + case tpe: AnnotatedType if tpe.hasAnnotation(UncheckedBoundsClass) => + // scala/bug#7694 Allow code synthesizers to disable checking of bounds for TypeTrees based on inferred LUBs + // which might not conform to the constraints. 
+ val savedSkipBounds = skipBounds skipBounds = true - underlying + try mapOver(tpe).filterAnnotations(_.symbol != UncheckedBoundsClass) + finally skipBounds = savedSkipBounds + case tpe: TypeRef => + checkTypeRef(ExistentialToWildcard(tpe)) + mapOver(tpe) + case tpe => + mapOver(tpe) + } + + private def checkTypeRef(tpe: Type): Unit = tpe match { case TypeRef(pre, sym, args) => - if (!tp.isHigherKinded && !skipBounds) + tree match { + // scala/bug#7783 don't warn about inferred types + // FIXME: reconcile this check with one in resetAttrs + case tree: TypeTree if tree.original == null => + case tree => checkUndesiredProperties(sym, tree.pos) + } + if (sym.isJavaDefined) + sym.typeParams.foreach(_.cookJavaRawInfo()) + if (!tpe.isHigherKinded && !skipBounds) checkBounds(tree, pre, sym.owner, sym.typeParams, args) - tp case _ => - tp } } private def applyRefchecksToAnnotations(tree: Tree): Unit = { def applyChecks(annots: List[AnnotationInfo]): List[AnnotationInfo] = if (annots.isEmpty) Nil else { annots.foreach { ann => - checkTypeRef(ann.tpe, tree, skipBounds = false) - checkTypeRefBounds(ann.tpe, tree) + RefCheckTypeMap.check(ann.tpe, tree) } val annotsBySymbol = new mutable.LinkedHashMap[Symbol, ListBuffer[AnnotationInfo]]() @@ -1800,29 +1820,8 @@ abstract class RefChecks extends Transform { } } - val existentialParams = new ListBuffer[Symbol] - var skipBounds = false - // check all bounds, except those that are existential type parameters - // or those within typed annotated with @uncheckedBounds - if (!inPattern) tree.tpe foreach { - case tp @ ExistentialType(tparams, tpe) => - existentialParams ++= tparams - case ann: AnnotatedType if ann.hasAnnotation(UncheckedBoundsClass) => - // scala/bug#7694 Allow code synthesizers to disable checking of bounds for TypeTrees based on inferred LUBs - // which might not conform to the constraints. 
- skipBounds = true - case tp: TypeRef => - val tpWithWildcards = deriveTypeWithWildcards(existentialParams.toList)(tp) - checkTypeRef(tpWithWildcards, tree, skipBounds) - case _ => - } - if (skipBounds) { - tree.setType(tree.tpe.map { - _.filterAnnotations(_.symbol != UncheckedBoundsClass) - }) - } - - tree + if (inPattern) tree + else tree.setType(RefCheckTypeMap.check(tree.tpe, tree)) case TypeApply(fn, args) => checkBounds(tree, NoPrefix, NoSymbol, fn.tpe.typeParams, args map (_.tpe)) @@ -1857,8 +1856,8 @@ abstract class RefChecks extends Transform { case x @ Select(_, _) => transformSelect(x) - case Literal(Constant(tp: Type)) => - checkTypeRef(tp, tree, skipBounds = false) + case Literal(Constant(tpe: Type)) => + RefCheckTypeMap.check(tpe, tree) tree case UnApply(fun, args) => diff --git a/test/files/neg/ref-checks.check b/test/files/neg/ref-checks.check new file mode 100644 index 000000000000..ca298c4f843c --- /dev/null +++ b/test/files/neg/ref-checks.check @@ -0,0 +1,7 @@ +ref-checks.scala:8: error: type arguments [Int] do not conform to trait Chars's type parameter bounds [A <: CharSequence] + @ann[Chars[Int]] val x = 42 + ^ +ref-checks.scala:9: error: type arguments [Double] do not conform to trait Chars's type parameter bounds [A <: CharSequence] + val y: Two[Chars[Long] @uncheckedBounds, Chars[Double]] = null + ^ +two errors found diff --git a/test/files/neg/ref-checks.scala b/test/files/neg/ref-checks.scala new file mode 100644 index 000000000000..58e736ec1b54 --- /dev/null +++ b/test/files/neg/ref-checks.scala @@ -0,0 +1,10 @@ +import scala.annotation.StaticAnnotation +import scala.reflect.internal.annotations.uncheckedBounds + +object Test { + trait Chars[A <: CharSequence] + trait Two[A, B] + class ann[A] extends StaticAnnotation + @ann[Chars[Int]] val x = 42 + val y: Two[Chars[Long] @uncheckedBounds, Chars[Double]] = null +} diff --git a/test/files/run/t12481.check b/test/files/run/t12481.check new file mode 100644 index 000000000000..39d6696135de --- /dev/null +++ b/test/files/run/t12481.check @@ -0,0 +1,2 @@ +Test$Universe[_ <: Any] +Test$Universe[] diff --git a/test/files/run/t12481.scala b/test/files/run/t12481.scala new file mode 100644 index 000000000000..8407c634ef54 --- /dev/null +++ b/test/files/run/t12481.scala @@ -0,0 +1,6 @@ +object Test extends App { + trait Txn[T <: Txn[T]] + trait Universe[T <: Txn[T]] + println(implicitly[Manifest[Universe[_]]]) + println(implicitly[OptManifest[Universe[_]]]) +} From 5d53a524cc1ed461ed5f33df3e4a963d43d2f5a2 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Wed, 17 Nov 2021 13:50:05 +0100 Subject: [PATCH 443/769] Fix range positions in selection from parens In `(c).f`, the range position did not include the opening `(`. 
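Concretely, two entries from the t12490 test added below show the intended positions: the right-hand side of a selection now starts at the opening parenthesis, so a parenthesized receiver lines up with an unparenthesized one.

    "class H { def t(c: C) = c.t }"   -> (24, 27)   // starts at `c`
    "class I { def t(c: C) = (c).t }" -> (24, 29)   // starts at the `(` at offset 24, which was previously left out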
--- .../scala/tools/nsc/ast/parser/Parsers.scala | 13 ++++---- .../tools/nsc/ast/parser/TreeBuilder.scala | 3 +- test/files/run/t12490.scala | 33 +++++++++++++++++++ 3 files changed, 41 insertions(+), 8 deletions(-) create mode 100644 test/files/run/t12490.scala diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index a3404767cb89..b01a741c292c 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -917,8 +917,8 @@ self => if (opinfo.targs.nonEmpty) syntaxError(opinfo.offset, "type application is not allowed for postfix operators") - val od = stripParens(reduceExprStack(base, opinfo.lhs)) - makePostfixSelect(start, opinfo.offset, od, opinfo.operator) + val lhs = reduceExprStack(base, opinfo.lhs) + makePostfixSelect(if (lhs.pos.isDefined) lhs.pos.start else start, opinfo.offset, stripParens(lhs), opinfo.operator) } def finishBinaryOp(isExpr: Boolean, opinfo: OpInfo, rhs: Tree): Tree = { @@ -1217,11 +1217,12 @@ self => def identOrMacro(): Name = if (isMacro) rawIdent() else ident() - def selector(t: Tree): Tree = { + def selector(t0: Tree): Tree = { + val t = stripParens(t0) val point = if (isIdent) in.offset else in.lastOffset //scala/bug#8459 //assert(t.pos.isDefined, t) if (t != EmptyTree) - Select(t, ident(skipIt = false)) setPos r2p(t.pos.start, point, in.lastOffset) + Select(t, ident(skipIt = false)) setPos r2p(t0.pos.start, point, in.lastOffset) else errorTermTree // has already been reported } @@ -1793,14 +1794,14 @@ self => in.token match { case DOT => in.nextToken() - simpleExprRest(selector(stripParens(t)), canApply = true) + simpleExprRest(selector(t), canApply = true) case LBRACKET => val t1 = stripParens(t) t1 match { case Ident(_) | Select(_, _) | Apply(_, _) => var app: Tree = t1 while (in.token == LBRACKET) - app = atPos(app.pos.start, in.offset)(TypeApply(app, exprTypeArgs())) + app = atPos(t.pos.start, in.offset)(TypeApply(app, exprTypeArgs())) simpleExprRest(app, canApply = true) case _ => diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala index 63249dd88a6e..f19ddd77873c 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala @@ -56,8 +56,7 @@ abstract class TreeBuilder { ValDef(Modifiers(PRIVATE), name, tpt, EmptyTree) /** Tree for `od op`, start is start0 if od.pos is borked. 
*/ - def makePostfixSelect(start0: Int, end: Int, od: Tree, op: Name): Tree = { - val start = if (od.pos.isDefined) od.pos.start else start0 + def makePostfixSelect(start: Int, end: Int, od: Tree, op: Name): Tree = { atPos(r2p(start, end, end + op.length)) { new PostfixSelect(od, op.encode) } } diff --git a/test/files/run/t12490.scala b/test/files/run/t12490.scala new file mode 100644 index 000000000000..422ef3fb4222 --- /dev/null +++ b/test/files/run/t12490.scala @@ -0,0 +1,33 @@ +import scala.tools.partest._ +import scala.collection.mutable.LinkedHashMap + +object Test extends CompilerTest { + import global._ + override def extraSettings = super.extraSettings + " -Yrangepos -Ystop-after:parser" + val tests = LinkedHashMap( + "class A { def t = new C() }" -> (24, 31), + "class B { def t = (new C) }" -> (25, 30), + "class C { def t = new C }" -> (24, 29), + "class D { def t = new C().t }" -> (24, 33), + "class E { def t = (new C).t }" -> (24, 33), + "class F { def t(c: C) = c }" -> (24, 25), + "class G { def t(c: C) = (c) }" -> (25, 26), + "class H { def t(c: C) = c.t }" -> (24, 27), + "class I { def t(c: C) = (c).t }" -> (24, 29), + "class J { def t[T]: C = (x.t)[C] }" -> (24, 32), + "class K { def t(f: F) = (f) t c }" -> (24, 31), + "class L { def t(c: C) = (c) t }" -> (24, 29), + // ^ 24 ^ 33 + ) + + override def sources = tests.toList.map(_._1) + + def check(source: String, unit: CompilationUnit): Unit = unit.body foreach { + case dd: DefDef if dd.name.startsWith("t") => + val pos = dd.rhs.pos + val (start, end) = tests(source) + assert(pos.start == start, pos.start) + assert(pos.end == end, pos.end) + case _ => + } +} From 6792eb418962b02ad9d686c4d2e41d1870daad83 Mon Sep 17 00:00:00 2001 From: Georgi Krastev Date: Sun, 28 Nov 2021 01:54:14 +0100 Subject: [PATCH 444/769] Followup improvements to RefChecks * Only convert unbound existential types to wildcards. * Extend undesired properties check to patterns. 
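For example (a sketch adapted from the cases added to test/files/neg/ref-checks.scala below,
shown without the enclosing `object Test` used there), both of the following are now reported,
the first as a bounds error and the second as a deprecation warning:

    trait Chars[A <: CharSequence]
    // X is quantified by its own forSome clause, so it is no longer replaced by a
    // wildcard and the violation of the bound A <: CharSequence is reported
    def z: Chars[X forSome { type X <: Int }] = null

    @deprecated type DeprecatedAlias = String
    ("": Any) match {
      case _: DeprecatedAlias => // deprecation is now also reported for types used in patterns
      case _ =>
    }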
--- .../tools/nsc/typechecker/RefChecks.scala | 44 +++++++++++++------ src/reflect/scala/reflect/api/TypeTags.scala | 3 +- test/files/neg/ref-checks.check | 16 +++++-- test/files/neg/ref-checks.scala | 14 +++++- 4 files changed, 58 insertions(+), 19 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index 023f7b36a173..baef73b1df77 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -1408,18 +1408,33 @@ abstract class RefChecks extends Transform { } private object RefCheckTypeMap extends TypeMap { - object ExistentialToWildcard extends TypeMap { - override def apply(tpe: Type): Type = - if (tpe.typeSymbol.isExistential) WildcardType else tpe.mapOver(this) + object UnboundExistential extends TypeMap { + private[this] val bound = mutable.Set.empty[Symbol] + + def toWildcardIn(tpe: Type): Type = + try apply(tpe) finally bound.clear() + + override def apply(tpe: Type): Type = tpe match { + case ExistentialType(quantified, _) => + bound ++= quantified + tpe.mapOver(this) + case tpe => + val sym = tpe.typeSymbol + if (sym.isExistential && !bound(sym)) WildcardType + else tpe.mapOver(this) + } } + private[this] var inPattern = false private[this] var skipBounds = false private[this] var tree: Tree = EmptyTree - def check(tpe: Type, tree: Tree): Type = { + def check(tpe: Type, tree: Tree, inPattern: Boolean = false): Type = { + this.inPattern = inPattern this.tree = tree try apply(tpe) finally { - skipBounds = false + this.inPattern = false + this.skipBounds = false this.tree = EmptyTree } } @@ -1435,7 +1450,8 @@ abstract class RefChecks extends Transform { try tpe.mapOver(this).filterAnnotations(_.symbol != UncheckedBoundsClass) finally skipBounds = savedSkipBounds case tpe: TypeRef => - checkTypeRef(ExistentialToWildcard(tpe)) + if (!inPattern) checkTypeRef(UnboundExistential.toWildcardIn(tpe)) + checkUndesired(tpe.sym) tpe.mapOver(this) case tpe => tpe.mapOver(this) @@ -1443,18 +1459,19 @@ abstract class RefChecks extends Transform { private def checkTypeRef(tpe: Type): Unit = tpe match { case TypeRef(pre, sym, args) => - tree match { - // scala/bug#7783 don't warn about inferred types - // FIXME: reconcile this check with one in resetAttrs - case tree: TypeTree if tree.original == null => - case tree => checkUndesiredProperties(sym, tree.pos) - } if (sym.isJavaDefined) sym.typeParams.foreach(_.cookJavaRawInfo()) if (!tpe.isHigherKinded && !skipBounds) checkBounds(tree, pre, sym.owner, sym.typeParams, args) case _ => } + + private def checkUndesired(sym: Symbol): Unit = tree match { + // scala/bug#7783 don't warn about inferred types + // FIXME: reconcile this check with one in resetAttrs + case tree: TypeTree if tree.original == null => + case tree => checkUndesiredProperties(sym, tree.pos) + } } private def applyRefchecksToAnnotations(tree: Tree): Unit = { @@ -1775,8 +1792,7 @@ abstract class RefChecks extends Transform { } } - if (inPattern) tree - else tree.setType(RefCheckTypeMap.check(tree.tpe, tree)) + tree.setType(RefCheckTypeMap.check(tree.tpe, tree, inPattern)) case TypeApply(fn, args) => checkBounds(tree, NoPrefix, NoSymbol, fn.tpe.typeParams, args map (_.tpe)) diff --git a/src/reflect/scala/reflect/api/TypeTags.scala b/src/reflect/scala/reflect/api/TypeTags.scala index 382577ce3cf4..7dba64a079e7 100644 --- a/src/reflect/scala/reflect/api/TypeTags.scala +++ b/src/reflect/scala/reflect/api/TypeTags.scala @@ -15,6 
+15,7 @@ package reflect package api import java.io.ObjectStreamException +import scala.annotation.nowarn /** * A `TypeTag[T]` encapsulates the runtime type representation of some type `T`. @@ -290,7 +291,7 @@ trait TypeTags { self: Universe => def apply[T](mirror1: scala.reflect.api.Mirror[self.type], tpec1: TypeCreator): TypeTag[T] = { (mirror1: AnyRef) match { - case m: scala.reflect.runtime.JavaMirrors#MirrorImpl + case m: scala.reflect.runtime.JavaMirrors#JavaMirror @nowarn("cat=deprecation") if cacheMaterializedTypeTags && tpec1.getClass.getName.contains("$typecreator") && tpec1.getClass.getDeclaredFields.length == 0 => // excludes type creators that splice in bound types. diff --git a/test/files/neg/ref-checks.check b/test/files/neg/ref-checks.check index 8ffa9ff27bf0..8ea6d6e02b00 100644 --- a/test/files/neg/ref-checks.check +++ b/test/files/neg/ref-checks.check @@ -1,7 +1,17 @@ -ref-checks.scala:8: error: type arguments [Int] do not conform to trait Chars's type parameter bounds [A <: CharSequence] +ref-checks.scala:9: error: type arguments [Int] do not conform to trait Chars's type parameter bounds [A <: CharSequence] @ann[Chars[Int]] val x = 42 ^ -ref-checks.scala:9: error: type arguments [Double] do not conform to trait Chars's type parameter bounds [A <: CharSequence] +ref-checks.scala:10: error: type arguments [Double] do not conform to trait Chars's type parameter bounds [A <: CharSequence] val y: Two[Chars[Long] @uncheckedBounds, Chars[Double]] = null ^ -2 errors +ref-checks.scala:11: error: type arguments [X forSome { type X <: Int }] do not conform to trait Chars's type parameter bounds [A <: CharSequence] + def z: Chars[X forSome { type X <: Int }] = null + ^ +ref-checks.scala:18: warning: type DeprecatedAlias in object Test is deprecated + case _: DeprecatedAlias => + ^ +ref-checks.scala:19: warning: class DeprecatedClass in object Test is deprecated + case _: DeprecatedClass => + ^ +2 warnings +3 errors diff --git a/test/files/neg/ref-checks.scala b/test/files/neg/ref-checks.scala index 58e736ec1b54..e68f25938810 100644 --- a/test/files/neg/ref-checks.scala +++ b/test/files/neg/ref-checks.scala @@ -1,4 +1,5 @@ -import scala.annotation.StaticAnnotation +// scalac: -deprecation -Werror +import scala.annotation.{StaticAnnotation, nowarn} import scala.reflect.internal.annotations.uncheckedBounds object Test { @@ -7,4 +8,15 @@ object Test { class ann[A] extends StaticAnnotation @ann[Chars[Int]] val x = 42 val y: Two[Chars[Long] @uncheckedBounds, Chars[Double]] = null + def z: Chars[X forSome { type X <: Int }] = null + + @deprecated type DeprecatedAlias = String + @deprecated class DeprecatedClass + @nowarn("cat=deprecation") type UndeprecatedAlias = DeprecatedClass + + ("": Any) match { + case _: DeprecatedAlias => + case _: DeprecatedClass => + case _: UndeprecatedAlias => // no warning here + } } From 5021a57fda03dcc510ea782043e4d397c4aad311 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Tue, 30 Nov 2021 16:16:52 -0800 Subject: [PATCH 445/769] fix build (JDK 17 + bootstrapped + fatal warnings) a little sequel to #9815, which is responsible for newly and rightly emitting these warnings --- src/compiler/scala/tools/reflect/WrappedProperties.scala | 1 + src/library/scala/sys/SystemProperties.scala | 1 + 2 files changed, 2 insertions(+) diff --git a/src/compiler/scala/tools/reflect/WrappedProperties.scala b/src/compiler/scala/tools/reflect/WrappedProperties.scala index 2ed0e459da0e..76caefb3c597 100644 --- a/src/compiler/scala/tools/reflect/WrappedProperties.scala +++ 
b/src/compiler/scala/tools/reflect/WrappedProperties.scala @@ -47,6 +47,7 @@ trait WrappedProperties extends PropertiesTrait { object WrappedProperties { object AccessControl extends WrappedProperties { + @annotation.nowarn("cat=deprecation") // AccessControlException is deprecated on JDK 17 def wrap[T](body: => T) = try Some(body) catch { case _: AccessControlException => None } } } diff --git a/src/library/scala/sys/SystemProperties.scala b/src/library/scala/sys/SystemProperties.scala index 4ae753aa8f80..aa2f0bd5d06c 100644 --- a/src/library/scala/sys/SystemProperties.scala +++ b/src/library/scala/sys/SystemProperties.scala @@ -52,6 +52,7 @@ extends mutable.AbstractMap[String, String] { def subtractOne (key: String): this.type = { wrapAccess(System.clearProperty(key)) ; this } def addOne (kv: (String, String)): this.type = { wrapAccess(System.setProperty(kv._1, kv._2)) ; this } + @annotation.nowarn("cat=deprecation") // AccessControlException is deprecated on JDK 17 def wrapAccess[T](body: => T): Option[T] = try Some(body) catch { case _: AccessControlException => None } } From 1e32c86fedef1a73c67d51f23d33552b65053395 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Fri, 3 Dec 2021 02:59:21 -0800 Subject: [PATCH 446/769] Trim Symbol class text --- src/library/scala/Symbol.scala | 21 +++++++-------------- 1 file changed, 7 insertions(+), 14 deletions(-) diff --git a/src/library/scala/Symbol.scala b/src/library/scala/Symbol.scala index 6cf0b0f3b269..c388bde42a98 100644 --- a/src/library/scala/Symbol.scala +++ b/src/library/scala/Symbol.scala @@ -14,17 +14,11 @@ package scala /** This class provides a simple way to get unique objects for equal strings. * Since symbols are interned, they can be compared using reference equality. - * Instances of `Symbol` can be created easily with Scala's built-in quote - * mechanism. - * - * For instance, the Scala term `'mysym` will - * invoke the constructor of the `Symbol` class in the following way: - * `Symbol("mysym")`. */ final class Symbol private (val name: String) extends Serializable { - /** Converts this symbol to a string. + /** A string representation of this symbol. */ - override def toString(): String = "Symbol(" + name + ")" + override def toString(): String = s"Symbol($name)" @throws(classOf[java.io.ObjectStreamException]) private def readResolve(): Any = Symbol.apply(name) @@ -40,8 +34,7 @@ object Symbol extends UniquenessCache[String, Symbol] { /** This is private so it won't appear in the library API, but * abstracted to offer some hope of reusability. 
*/ -private[scala] abstract class UniquenessCache[K, V >: Null] -{ +private[scala] abstract class UniquenessCache[K, V >: Null] { import java.lang.ref.WeakReference import java.util.WeakHashMap import java.util.concurrent.locks.ReentrantReadWriteLock @@ -82,10 +75,10 @@ private[scala] abstract class UniquenessCache[K, V >: Null] } finally wlock.unlock } - - val res = cached() - if (res == null) updateCache() - else res + cached() match { + case null => updateCache() + case res => res + } } def unapply(other: V): Option[K] = keyFromValue(other) } From 7d1a35a2c1c27701aea952bf6bb19e5b29a05f27 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Fri, 3 Dec 2021 03:39:06 -0800 Subject: [PATCH 447/769] Curtailed promise of symbol literal type --- spec/01-lexical-syntax.md | 11 +---------- spec/03-types.md | 2 +- 2 files changed, 2 insertions(+), 11 deletions(-) diff --git a/spec/01-lexical-syntax.md b/spec/01-lexical-syntax.md index 3dbed39d6806..7f3d2887238d 100644 --- a/spec/01-lexical-syntax.md +++ b/spec/01-lexical-syntax.md @@ -577,16 +577,7 @@ string literal does not start a valid escape sequence. symbolLiteral ::= ‘'’ plainid ``` -A symbol literal `'x` is a shorthand for the expression `scala.Symbol("x")` and -is of the [literal type](03-types.html#literal-types) `'x`. -`Symbol` is a [case class](05-classes-and-objects.html#case-classes), which is defined as follows. - -```scala -package scala -final case class Symbol private (name: String) { - override def toString: String = "'" + name -} -``` +A symbol literal `'x` is deprecated shorthand for the expression `scala.Symbol("x")`. The `apply` method of `Symbol`'s companion object caches weak references to `Symbol`s, thus ensuring that diff --git a/spec/03-types.md b/spec/03-types.md index 2f898d8acb39..b4bdb7cb2e07 100644 --- a/spec/03-types.md +++ b/spec/03-types.md @@ -131,7 +131,7 @@ determined by evaluating `e == lit`. Literal types are available for all types for which there is dedicated syntax except `Unit`. This includes the numeric types (other than `Byte` and `Short` -which don't currently have syntax), `Boolean`, `Char`, `String` and `Symbol`. +which don't currently have syntax), `Boolean`, `Char` and `String`. 
### Stable Types A _stable type_ is a singleton type, a literal type, From 771ac065f3619cb23cff8e69d3e63eb16e76887e Mon Sep 17 00:00:00 2001 From: NthPortal Date: Sat, 4 Dec 2021 22:02:33 -0500 Subject: [PATCH 448/769] [bug#12473] Throw on unsigned type in `NumericRange#reverse` --- .../collection/immutable/NumericRange.scala | 8 +++++++- test/junit/scala/runtime/RichCharTest.scala | 20 +++++++++++++++++++ 2 files changed, 27 insertions(+), 1 deletion(-) create mode 100644 test/junit/scala/runtime/RichCharTest.scala diff --git a/src/library/scala/collection/immutable/NumericRange.scala b/src/library/scala/collection/immutable/NumericRange.scala index 33464e875883..728fe3acbf54 100644 --- a/src/library/scala/collection/immutable/NumericRange.scala +++ b/src/library/scala/collection/immutable/NumericRange.scala @@ -153,7 +153,13 @@ sealed class NumericRange[T]( override def splitAt(n: Int): (NumericRange[T], NumericRange[T]) = (take(n), drop(n)) override def reverse: NumericRange[T] = - if (isEmpty) this else new NumericRange.Inclusive(last, start, -step) + if (isEmpty) this + else { + val newStep = -step + if (num.sign(newStep) == num.sign(step)) { + throw new ArithmeticException("number type is unsigned, and .reverse requires a negative step") + } else new NumericRange.Inclusive(last, start, newStep) + } import NumericRange.defaultOrdering diff --git a/test/junit/scala/runtime/RichCharTest.scala b/test/junit/scala/runtime/RichCharTest.scala new file mode 100644 index 000000000000..0acdfe14594d --- /dev/null +++ b/test/junit/scala/runtime/RichCharTest.scala @@ -0,0 +1,20 @@ +package scala.runtime + +import org.junit.Test + +import scala.collection.immutable.NumericRange +import scala.tools.testkit.AssertUtil.assertThrows + +class RichCharTest { + @Test + def rangeReverse(): Unit = { + def check(range: NumericRange[Char]): Unit = + assertThrows[ArithmeticException](range.reverse, + s => Seq("unsigned", "reverse", "negative step").forall(s.contains)) + + check('a' until 'z') + check('a' until 'z' by 2.toChar) + check('a' to 'z') + check('a' to 'z' by 2.toChar) + } +} From d0474076619bcc64c3cf13a251afb4c056d679a6 Mon Sep 17 00:00:00 2001 From: Georgi Krastev Date: Sat, 4 Dec 2021 11:04:46 +0100 Subject: [PATCH 449/769] GroupedIterator improvements - Specify the size when creating `ArrayBuffer`s - Modify intermediate results in place as much as possible - Don't allocate an `Option` for padding --- src/library/scala/collection/Iterator.scala | 80 +++++++++------------ 1 file changed, 34 insertions(+), 46 deletions(-) diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala index 1970d3babb62..cc6503ac3b92 100644 --- a/src/library/scala/collection/Iterator.scala +++ b/src/library/scala/collection/Iterator.scala @@ -146,19 +146,18 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite } /** A flexible iterator for transforming an `Iterator[A]` into an - * Iterator[Seq[A]], with configurable sequence size, step, and + * `Iterator[Seq[A]]`, with configurable sequence size, step, and * strategy for dealing with elements which don't fit evenly. * * Typical uses can be achieved via methods `grouped` and `sliding`. 
*/ class GroupedIterator[B >: A](self: Iterator[B], size: Int, step: Int) extends AbstractIterator[immutable.Seq[B]] { - require(size >= 1 && step >= 1, f"size=$size%d and step=$step%d, but both must be positive") - private[this] var buffer: ArrayBuffer[B] = ArrayBuffer() // the buffer - private[this] var filled = false // whether the buffer is "hot" - private[this] var _partial = true // whether we deliver short sequences - private[this] var pad: Option[() => B] = None // what to pad short sequences with + private[this] val group = new ArrayBuffer[B](size) // the group + private[this] var filled = false // whether the group is "hot" + private[this] var partial = true // whether we deliver short sequences + private[this] var pad: () => B = null // what to pad short sequences with /** Public functions which can be used to configure the iterator before use. * @@ -171,9 +170,10 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * @note This method is mutually exclusive with `withPartial(true)`. */ def withPadding(x: => B): this.type = { - pad = Some(() => x) + pad = () => x this } + /** Public functions which can be used to configure the iterator before use. * * Select whether the last segment may be returned with less than `size` @@ -186,10 +186,9 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * @note This method is mutually exclusive with `withPadding`. */ def withPartial(x: Boolean): this.type = { - _partial = x - if (_partial) // reset pad since otherwise it will take precedence - pad = None - + partial = x + // reset pad since otherwise it will take precedence + if (partial) pad = null this } @@ -200,8 +199,8 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * so a subsequent self.hasNext would not test self after the * group was consumed. */ - private def takeDestructively(size: Int): Seq[B] = { - val buf = new ArrayBuffer[B] + private def takeDestructively(size: Int): ArrayBuffer[B] = { + val buf = new ArrayBuffer[B](size) var i = 0 // The order of terms in the following condition is important // here as self.hasNext could be blocking @@ -212,45 +211,36 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite buf } - private def padding(x: Int) = immutable.ArraySeq.untagged.fill(x)(pad.get()) private def gap = (step - size) max 0 private def go(count: Int) = { - val prevSize = buffer.size + val prevSize = group.size def isFirst = prevSize == 0 + val extension = takeDestructively(count) // If there is padding defined we insert it immediately // so the rest of the code can be oblivious - val xs: Seq[B] = { - val res = takeDestructively(count) - // was: extra checks so we don't calculate length unless there's reason - // but since we took the group eagerly, just use the fast length - val shortBy = count - res.length - if (shortBy > 0 && pad.isDefined) res ++ padding(shortBy) else res + var shortBy = count - extension.size + if (pad != null) while (shortBy > 0) { + extension += pad() + shortBy -= 1 } - lazy val len = xs.length - lazy val incomplete = len < count + val extSize = extension.size // if 0 elements are requested, or if the number of newly obtained // elements is less than the gap between sequences, we are done. 
- def deliver(howMany: Int) = { - (howMany > 0 && (isFirst || len > gap)) && { - if (!isFirst) - buffer dropInPlace (step min prevSize) - - val available = - if (isFirst) len - else howMany min (len - gap) - - buffer ++= (xs takeRight available) + def deliver(howMany: Int) = + (howMany > 0 && (isFirst || extSize > gap)) && { + if (!isFirst) group.dropInPlace(step min prevSize) + val available = if (isFirst) extSize else howMany min (extSize - gap) + group ++= extension.takeRightInPlace(available) filled = true true } - } - if (xs.isEmpty) false // self ran out of elements - else if (_partial) deliver(len min size) // if _partial is true, we deliver regardless - else if (incomplete) false // !_partial && incomplete means no more seqs - else if (isFirst) deliver(len) // first element + if (extension.isEmpty) false // self ran out of elements + else if (partial) deliver(extSize min size) // if partial is true, we deliver regardless + else if (extSize < count) false // !partial && extSize < count means no more seqs + else if (isFirst) deliver(extSize) // first element else deliver(step min size) // the typical case } @@ -258,20 +248,18 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite private def fill(): Boolean = { if (!self.hasNext) false // the first time we grab size, but after that we grab step - else if (buffer.isEmpty) go(size) + else if (group.isEmpty) go(size) else go(step) } - def hasNext = filled || fill() + def hasNext: Boolean = filled || fill() + @throws[NoSuchElementException] def next(): immutable.Seq[B] = { - if (!filled) - fill() - - if (!filled) - throw new NoSuchElementException("next on empty iterator") + if (!filled) fill() + if (!filled) Iterator.empty.next() filled = false - immutable.ArraySeq.unsafeWrapArray(buffer.toArray[Any]).asInstanceOf[immutable.ArraySeq[B]] + immutable.ArraySeq.unsafeWrapArray(group.toArray[Any]).asInstanceOf[immutable.ArraySeq[B]] } } From 8f0577c603ac86e9bb1cc8fc01bb74f01501b5dc Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Thu, 9 Dec 2021 19:20:16 +0100 Subject: [PATCH 450/769] Don't emit `releaseFence` for class params of specialized classes Non-specialized val fields in a specialized class are made non-final because of the way specialization is encoded. A `releaseFence` call is added to the constructor to ensure safe publication. The releaseFence call is not necessary for class parameters as those remain final. Follow-up for #9704, fixes scala/bug#12500 --- src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala | 2 +- test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index 20f3b8c59486..14077bb69e49 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -750,7 +750,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { for (m <- normMembers) { if (!needsSpecialization(fullEnv, m)) { - if (m.isValue && !m.isMutable && !m.isMethod && !m.isDeferred && !m.isLazy) { + if (m.isValue && !m.isMutable && !m.isMethod && !m.isDeferred && !m.isLazy && !m.isParamAccessor) { // non-specialized `val` fields are made mutable (in Constructors) and assigned from the // constructors of specialized subclasses. See PR scala/scala#9704. 
clazz.primaryConstructor.updateAttachment(ConstructorNeedsFence) diff --git a/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala b/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala index f7a0de1c5372..44983abe6524 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala @@ -393,7 +393,7 @@ class BytecodeTest extends BytecodeTesting { @Test def nonSpecializedValFence(): Unit = { def code(u1: String) = - s"""abstract class Speck[@specialized(Int) T](t: T) { + s"""abstract class Speck[@specialized(Int) T](t: T, sm: String, val sn: String) { | val a = t | $u1 | lazy val u2 = "?" From db5dd2a30299810dff0c75fb94bfc874419b0dfc Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Thu, 18 Nov 2021 11:15:00 -0800 Subject: [PATCH 451/769] Minor cleanup for readability --- .../tools/nsc/typechecker/RefChecks.scala | 142 ++++++++---------- test/files/neg/abstract-report.check | 2 +- test/files/neg/abstract-report.scala | 2 +- 3 files changed, 64 insertions(+), 82 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index baef73b1df77..597c7db501d5 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -534,21 +534,21 @@ abstract class RefChecks extends Transform { // Verifying a concrete class has nothing unimplemented. if (clazz.isConcreteClass && !typesOnly) { - val abstractErrors = new ListBuffer[String] - def abstractErrorMessage = - // a little formatting polish - if (abstractErrors.size <= 2) abstractErrors mkString " " - else abstractErrors.tail.mkString(abstractErrors.head + "\n", "\n", "") - - def abstractClassError(mustBeMixin: Boolean, msg: String): Unit = { - def prelude = ( + val abstractErrors = ListBuffer.empty[String] + def abstractErrorMessage = abstractErrors.mkString(if (abstractErrors.size <= 2) " " else "\n") + + def mustBeMixin(msg: String): Unit = addError(mustBeMixin = true, msg, supplement = "") + def abstractClassError(msg: String): Unit = addError(mustBeMixin = false, msg, supplement = "") + def abstractClassErrorStubs(msg: String, stubs: String): Unit = addError(mustBeMixin = false, msg, supplement = stubs) + def addError(mustBeMixin: Boolean, msg: String, supplement: String): Unit = { + def prelude = if (clazz.isAnonymousClass || clazz.isModuleClass) "object creation impossible." else if (mustBeMixin) s"$clazz needs to be a mixin." else s"$clazz needs to be abstract." - ) - if (abstractErrors.isEmpty) abstractErrors ++= List(prelude, msg) - else abstractErrors += msg + if (abstractErrors.isEmpty) abstractErrors += prelude + abstractErrors += msg + if (!supplement.isEmpty) abstractErrors += supplement } def javaErasedOverridingSym(sym: Symbol): Symbol = @@ -563,43 +563,34 @@ abstract class RefChecks extends Transform { exitingErasure(tp1 matches tp2) }) - def ignoreDeferred(member: Symbol) = ( + def ignoreDeferred(member: Symbol) = (member.isAbstractType && !member.isFBounded) || ( // the test requires exitingErasure so shouldn't be // done if the compiler has no erasure phase available member.isJavaDefined && (currentRun.erasurePhase == NoPhase || javaErasedOverridingSym(member) != NoSymbol) ) - ) // 2. Check that only abstract classes have deferred members def checkNoAbstractMembers(): Unit = { // Avoid spurious duplicates: first gather any missing members. 
- def memberList = clazz.info.nonPrivateMembersAdmitting(VBRIDGE) - var missing: List[Symbol] = Nil - var rest: List[Symbol] = Nil - memberList.reverseIterator.foreach { - case m if m.isDeferred && !ignoreDeferred(m) => - missing ::= m - case m if m.isAbstractOverride && m.isIncompleteIn(clazz) => - rest ::= m - case _ => // No more + val (missing, rest): (List[Symbol], Iterator[Symbol]) = { + val memberList = clazz.info.nonPrivateMembersAdmitting(VBRIDGE) + val (missing0, rest0) = memberList.iterator.partition(m => m.isDeferred & !ignoreDeferred(m)) + (missing0.toList, rest0) } - // Group missing members by the name of the underlying symbol, - // to consolidate getters and setters. - val grouped = missing groupBy (_.name.getterName) - val missingMethods = grouped.toList flatMap { - case (name, syms) => - if (syms exists (_.isSetter)) syms filterNot (_.isGetter) - else syms + // Group missing members by the name of the underlying symbol, to consolidate getters and setters. + val grouped = missing.groupBy(_.name.getterName) + val missingMethods = grouped.toList.flatMap { + case (_, syms) if syms.exists(_.isSetter) => syms.filterNot(_.isGetter) + case (_, syms) => syms } - def stubImplementations: List[String] = { // Grouping missing methods by the declaring class val regrouped = missingMethods.groupBy(_.owner).toList def membersStrings(members: List[Symbol]) = { - members foreach fullyInitializeSymbol - members.sortBy(_.name) map (m => m.defStringSeenAs(clazz.tpe_* memberType m) + " = ???") + members.foreach(fullyInitializeSymbol) + members.sortBy(_.name).map(m => s"${m.defStringSeenAs(clazz.tpe_* memberType m)} = ???") } if (regrouped.tail.isEmpty) @@ -609,15 +600,6 @@ abstract class RefChecks extends Transform { ("// Members declared in " + owner.fullName) +: membersStrings(members) :+ "" }).init } - - // If there are numerous missing methods, we presume they are aware of it and - // give them a nicely formatted set of method signatures for implementing. - if (missingMethods.size > 1) { - abstractClassError(false, s"Missing implementations for ${missingMethods.size} members. Stub implementations follow:") - abstractErrors += stubImplementations.map(" " + _ + "\n").mkString("", "", "") - return - } - def diagnose(member: Symbol): String = { val underlying = analyzer.underlyingSymbol(member) // TODO: don't use this method @@ -629,12 +611,11 @@ abstract class RefChecks extends Transform { if (groupedAccessors.exists(_.isSetter) || (member.isGetter && !isMultiple && member.setterIn(member.owner).exists)) { // If both getter and setter are missing, squelch the setter error. if (member.isSetter && isMultiple) null - else { - if (member.isSetter) "\n(Note that an abstract var requires a setter in addition to the getter)" - else if (member.isGetter && !isMultiple) "\n(Note that an abstract var requires a getter in addition to the setter)" - else "\n(Note that variables need to be initialized to be defined)" - } - } else if (underlying.isMethod) { + else if (member.isSetter) "\n(Note that an abstract var requires a setter in addition to the getter)" + else if (member.isGetter && !isMultiple) "\n(Note that an abstract var requires a getter in addition to the setter)" + else "\n(Note that variables need to be initialized to be defined)" + } + else if (underlying.isMethod) { // Highlight any member that nearly matches: same name and arity, // but differs in one param or param list. 
val abstractParamLists = underlying.paramLists @@ -646,15 +627,17 @@ abstract class RefChecks extends Transform { sumSize(m.paramLists, 0) == sumSize(abstractParamLists, 0) && sameLength(m.tpe.typeParams, underlying.tpe.typeParams) } - matchingArity match { // So far so good: only one candidate method case Scope(concrete) => - val aplIter = abstractParamLists .iterator.flatten - val cplIter = concrete.paramLists.iterator.flatten + val concreteParamLists = concrete.paramLists + val aplIter = abstractParamLists.iterator.flatten + val cplIter = concreteParamLists.iterator.flatten def mismatch(apl: Symbol, cpl: Symbol): Option[(Type, Type)] = if (apl.tpe.asSeenFrom(clazz.tpe, underlying.owner) =:= cpl.tpe) None else Some(apl.tpe -> cpl.tpe) - + def missingImplicit = abstractParamLists.zip(concreteParamLists).exists { + case (abss, konkrete) => abss.headOption.exists(_.isImplicit) && !konkrete.headOption.exists(_.isImplicit) + } val mismatches = mapFilter2(aplIter, cplIter)(mismatch).take(2).toList mismatches match { // Only one mismatched parameter: say something useful. @@ -666,8 +649,7 @@ abstract class RefChecks extends Transform { val addendum = ( if (abstractSym == concreteSym) { // TODO: what is the optimal way to test for a raw type at this point? - // Compilation has already failed so we shouldn't have to worry overmuch - // about forcing types. + // Compilation has already failed so we shouldn't have to worry overmuch about forcing types. if (underlying.isJavaDefined && pa.typeArgs.isEmpty && abstractSym.typeParams.nonEmpty) s". To implement this raw type, use ${rawToExistential(pa)}" else if (pa.prefix =:= pc.prefix) @@ -675,19 +657,12 @@ abstract class RefChecks extends Transform { else ": their prefixes (i.e., enclosing instances) differ" } - else if (abstractSym isSubClass concreteSym) - subclassMsg(abstractSym, concreteSym) - else if (concreteSym isSubClass abstractSym) - subclassMsg(concreteSym, abstractSym) + else if (abstractSym.isSubClass(concreteSym)) subclassMsg(abstractSym, concreteSym) + else if (concreteSym.isSubClass(abstractSym)) subclassMsg(concreteSym, abstractSym) else "" ) s"\n(Note that $pa does not match $pc$addendum)" - case Nil => // other overriding gotchas - val missingImplicit = abstractParamLists.zip(concrete.paramLists).exists { - case (abss, konkrete) => abss.headOption.exists(_.isImplicit) && !konkrete.headOption.exists(_.isImplicit) - } - val msg = if (missingImplicit) "\n(overriding member must declare implicit parameter list)" else "" - msg + case Nil if missingImplicit => "\n(overriding member must declare implicit parameter list)" // other overriding gotchas case _ => "" } case _ => "" @@ -695,22 +670,30 @@ abstract class RefChecks extends Transform { } else "" } - for (member <- missing ; msg = diagnose(member) ; if msg != null) { - val addendum = if (msg.isEmpty) msg else " " + msg - val from = if (member.owner != clazz) s" // inherited from ${member.owner}" else "" - abstractClassError(false, s"Missing implementation for:\n ${infoString0(member, false)}$from$addendum") + // The outcomes are + // - 1 method in current class + // If there are numerous missing methods, we presume they are aware of it and + // give them a nicely formatted set of method signatures for implementing. + if (missingMethods.size > 1) { + val stubs = stubImplementations.map(" " + _ + "\n").mkString("", "", "") + abstractClassErrorStubs(s"Missing implementations for ${missingMethods.size} members. 
Stub implementations follow:", stubs) } - - // Check the remainder for invalid absoverride. - rest.foreach { member => - val other = member.superSymbolIn(clazz) - val explanation = - if (other != NoSymbol) " and overrides incomplete superclass member\n" + infoString(other) - else ", but no concrete implementation could be found in a base class" - - abstractClassError(true, s"${infoString(member)} is marked `abstract` and `override`$explanation") + else { + for (member <- missing ; msg = diagnose(member) if msg != null) { + val addendum = if (msg.isEmpty) msg else " " + msg + val from = if (member.owner != clazz) s" // inherited from ${member.owner}" else "" + abstractClassError(s"Missing implementation for:\n ${infoString0(member, false)}$from$addendum") + } + // Check the remainder for invalid absoverride. + for (member <- rest if member.isAbstractOverride && member.isIncompleteIn(clazz)) { + val explanation = member.superSymbolIn(clazz) match { + case NoSymbol => ", but no concrete implementation could be found in a base class" + case other => " and overrides incomplete superclass member\n" + infoString(other) + } + mustBeMixin(s"${infoString(member)} is marked `abstract` and `override`$explanation") + } } - } + } // end checkNoAbstractMembers // 3. Check that concrete classes do not have deferred definitions // that are not implemented in a subclass. @@ -724,10 +707,9 @@ abstract class RefChecks extends Transform { for (decl <- bc.info.decls) { if (decl.isDeferred && !ignoreDeferred(decl)) { val impl = decl.matchingSymbol(clazz.thisType, admit = VBRIDGE) - if (impl == NoSymbol || (decl.owner isSubClass impl.owner)) { - abstractClassError(false, s"No implementation found in a subclass for deferred declaration\n" + + if (impl == NoSymbol || decl.owner.isSubClass(impl.owner)) + abstractClassError(s"No implementation found in a subclass for deferred declaration\n" + s"${infoString(decl)}${analyzer.abstractVarMessage(decl)}") - } } } if (bc.superClass hasFlag ABSTRACT) diff --git a/test/files/neg/abstract-report.check b/test/files/neg/abstract-report.check index 41e46af09eba..80ff55c045aa 100644 --- a/test/files/neg/abstract-report.check +++ b/test/files/neg/abstract-report.check @@ -10,6 +10,6 @@ Missing implementations for 6 members. Stub implementations follow: protected def newSpecificBuilder: scala.collection.mutable.Builder[String,List[String]] = ??? def toIterable: Iterable[String] = ??? -class Unimplemented extends scala.collection.IterableOps[String, List, List[String]] { } +class Unimplemented extends scala.collection.IterableOps[String, List, List[String]] ^ 1 error diff --git a/test/files/neg/abstract-report.scala b/test/files/neg/abstract-report.scala index 6a9fb414096e..fd4b5b1dce60 100644 --- a/test/files/neg/abstract-report.scala +++ b/test/files/neg/abstract-report.scala @@ -1 +1 @@ -class Unimplemented extends scala.collection.IterableOps[String, List, List[String]] { } +class Unimplemented extends scala.collection.IterableOps[String, List, List[String]] From 2f74486e0641ff5bb3f4bfc45cdd9ed863ae21b0 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 10 Dec 2021 10:50:45 +1000 Subject: [PATCH 452/769] Deal with Object/Any impedence mismatch under -Ypickle-write-java -Ypickle-write-java can generate Scala pickles for Java files. Unpickler needs to deal with the special case for Object/Any, as we do when using Java sources of javac compiled .class files as inputs. 
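For example (the scenario exercised by the PickleWriteTest case added below): a Java class
compiled with -Ypickle-write-java declares the overloads `ol(scala.Equals)` and `ol(Object)`.
A Scala client resolving

    new b1.p1.J[String]().ol(Option(""))   // sketch only; names taken from the test below

was spuriously reported as ambiguous, because `Object` read back from the Java-only pickle was
treated as plain `AnyRef` rather than the special Java flavour of `Object`, so neither overload
was more specific than the other. With the fix the call resolves to `ol(scala.Equals)`, matching
the behaviour when `J` comes from a .java source or a javac-compiled .class file.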
Fixes scala/bug#12512 --- .../reflect/internal/pickling/UnPickler.scala | 8 ++- .../scala/tools/nsc/PickleWriteTest.scala | 57 +++++++++++++++++++ 2 files changed, 64 insertions(+), 1 deletion(-) diff --git a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala index 09f3e8009b98..35131dbefba4 100644 --- a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala +++ b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala @@ -399,6 +399,12 @@ abstract class UnPickler { ThisType(sym) } + def fixJavaObjectType(typeRef: Type): Type = { + if (classRoot.isJava && typeRef =:= definitions.ObjectTpe) { + definitions.ObjectTpeJava + } else typeRef + } + // We're stuck with the order types are pickled in, but with judicious use // of named parameters we can recapture a declarative flavor in a few cases. // But it's still a rat's nest of ad-hockery. @@ -409,7 +415,7 @@ abstract class UnPickler { case SINGLEtpe => SingleType(readTypeRef(), readSymbolRef().filter(_.isStable)) // scala/bug#7596 account for overloading case SUPERtpe => SuperType(readTypeRef(), readTypeRef()) case CONSTANTtpe => ConstantType(readConstantRef()) - case TYPEREFtpe => TypeRef(readTypeRef(), readSymbolRef(), readTypes()) + case TYPEREFtpe => fixJavaObjectType(TypeRef(readTypeRef(), readSymbolRef(), readTypes())) case TYPEBOUNDStpe => TypeBounds(readTypeRef(), readTypeRef()) case REFINEDtpe | CLASSINFOtpe => CompoundType(readSymbolRef(), readTypes()) case METHODtpe => MethodTypeRef(readTypeRef(), readSymbols()) diff --git a/test/junit/scala/tools/nsc/PickleWriteTest.scala b/test/junit/scala/tools/nsc/PickleWriteTest.scala index 57dcc7b46d0a..04100950ffd5 100644 --- a/test/junit/scala/tools/nsc/PickleWriteTest.scala +++ b/test/junit/scala/tools/nsc/PickleWriteTest.scala @@ -96,4 +96,61 @@ class PickleWriteTest { new global2.Run().compile(command2.files) assert(!global2.reporter.hasErrors) } + + @Test + def testPickleWriteJava(): Unit = { + val pathFactory = new VirtualFilePathFactory + + val build = new Build(projectsBase, "b1") + val p1 = build.project("p1") + val p1ApiVirtual = VirtualFilePathFactory.path("p1") + p1.scalacOptions ++= List( + "-Ypickle-write", p1ApiVirtual, + "-Ypickle-java", + "-Ystop-after:pickler" + ) + p1.withSource("b1/p1/J.java")( + """ + |package b1.p1; + |public class J { + | public Object foo(Object o) { return o; } + | public T bar(T t) { return t; } + | + | public void ol(scala.Equals o) {} // Equals extends AnyVal + | public void ol(Object o) {} + |} + """.stripMargin) + + val p2 = build.project("p2") + p2.classpath += p1ApiVirtual + p2.withSource("b1/p2/Client.scala")( + """ + |package b1.p2 + |class Client[T] extends b1.p1.J[T] { + | override def foo(o: Object): Object = o + | override def bar(t: T): T = t + | def test(): Unit = { + | // this was incorrectly showing as ambiguous because Unpickler wasn't massaging type refs to Object + | // in Java-defined .sig files. 
+ | ol(Option("")) + | } + |} + """.stripMargin) + + val settings1 = new Settings(Console.println, pathFactory) + settings1.usejavacp.value = true + val argsFile1 = p1.argsFile() + val command1 = new CompilerCommand("@" + argsFile1.toAbsolutePath.toString :: Nil, settings1) + val global1 = new Global(command1.settings) + new global1.Run().compile(command1.files) + assert(!global1.reporter.hasErrors) + + val argsFile2 = p2.argsFile() + val settings2 = new Settings(Console.println, pathFactory) + settings2.usejavacp.value = true + val command2 = new CompilerCommand("@" + argsFile2.toAbsolutePath.toString :: Nil, settings2) + val global2 = new Global(command2.settings) + new global2.Run().compile(command2.files) + assert(!global2.reporter.hasErrors) + } } From 94ccc1caaae424fc012c0a3584806248fe84ff9a Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Tue, 7 Dec 2021 05:01:08 -0800 Subject: [PATCH 453/769] Simplify check of _root_ usage --- .../scala/tools/nsc/ast/parser/Parsers.scala | 11 ++- .../scala/tools/nsc/typechecker/Typers.scala | 83 ++++++++----------- .../reflect/internal/StdAttachments.scala | 3 + .../reflect/runtime/JavaUniverseForce.scala | 1 + test/files/neg/t6217.scala | 2 +- test/files/neg/t6217b.scala | 2 +- test/files/neg/t6217c.scala | 2 +- 7 files changed, 52 insertions(+), 52 deletions(-) diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index a012521ccb3e..94e0e78a655d 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -1357,6 +1357,7 @@ self => t = selector(t) if (in.token == DOT) t = selectors(t, typeOK, in.skipToken()) } else { + if (name == nme.ROOTPKG) t.updateAttachment(RootSelection) t = selectors(t, typeOK, dotOffset) } } @@ -1400,7 +1401,10 @@ self => def qualId(): Tree = { val start = in.offset val id = atPos(start) { Ident(ident()) } - if (in.token == DOT) { selectors(id, typeOK = false, in.skipToken()) } + if (in.token == DOT) { + if (id.name == nme.ROOTPKG) id.updateAttachment(RootSelection) + selectors(id, typeOK = false, in.skipToken()) + } else id } /** Calls `qualId()` and manages some package state. */ @@ -2704,7 +2708,10 @@ self => else syntaxError(in.lastOffset, s". 
expected", skipIt = false) if (in.token == THIS) thisDotted(id.name.toTypeName) - else id + else { + if (id.name == nme.ROOTPKG) id.updateAttachment(RootSelection) + id + } }) } diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index f0d111d66813..0144fd5d4c1a 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -6147,58 +6147,47 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper @inline final def typedQualifier(tree: Tree): Tree = typedQualifier(tree, NOmode, WildcardType) // if a package id is a selection from _root_ in scope, warn about semantics and set symbol for typedQualifier - @inline final def typedPackageQualifier(tree: Tree): Tree = typedQualifier(packageQualifierTraverser(tree)) - - object packageQualifierTraverser extends Traverser { - def checkRootSymbol(t: Tree): Unit = - context.lookupSymbol(nme.ROOTPKG, p => p.hasPackageFlag && !p.isRootPackage) match { - case LookupSucceeded(_, sym) => - runReporting.warning( - t.pos, - s"${nme.ROOTPKG} in root position in package definition does not refer to the root package, but to ${sym.fullLocationString}, which is in scope", - WarningCategory.Other, - currentOwner) - t.setSymbol(sym) - case _ => () - } - override def traverse(tree: Tree): Unit = - tree match { - case Select(id@Ident(nme.ROOTPKG), _) if !id.hasExistingSymbol => checkRootSymbol(id) - case _ => super.traverse(tree) - } + @inline final def typedPackageQualifier(tree: Tree): Tree = typedQualifier(checkRootOfPackageQualifier(tree)) + + def checkRootOfPackageQualifier(q: Tree): Tree = { + q match { + case Select(id @ Ident(nme.ROOTPKG), _) if !id.hasExistingSymbol && id.hasAttachment[RootSelection.type] => + context.lookupSymbol(nme.ROOTPKG, p => p.hasPackageFlag && !p.isRootPackage) match { + case LookupSucceeded(_, sym) => + runReporting.warning( + id.pos, + s"${nme.ROOTPKG} in root position in package definition does not refer to the root package, but to ${sym.fullLocationString}, which is in scope", + WarningCategory.Other, + context.owner) + id.removeAttachment[RootSelection.type] + id.setSymbol(sym) + case _ => + } + case _ => + } + q } /** If import from path starting with _root_, warn if there is a _root_ value in scope, * and ensure _root_ can only be the root package in that position. 
*/ - @inline def checkRootOfQualifier(q: Tree, mode: Mode): Tree = - if (mode.typingPatternOrTypePat) patternQualifierTraverser(q) else nonpatternQualifierTraverser(q) - - abstract class QualifierTraverser extends Traverser { - def startContext: Context - def checkRootSymbol(t: Tree): Unit = { - startContext.lookupSymbol(nme.ROOTPKG, !_.isRootPackage) match { - case LookupSucceeded(_, sym) => - runReporting.warning( - t.pos, - s"${nme.ROOTPKG} in root position of qualifier refers to the root package, not ${sym.fullLocationString}, which is in scope", - WarningCategory.Other, - currentOwner) - t.setSymbol(rootMirror.RootPackage) - case _ => () - } - } - override def traverse(tree: Tree): Unit = - tree match { - case Select(id@Ident(nme.ROOTPKG), _) if !id.hasExistingSymbol => checkRootSymbol(id) - case _ => super.traverse(tree) - } - } - object patternQualifierTraverser extends QualifierTraverser { - override def startContext = context.outer - } - object nonpatternQualifierTraverser extends QualifierTraverser { - override def startContext = context + def checkRootOfQualifier(q: Tree, mode: Mode): Tree = { + q match { + case Ident(nme.ROOTPKG) if !q.hasExistingSymbol && q.hasAttachment[RootSelection.type] => + val startContext = if (mode.typingPatternOrTypePat) context.outer else context + startContext.lookupSymbol(nme.ROOTPKG, !_.isRootPackage) match { + case LookupSucceeded(_, sym) => + runReporting.warning( + q.pos, + s"${nme.ROOTPKG} in root position of qualifier refers to the root package, not ${sym.fullLocationString}, which is in scope", + WarningCategory.Other, + context.owner) + case _ => + } + q.setSymbol(rootMirror.RootPackage) + case _ => + } + q } /** Types function part of an application */ diff --git a/src/reflect/scala/reflect/internal/StdAttachments.scala b/src/reflect/scala/reflect/internal/StdAttachments.scala index 0c8af3b7601f..4cb68fa65322 100644 --- a/src/reflect/scala/reflect/internal/StdAttachments.scala +++ b/src/reflect/scala/reflect/internal/StdAttachments.scala @@ -135,4 +135,7 @@ trait StdAttachments { case class ChangeOwnerAttachment(originalOwner: Symbol) case object InterpolatedString extends PlainAttachment + + // Use of _root_ is in correct leading position of selection + case object RootSelection extends PlainAttachment } diff --git a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala index c093aa14bd23..fd08efbacf1d 100644 --- a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala +++ b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala @@ -73,6 +73,7 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => this.NullaryOverrideAdapted this.ChangeOwnerAttachment this.InterpolatedString + this.RootSelection this.noPrint this.typeDebug // inaccessible: this.posAssigner diff --git a/test/files/neg/t6217.scala b/test/files/neg/t6217.scala index ac15486c0573..931fe8c13930 100644 --- a/test/files/neg/t6217.scala +++ b/test/files/neg/t6217.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings +// scalac: -Werror package p { package _root_ { package scala { diff --git a/test/files/neg/t6217b.scala b/test/files/neg/t6217b.scala index e0452a4b2f56..a33cae6eca7e 100644 --- a/test/files/neg/t6217b.scala +++ b/test/files/neg/t6217b.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings +// scalac: -Werror package p package _root_ object Test { diff --git a/test/files/neg/t6217c.scala b/test/files/neg/t6217c.scala index b0bbb78a7f70..f27162811d96 100644 --- a/test/files/neg/t6217c.scala +++ 
b/test/files/neg/t6217c.scala @@ -1,4 +1,4 @@ -// scalac: -Xfatal-warnings +// scalac: -Werror package b { class B } From bbde45c537bda47b246afd10e76d1af3e4c94ad9 Mon Sep 17 00:00:00 2001 From: Philippus Date: Sat, 11 Dec 2021 08:16:12 +0100 Subject: [PATCH 454/769] Update sbt 1.5.5 to 1.5.6 --- project/build.properties | 2 +- scripts/common | 2 +- src/intellij/scala.ipr.SAMPLE | 36 +++++++++++++------------- test/jcstress/project/build.properties | 2 +- 4 files changed, 21 insertions(+), 21 deletions(-) diff --git a/project/build.properties b/project/build.properties index 10fd9eee04ac..bb3a9b7dc6d2 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.5.5 +sbt.version=1.5.6 diff --git a/scripts/common b/scripts/common index 5118e9ec4b17..2fc012cbe8c4 100644 --- a/scripts/common +++ b/scripts/common @@ -11,7 +11,7 @@ else fi SBT_CMD=${SBT_CMD-sbt} -SBT_CMD="$SBT_CMD -sbt-version 1.5.5" +SBT_CMD="$SBT_CMD -sbt-version 1.5.6" # repo to publish builds integrationRepoUrl=${integrationRepoUrl-"https://scala-ci.typesafe.com/artifactory/scala-integration/"} diff --git a/src/intellij/scala.ipr.SAMPLE b/src/intellij/scala.ipr.SAMPLE index 53c7254a1cfb..292cce8c2a87 100644 --- a/src/intellij/scala.ipr.SAMPLE +++ b/src/intellij/scala.ipr.SAMPLE @@ -325,13 +325,13 @@ - + - + - - + + @@ -354,7 +354,7 @@ - + @@ -368,8 +368,8 @@ - - + + @@ -380,13 +380,13 @@ - + - + @@ -397,16 +397,16 @@ - + - + - + - + @@ -428,17 +428,17 @@ - - + + - - + + - + diff --git a/test/jcstress/project/build.properties b/test/jcstress/project/build.properties index 10fd9eee04ac..bb3a9b7dc6d2 100644 --- a/test/jcstress/project/build.properties +++ b/test/jcstress/project/build.properties @@ -1 +1 @@ -sbt.version=1.5.5 +sbt.version=1.5.6 From 1c9daea6adf0cd50e888d7c32c5ef17bd90901c8 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Thu, 9 Dec 2021 23:24:40 -0800 Subject: [PATCH 455/769] Rejigger missing methods check Always print the missing methods in stub form, with the template in the header, and with diagnostic text as a line comment. --- .../tools/nsc/typechecker/RefChecks.scala | 109 +++++++++--------- test/files/neg/abstract-class-2.check | 7 +- test/files/neg/abstract-class-error.check | 7 +- .../files/neg/abstract-concrete-methods.check | 7 +- test/files/neg/abstract-report.check | 2 +- test/files/neg/abstract-report2.check | 16 ++- test/files/neg/abstract-report2.scala | 22 ++++ test/files/neg/abstract-vars.check | 35 +++--- test/files/neg/accesses2.check | 6 +- test/files/neg/logImplicits.check | 6 +- test/files/neg/raw-types-stubs.check | 4 +- test/files/neg/t0345.check | 6 +- test/files/neg/t10260.check | 28 +++-- test/files/neg/t2213.check | 4 +- test/files/neg/t3854.check | 7 +- test/files/neg/t4431.check | 3 +- test/files/neg/t521.check | 12 +- test/files/neg/t6013.check | 6 +- test/files/neg/t856.check | 2 +- test/files/neg/t9138.check | 27 +++-- 20 files changed, 187 insertions(+), 129 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index 597c7db501d5..6f92de69b4bd 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -535,7 +535,7 @@ abstract class RefChecks extends Transform { // Verifying a concrete class has nothing unimplemented. 
if (clazz.isConcreteClass && !typesOnly) { val abstractErrors = ListBuffer.empty[String] - def abstractErrorMessage = abstractErrors.mkString(if (abstractErrors.size <= 2) " " else "\n") + def abstractErrorMessage = abstractErrors.mkString("\n") def mustBeMixin(msg: String): Unit = addError(mustBeMixin = true, msg, supplement = "") def abstractClassError(msg: String): Unit = addError(mustBeMixin = false, msg, supplement = "") @@ -573,54 +573,24 @@ abstract class RefChecks extends Transform { // 2. Check that only abstract classes have deferred members def checkNoAbstractMembers(): Unit = { - // Avoid spurious duplicates: first gather any missing members. - val (missing, rest): (List[Symbol], Iterator[Symbol]) = { - val memberList = clazz.info.nonPrivateMembersAdmitting(VBRIDGE) - val (missing0, rest0) = memberList.iterator.partition(m => m.isDeferred & !ignoreDeferred(m)) - (missing0.toList, rest0) - } - // Group missing members by the name of the underlying symbol, to consolidate getters and setters. - val grouped = missing.groupBy(_.name.getterName) - val missingMethods = grouped.toList.flatMap { - case (_, syms) if syms.exists(_.isSetter) => syms.filterNot(_.isGetter) - case (_, syms) => syms - } - def stubImplementations: List[String] = { - // Grouping missing methods by the declaring class - val regrouped = missingMethods.groupBy(_.owner).toList - def membersStrings(members: List[Symbol]) = { - members.foreach(fullyInitializeSymbol) - members.sortBy(_.name).map(m => s"${m.defStringSeenAs(clazz.tpe_* memberType m)} = ???") - } - - if (regrouped.tail.isEmpty) - membersStrings(regrouped.head._2) - else (regrouped.sortBy("" + _._1.name) flatMap { - case (owner, members) => - ("// Members declared in " + owner.fullName) +: membersStrings(members) :+ "" - }).init - } - def diagnose(member: Symbol): String = { + def diagnose(member: Symbol, accessors: List[Symbol]): String = { val underlying = analyzer.underlyingSymbol(member) // TODO: don't use this method // Give a specific error message for abstract vars based on why it fails: // It could be unimplemented, have only one accessor, or be uninitialized. - val groupedAccessors = grouped.getOrElse(member.name.getterName, Nil) - val isMultiple = groupedAccessors.size > 1 - - if (groupedAccessors.exists(_.isSetter) || (member.isGetter && !isMultiple && member.setterIn(member.owner).exists)) { - // If both getter and setter are missing, squelch the setter error. - if (member.isSetter && isMultiple) null - else if (member.isSetter) "\n(Note that an abstract var requires a setter in addition to the getter)" - else if (member.isGetter && !isMultiple) "\n(Note that an abstract var requires a getter in addition to the setter)" - else "\n(Note that variables need to be initialized to be defined)" + val isMultiple = accessors.size > 1 + + if (accessors.exists(_.isSetter) || (member.isGetter && !isMultiple && member.setterIn(member.owner).exists)) { + if (member.isSetter && isMultiple) null // If both getter and setter are missing, squelch the setter error. + else if (member.isSetter) "an abstract var requires a setter in addition to the getter" + else if (member.isGetter && !isMultiple) "an abstract var requires a getter in addition to the setter" + else "variables need to be initialized to be defined" } else if (underlying.isMethod) { // Highlight any member that nearly matches: same name and arity, // but differs in one param or param list. 
val abstractParamLists = underlying.paramLists - val matchingName = clazz.tpe.nonPrivateMembersAdmitting(VBRIDGE) - val matchingArity = matchingName.filter { m => + val matchingArity = clazz.tpe.nonPrivateMembersAdmitting(VBRIDGE).filter { m => !m.isDeferred && m.name == underlying.name && sameLength(m.paramLists, abstractParamLists) && @@ -661,8 +631,8 @@ abstract class RefChecks extends Transform { else if (concreteSym.isSubClass(abstractSym)) subclassMsg(concreteSym, abstractSym) else "" ) - s"\n(Note that $pa does not match $pc$addendum)" - case Nil if missingImplicit => "\n(overriding member must declare implicit parameter list)" // other overriding gotchas + s"$pa does not match $pc$addendum" + case Nil if missingImplicit => "overriding member must declare implicit parameter list" // other overriding gotchas case _ => "" } case _ => "" @@ -670,20 +640,49 @@ abstract class RefChecks extends Transform { } else "" } - // The outcomes are - // - 1 method in current class - // If there are numerous missing methods, we presume they are aware of it and - // give them a nicely formatted set of method signatures for implementing. - if (missingMethods.size > 1) { - val stubs = stubImplementations.map(" " + _ + "\n").mkString("", "", "") - abstractClassErrorStubs(s"Missing implementations for ${missingMethods.size} members. Stub implementations follow:", stubs) + // Avoid spurious duplicates: first gather any missing members. + val (missing, rest): (List[Symbol], Iterator[Symbol]) = { + val memberList = clazz.info.nonPrivateMembersAdmitting(VBRIDGE) + val (missing0, rest0) = memberList.iterator.partition(m => m.isDeferred & !ignoreDeferred(m)) + (missing0.toList, rest0) } - else { - for (member <- missing ; msg = diagnose(member) if msg != null) { - val addendum = if (msg.isEmpty) msg else " " + msg - val from = if (member.owner != clazz) s" // inherited from ${member.owner}" else "" - abstractClassError(s"Missing implementation for:\n ${infoString0(member, false)}$from$addendum") - } + if (missing.nonEmpty) { + // Group missing members by the name of the underlying symbol, to consolidate getters and setters. + val byName = missing.groupBy(_.name.getterName) + // There may be 1 or more missing members declared in 1 or more parents. + // If a single parent, the message names it. Otherwise, missing members are grouped by declaring class. 
+ val byOwner = missing.groupBy(_.owner).toList + val announceOwner = byOwner.size > 1 + def membersStrings(members: List[Symbol]) = + members.sortBy(_.name).map { m => + val accessors = byName.getOrElse(m.name.getterName, Nil) + val diagnostic = diagnose(m, accessors) + if (diagnostic == null) null + else { + val s0 = infoString0(m, showLocation = false) + fullyInitializeSymbol(m) + val s1 = m.defStringSeenAs(clazz.tpe_*.memberType(m)) + val implMsg = if (s0 != s1) s"implements `$s0`" else "" + val spacer = if (diagnostic.nonEmpty && implMsg.nonEmpty) "; " else "" + val comment = if (diagnostic.nonEmpty || implMsg.nonEmpty) s" // $implMsg$spacer$diagnostic" else "" + s"$s1 = ???$comment" + } + }.filter(_ ne null) + var count = 0 + val stubs = + byOwner.sortBy(_._1.name.toString).flatMap { + case (owner, members) => + val ms = membersStrings(members) :+ "" + count += ms.size - 1 + if (announceOwner) s"// Members declared in ${owner.fullName}" :: ms else ms + }.init.map(s => s" $s\n").mkString + val isMulti = count > 1 + val singleParent = if (byOwner.size == 1 && byOwner.head._1 != clazz) s" member${if (isMulti) "s" else ""} of ${byOwner.head._1}" else "" + val line0 = + if (isMulti) s"Missing implementations for ${count}${val p = singleParent ; if (p.isEmpty) " members" else p}." + else s"Missing implementation${val p = singleParent ; if (p.isEmpty) p else s" for$p"}:" + abstractClassErrorStubs(line0, stubs) + // Check the remainder for invalid absoverride. for (member <- rest if member.isAbstractOverride && member.isIncompleteIn(clazz)) { val explanation = member.superSymbolIn(clazz) match { diff --git a/test/files/neg/abstract-class-2.check b/test/files/neg/abstract-class-2.check index 68121a633c67..90f76289882f 100644 --- a/test/files/neg/abstract-class-2.check +++ b/test/files/neg/abstract-class-2.check @@ -1,6 +1,7 @@ -abstract-class-2.scala:11: error: object creation impossible. Missing implementation for: - def f(x: P2.this.p.S1): Int // inherited from trait S2 -(Note that P.this.p.S1 does not match P2.this.S1: their prefixes (i.e., enclosing instances) differ) +abstract-class-2.scala:11: error: object creation impossible. +Missing implementation for member of trait S2: + def f(x: P2.this.p.S1): Int = ??? // P.this.p.S1 does not match P2.this.S1: their prefixes (i.e., enclosing instances) differ + object O2 extends S2 { ^ 1 error diff --git a/test/files/neg/abstract-class-error.check b/test/files/neg/abstract-class-error.check index c8b2fb8f1906..9e30ffd214fa 100644 --- a/test/files/neg/abstract-class-error.check +++ b/test/files/neg/abstract-class-error.check @@ -1,6 +1,7 @@ -S.scala:1: error: class S needs to be abstract. Missing implementation for: - def g(y: Int, z: java.util.List): Int // inherited from class J -(Note that java.util.List does not match java.util.List[String]. To implement this raw type, use java.util.List[_]) +S.scala:1: error: class S needs to be abstract. +Missing implementation for member of class J: + def g(y: Int, z: java.util.List[_]): Int = ??? // implements `def g(y: Int, z: java.util.List): Int`; java.util.List does not match java.util.List[String]. 
To implement this raw type, use java.util.List[_] + class S extends J { ^ 1 error diff --git a/test/files/neg/abstract-concrete-methods.check b/test/files/neg/abstract-concrete-methods.check index bbf9f714eb05..b3ab5bff2e2d 100644 --- a/test/files/neg/abstract-concrete-methods.check +++ b/test/files/neg/abstract-concrete-methods.check @@ -1,6 +1,7 @@ -abstract-concrete-methods.scala:7: error: class Outer2 needs to be abstract. Missing implementation for: - def score(i: Outer2#Inner): Double // inherited from trait Outer -(Note that This#Inner does not match Outer2#Inner: class Inner in class Outer2 is a subclass of trait Inner in trait Outer, but method parameter types must match exactly.) +abstract-concrete-methods.scala:7: error: class Outer2 needs to be abstract. +Missing implementation for member of trait Outer: + def score(i: Outer2#Inner): Double = ??? // This#Inner does not match Outer2#Inner: class Inner in class Outer2 is a subclass of trait Inner in trait Outer, but method parameter types must match exactly. + class Outer2 extends Outer[Outer2] { ^ 1 error diff --git a/test/files/neg/abstract-report.check b/test/files/neg/abstract-report.check index 80ff55c045aa..736a021329f7 100644 --- a/test/files/neg/abstract-report.check +++ b/test/files/neg/abstract-report.check @@ -1,5 +1,5 @@ abstract-report.scala:1: error: class Unimplemented needs to be abstract. -Missing implementations for 6 members. Stub implementations follow: +Missing implementations for 6 members. // Members declared in scala.collection.IterableOnce def iterator: Iterator[String] = ??? diff --git a/test/files/neg/abstract-report2.check b/test/files/neg/abstract-report2.check index c77c30509e35..7a97c84a1c7f 100644 --- a/test/files/neg/abstract-report2.check +++ b/test/files/neg/abstract-report2.check @@ -1,5 +1,5 @@ abstract-report2.scala:3: error: class Foo needs to be abstract. -Missing implementations for 13 members. Stub implementations follow: +Missing implementations for 13 members of trait Collection. def add(x$1: Int): Boolean = ??? def addAll(x$1: java.util.Collection[_ <: Int]): Boolean = ??? def clear(): Unit = ??? @@ -17,7 +17,7 @@ Missing implementations for 13 members. Stub implementations follow: class Foo extends Collection[Int] ^ abstract-report2.scala:5: error: class Bar needs to be abstract. -Missing implementations for 13 members. Stub implementations follow: +Missing implementations for 13 members of trait Collection. def add(x$1: List[_ <: String]): Boolean = ??? def addAll(x$1: java.util.Collection[_ <: List[_ <: String]]): Boolean = ??? def clear(): Unit = ??? @@ -35,7 +35,7 @@ Missing implementations for 13 members. Stub implementations follow: class Bar extends Collection[List[_ <: String]] ^ abstract-report2.scala:7: error: class Baz needs to be abstract. -Missing implementations for 13 members. Stub implementations follow: +Missing implementations for 13 members of trait Collection. def add(x$1: T): Boolean = ??? def addAll(x$1: java.util.Collection[_ <: T]): Boolean = ??? def clear(): Unit = ??? @@ -53,7 +53,7 @@ Missing implementations for 13 members. Stub implementations follow: class Baz[T] extends Collection[T] ^ abstract-report2.scala:21: error: class Dingus needs to be abstract. -Missing implementations for 7 members. Stub implementations follow: +Missing implementations for 7 members. // Members declared in scala.collection.IterableOnce def iterator: Iterator[(Set[Int], String)] = ??? @@ -69,4 +69,10 @@ Missing implementations for 7 members. 
Stub implementations follow: class Dingus extends Bippy[String, Set[Int], List[Int]] ^ -4 errors +abstract-report2.scala:23: error: class JustOne needs to be abstract. +Missing implementation for member of trait Collection: + def toArray[T](x$1: Array[T with Object]): Array[T with Object] = ??? + +class JustOne extends Collection[Int] { + ^ +5 errors diff --git a/test/files/neg/abstract-report2.scala b/test/files/neg/abstract-report2.scala index b11d97a0c0d4..5e7d58c9bd5c 100644 --- a/test/files/neg/abstract-report2.scala +++ b/test/files/neg/abstract-report2.scala @@ -19,3 +19,25 @@ trait Symbolic { trait Bippy[T1, T2, T3] extends collection.IterableOps[(T2, String), List, List[(T2, String)]] with Xyz[T3] class Dingus extends Bippy[String, Set[Int], List[Int]] + +class JustOne extends Collection[Int] { + def add(x$1: Int): Boolean = ??? + def addAll(x$1: java.util.Collection[_ <: Int]): Boolean = ??? + def clear(): Unit = ??? + def contains(x$1: Object): Boolean = ??? + def containsAll(x$1: java.util.Collection[_]): Boolean = ??? + def isEmpty(): Boolean = ??? + def iterator(): java.util.Iterator[Int] = ??? + def remove(x$1: Object): Boolean = ??? + def removeAll(x$1: java.util.Collection[_]): Boolean = ??? + def retainAll(x$1: java.util.Collection[_]): Boolean = ??? + def size(): Int = ??? + //def toArray[T](x$1: Array[T with Object]): Array[T with Object] = ??? + def toArray(): Array[Object] = ??? +} +/* was: +test/files/neg/abstract-report2.scala:23: error: class JustOne needs to be abstract. Missing implementation for: + def toArray[T](x$1: Array[T with Object]): Array[T with Object] // inherited from trait Collection +(Note that Array[T with Object] does not match java.util.function.IntFunction[Array[T with Object]]) +class JustOne extends Collection[Int] { + */ diff --git a/test/files/neg/abstract-vars.check b/test/files/neg/abstract-vars.check index 39092a836e97..9610c97b68fc 100644 --- a/test/files/neg/abstract-vars.check +++ b/test/files/neg/abstract-vars.check @@ -1,26 +1,31 @@ -abstract-vars.scala:5: error: class Fail1 needs to be abstract. Missing implementation for: - def x: Int -(Note that variables need to be initialized to be defined) +abstract-vars.scala:5: error: class Fail1 needs to be abstract. +Missing implementation: + def x: Int = ??? // variables need to be initialized to be defined + class Fail1 extends A { ^ -abstract-vars.scala:9: error: class Fail2 needs to be abstract. Missing implementation for: - def x: Int // inherited from class A -(Note that variables need to be initialized to be defined) +abstract-vars.scala:9: error: class Fail2 needs to be abstract. +Missing implementation for member of class A: + def x: Int = ??? // variables need to be initialized to be defined + class Fail2 extends A { } ^ -abstract-vars.scala:11: error: class Fail3 needs to be abstract. Missing implementation for: - def x_=(x$1: Int): Unit // inherited from class A -(Note that an abstract var requires a setter in addition to the getter) +abstract-vars.scala:11: error: class Fail3 needs to be abstract. +Missing implementation for member of class A: + def x_=(x$1: Int): Unit = ??? // an abstract var requires a setter in addition to the getter + class Fail3 extends A { ^ -abstract-vars.scala:14: error: class Fail4 needs to be abstract. Missing implementation for: - def x_=(x$1: Int): Unit // inherited from class A -(Note that an abstract var requires a setter in addition to the getter) +abstract-vars.scala:14: error: class Fail4 needs to be abstract. 
+Missing implementation for member of class A: + def x_=(x$1: Int): Unit = ??? // an abstract var requires a setter in addition to the getter + class Fail4 extends A { ^ -abstract-vars.scala:18: error: class Fail5 needs to be abstract. Missing implementation for: - def x: Int // inherited from class A -(Note that an abstract var requires a getter in addition to the setter) +abstract-vars.scala:18: error: class Fail5 needs to be abstract. +Missing implementation for member of class A: + def x: Int = ??? // an abstract var requires a getter in addition to the setter + class Fail5 extends A { ^ 5 errors diff --git a/test/files/neg/accesses2.check b/test/files/neg/accesses2.check index 56895b717ea1..f4aff8f61dfd 100644 --- a/test/files/neg/accesses2.check +++ b/test/files/neg/accesses2.check @@ -3,8 +3,10 @@ private[package p2] def f2(): Int (defined in class A) override should not be private private def f2(): Int = 1 ^ -accesses2.scala:5: error: class B1 needs to be abstract. Missing implementation for: - private[package p2] def f2(): Int // inherited from class A +accesses2.scala:5: error: class B1 needs to be abstract. +Missing implementation for member of class A: + private[package p2] def f2(): Int = ??? + class B1 extends A { ^ accesses2.scala:9: error: weaker access privileges in overriding diff --git a/test/files/neg/logImplicits.check b/test/files/neg/logImplicits.check index 4779952a699b..5ffcac5da49d 100644 --- a/test/files/neg/logImplicits.check +++ b/test/files/neg/logImplicits.check @@ -13,8 +13,10 @@ logImplicits.scala:21: applied implicit conversion from Int(1) to ?{def -> : ?} logImplicits.scala:21: applied implicit conversion from (Int, Int) to ?{def + : ?} = final implicit def any2stringadd[A](self: A): any2stringadd[A] def f = (1 -> 2) + "c" ^ -logImplicits.scala:24: error: class Un needs to be abstract. Missing implementation for: - def unimplemented: Int +logImplicits.scala:24: error: class Un needs to be abstract. +Missing implementation: + def unimplemented: Int = ??? + class Un { ^ 1 error diff --git a/test/files/neg/raw-types-stubs.check b/test/files/neg/raw-types-stubs.check index 217346f55b33..9d677259edfb 100644 --- a/test/files/neg/raw-types-stubs.check +++ b/test/files/neg/raw-types-stubs.check @@ -1,6 +1,6 @@ S_3.scala:1: error: class Sub needs to be abstract. -Missing implementations for 2 members. Stub implementations follow: - def raw(x$1: M_1[_ <: String]): Unit = ??? +Missing implementations for 2 members of class Raw_2. + def raw(x$1: M_1[_ <: String]): Unit = ??? // implements `def raw(x$1: M_1): Unit` def raw(x$1: Object): Unit = ??? class Sub extends Raw_2 { } diff --git a/test/files/neg/t0345.check b/test/files/neg/t0345.check index 31a416a3cba3..5e2e0611d9cc 100644 --- a/test/files/neg/t0345.check +++ b/test/files/neg/t0345.check @@ -1,5 +1,7 @@ -t0345.scala:2: error: object creation impossible. Missing implementation for: - def cons(a: Nothing): Unit // inherited from trait Lizt +t0345.scala:2: error: object creation impossible. +Missing implementation for member of trait Lizt: + def cons(a: Nothing): Unit = ??? + val empty = new Lizt[Nothing] { ^ 1 error diff --git a/test/files/neg/t10260.check b/test/files/neg/t10260.check index 8564edf654c2..11c8029f52d0 100644 --- a/test/files/neg/t10260.check +++ b/test/files/neg/t10260.check @@ -1,21 +1,25 @@ -Test.scala:1: error: class IAImpl needs to be abstract. Missing implementation for: - def foo(a: A): Unit // inherited from trait IA -(Note that A does not match A[_]. 
To implement this raw type, use A[T] forSome { type T <: A[T] }) +Test.scala:1: error: class IAImpl needs to be abstract. +Missing implementation for member of trait IA: + def foo(a: A[T] forSome { type T <: A[T] }): Unit = ??? // implements `def foo(a: A): Unit`; A does not match A[_]. To implement this raw type, use A[T] forSome { type T <: A[T] } + class IAImpl extends IA { def foo(a: A[_]) = ??? } ^ -Test.scala:2: error: class IBImpl needs to be abstract. Missing implementation for: - def foo(a: B): Unit // inherited from trait IB -(Note that B does not match B[_, _]. To implement this raw type, use B[T,R] forSome { type T; type R <: java.util.List[_ >: T] }) +Test.scala:2: error: class IBImpl needs to be abstract. +Missing implementation for member of trait IB: + def foo(a: B[T,R] forSome { type T; type R <: java.util.List[_ >: T] }): Unit = ??? // implements `def foo(a: B): Unit`; B does not match B[_, _]. To implement this raw type, use B[T,R] forSome { type T; type R <: java.util.List[_ >: T] } + class IBImpl extends IB { def foo(a: B[_,_]) = ??? } ^ -Test.scala:3: error: class ICImpl needs to be abstract. Missing implementation for: - def foo(a: Int, b: C, c: String): C // inherited from trait IC -(Note that C does not match C[_]. To implement this raw type, use C[_ <: String]) +Test.scala:3: error: class ICImpl needs to be abstract. +Missing implementation for member of trait IC: + def foo(a: Int, b: C[_ <: String], c: String): C[_ <: String] = ??? // implements `def foo(a: Int, b: C, c: String): C`; C does not match C[_]. To implement this raw type, use C[_ <: String] + class ICImpl extends IC { def foo(a: Int, b: C[_], c: String) = ??? } ^ -Test.scala:4: error: class IDImpl needs to be abstract. Missing implementation for: - def foo(a: D): Unit // inherited from trait ID -(Note that D does not match D[_ <: String]. To implement this raw type, use D[_]) +Test.scala:4: error: class IDImpl needs to be abstract. +Missing implementation for member of trait ID: + def foo(a: D[_]): Unit = ??? // implements `def foo(a: D): Unit`; D does not match D[_ <: String]. To implement this raw type, use D[_] + class IDImpl extends ID { def foo(a: D[_ <: String]) = ??? } ^ 4 errors diff --git a/test/files/neg/t2213.check b/test/files/neg/t2213.check index 06f17099dea6..ae97b55a9768 100644 --- a/test/files/neg/t2213.check +++ b/test/files/neg/t2213.check @@ -1,5 +1,5 @@ t2213.scala:9: error: class C needs to be abstract. -Missing implementations for 4 members. Stub implementations follow: +Missing implementations for 4 members of class A. def f: Int = ??? def g: Int = ??? val x: Int = ??? @@ -8,7 +8,7 @@ Missing implementations for 4 members. Stub implementations follow: class C extends A {} ^ t2213.scala:11: error: object creation impossible. -Missing implementations for 4 members. Stub implementations follow: +Missing implementations for 4 members of class A. def f: Int = ??? def g: Int = ??? val x: Int = ??? diff --git a/test/files/neg/t3854.check b/test/files/neg/t3854.check index b0826dde8beb..935c1c563a22 100644 --- a/test/files/neg/t3854.check +++ b/test/files/neg/t3854.check @@ -1,6 +1,7 @@ -t3854.scala:1: error: class Bar needs to be abstract. Missing implementation for: - def foo[G[_]](implicit n: N[G,F]): X[F] // inherited from trait Foo -(Note that N[G,F] does not match M[G]) +t3854.scala:1: error: class Bar needs to be abstract. +Missing implementation for member of trait Foo: + def foo[G[_]](implicit n: N[G,F]): X[F] = ??? 
// N[G,F] does not match M[G] + class Bar[F[_]] extends Foo[F] { ^ 1 error diff --git a/test/files/neg/t4431.check b/test/files/neg/t4431.check index cfa6fd0ce5be..50e28e8bb3b6 100644 --- a/test/files/neg/t4431.check +++ b/test/files/neg/t4431.check @@ -1,4 +1,5 @@ -t4431.scala:5: error: class BB needs to be abstract. No implementation found in a subclass for deferred declaration +t4431.scala:5: error: class BB needs to be abstract. +No implementation found in a subclass for deferred declaration def f(): Unit class BB extends B { def f (): Unit } ^ diff --git a/test/files/neg/t521.check b/test/files/neg/t521.check index 9cdb6a824084..3cf03a2c36bc 100644 --- a/test/files/neg/t521.check +++ b/test/files/neg/t521.check @@ -1,13 +1,17 @@ -t521.scala:10: error: class PlainFile needs to be abstract. Missing implementation for: - def path: String // inherited from class AbstractFile +t521.scala:10: error: class PlainFile needs to be abstract. +Missing implementation for member of class AbstractFile: + def path: String = ??? + class PlainFile(val file : File) extends AbstractFile {} ^ t521.scala:13: error: `override` modifier required to override concrete member: val file: java.io.File (defined in class PlainFile) final class ZipArchive(val file : File, archive : ZipFile) extends PlainFile(file) { ^ -t521.scala:13: error: class ZipArchive needs to be abstract. Missing implementation for: - def path: String // inherited from class AbstractFile +t521.scala:13: error: class ZipArchive needs to be abstract. +Missing implementation for member of class AbstractFile: + def path: String = ??? + final class ZipArchive(val file : File, archive : ZipFile) extends PlainFile(file) { ^ t521.scala:15: error: stable, immutable value required to override: diff --git a/test/files/neg/t6013.check b/test/files/neg/t6013.check index 9640c4156489..daa978432760 100644 --- a/test/files/neg/t6013.check +++ b/test/files/neg/t6013.check @@ -1,8 +1,10 @@ -DerivedScala.scala:4: error: class C needs to be abstract. No implementation found in a subclass for deferred declaration +DerivedScala.scala:4: error: class C needs to be abstract. +No implementation found in a subclass for deferred declaration def foo: Int (defined in class B) class C extends B ^ -DerivedScala.scala:7: error: class DerivedScala needs to be abstract. No implementation found in a subclass for deferred declaration +DerivedScala.scala:7: error: class DerivedScala needs to be abstract. +No implementation found in a subclass for deferred declaration def foo(): Boolean (defined in class Abstract) class DerivedScala extends Abstract ^ diff --git a/test/files/neg/t856.check b/test/files/neg/t856.check index 63f1229a75b1..ee13aa9411b1 100644 --- a/test/files/neg/t856.check +++ b/test/files/neg/t856.check @@ -1,5 +1,5 @@ t856.scala:3: error: class ComplexRect needs to be abstract. -Missing implementations for 2 members. Stub implementations follow: +Missing implementations for 2 members. // Members declared in scala.Equals def canEqual(that: Any): Boolean = ??? diff --git a/test/files/neg/t9138.check b/test/files/neg/t9138.check index a0c993226e07..b6d296c96900 100644 --- a/test/files/neg/t9138.check +++ b/test/files/neg/t9138.check @@ -1,20 +1,25 @@ -t9138.scala:9: error: class D needs to be abstract. Missing implementation for: - def f(t: B)(s: String): B // inherited from class C -(Note that String does not match Int) +t9138.scala:9: error: class D needs to be abstract. +Missing implementation for member of class C: + def f(t: B)(s: String): B = ??? 
// String does not match Int + class D extends C[B] { ^ -t9138.scala:19: error: object creation impossible. Missing implementation for: - def foo(a: String)(b: Int): Nothing // inherited from trait Base +t9138.scala:19: error: object creation impossible. +Missing implementation for member of trait Base: + def foo(a: String)(b: Int): Nothing = ??? + object Derived extends Base[String] { ^ -t9138.scala:29: error: class DDD needs to be abstract. Missing implementation for: - def f(t: B, s: String): B // inherited from class CCC -(Note that T does not match Int) +t9138.scala:29: error: class DDD needs to be abstract. +Missing implementation for member of class CCC: + def f(t: B, s: String): B = ??? // T does not match Int + class DDD extends CCC[B] { ^ -t9138.scala:43: error: object creation impossible. Missing implementation for: - def create(conditionalParams: ImplementingParamTrait)(implicit d: Double): Int // inherited from trait Model -(overriding member must declare implicit parameter list) +t9138.scala:43: error: object creation impossible. +Missing implementation for member of trait Model: + def create(conditionalParams: ImplementingParamTrait)(implicit d: Double): Int = ??? // overriding member must declare implicit parameter list + object Obj extends Model[ImplementingParamTrait] { ^ 4 errors From b1c22386783754f1ec643b0c620d7fdf55833f88 Mon Sep 17 00:00:00 2001 From: Georgi Krastev Date: Sun, 12 Dec 2021 10:21:17 +0100 Subject: [PATCH 456/769] Tweak inferMethodInstance to consider if implicits are enabled When implicits are disabled, we can't recover later with a conversion. Esp. when we are already type checking an implicit conversion. This fixes a regression uncovered in Finch. --- src/compiler/scala/tools/nsc/typechecker/Infer.scala | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index 9dbede86660f..49b40e16903b 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -1076,7 +1076,11 @@ trait Infer extends Checkable { */ def inferMethodInstance(fn: Tree, undetParams: List[Symbol], args: List[Tree], pt0: Type): List[Symbol] = fn.tpe match { - case mt @ MethodType(_, _) => + case mt: MethodType => + // If we can't infer the type parameters, we can recover in `tryTypedApply` with an implicit conversion, + // but only when implicit conversions are enabled. In that case we have to infer the type parameters again. 
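// Illustrative, hypothetical sketch (user-level code, not compiler internals) of the
// recovery the comment above describes: inferring the type parameter from the direct
// application fails, but with implicit conversions enabled the argument is adapted
// and inference runs again, so the call still typechecks.
import scala.language.implicitConversions

object InferenceRecoverySketch {
  class Wrapped[A](val value: A)
  implicit def wrap[A](a: A): Wrapped[A] = new Wrapped(a)

  def first[A](w: Wrapped[A]): A = w.value

  // List[Int] is not a Wrapped[A], so A cannot be inferred from the direct
  // application; the implicit view `wrap` then adapts the argument and A is
  // inferred as List[Int]. With implicits disabled (for example while already
  // typechecking an implicit conversion), no such recovery is possible.
  val xs: List[Int] = first(List(1, 2))
}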
+ def noInstanceResult = if (context.implicitsEnabled) undetParams else Nil + try { val pt = if (pt0.typeSymbol == UnitClass) WildcardType else pt0 val formals = formalTypes(mt.paramTypes, args.length) @@ -1101,10 +1105,10 @@ trait Infer extends Checkable { enhanceBounds(adjusted.okParams, adjusted.okArgs, xs1) xs1 } - } else undetParams + } else noInstanceResult } catch ifNoInstance { msg => NoMethodInstanceError(fn, args, msg) - undetParams + noInstanceResult } case x => throw new MatchError(x) } From 39057e51dbf3e44942d0265d99b5d190fb86a2d3 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Tue, 14 Dec 2021 10:53:08 -0800 Subject: [PATCH 457/769] sbt 1.5.6 (was 1.5.5) --- project/build.properties | 2 +- scripts/common | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/project/build.properties b/project/build.properties index 10fd9eee04ac..bb3a9b7dc6d2 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.5.5 +sbt.version=1.5.6 diff --git a/scripts/common b/scripts/common index 8cfac63b2f47..474161e3fd6d 100644 --- a/scripts/common +++ b/scripts/common @@ -15,7 +15,7 @@ rm -rf "$WORKSPACE/resolutionScratch_" mkdir -p "$WORKSPACE/resolutionScratch_" SBT_CMD=${SBT_CMD-sbt} -SBT_CMD="$SBT_CMD -sbt-version 1.5.5" +SBT_CMD="$SBT_CMD -sbt-version 1.5.6" # repo to publish builds integrationRepoUrl=${integrationRepoUrl-"https://scala-ci.typesafe.com/artifactory/scala-integration/"} From 6c35ebbf38bb09a0e2d6292df4209b45403508d8 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Tue, 14 Dec 2021 10:53:31 -0800 Subject: [PATCH 458/769] upgrade logback and slf4j they are used only in the build and testing, but regardless, let's do it --- build.sbt | 6 +++--- project/plugins.sbt | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/build.sbt b/build.sbt index 5bf8d015d125..cdc32dbd094f 100644 --- a/build.sbt +++ b/build.sbt @@ -741,9 +741,9 @@ def osgiTestProject(p: Project, framework: ModuleID) = p "org.ops4j.pax.exam" % "pax-exam-link-assembly" % paxExamVersion, "org.ops4j.pax.url" % "pax-url-aether" % "2.4.1", "org.ops4j.pax.swissbox" % "pax-swissbox-tracker" % "1.8.1", - "ch.qos.logback" % "logback-core" % "1.1.3", - "ch.qos.logback" % "logback-classic" % "1.1.3", - "org.slf4j" % "slf4j-api" % "1.7.12", + "ch.qos.logback" % "logback-core" % "1.2.8", + "ch.qos.logback" % "logback-classic" % "1.2.8", + "org.slf4j" % "slf4j-api" % "1.7.32", framework % "test" ) }, diff --git a/project/plugins.sbt b/project/plugins.sbt index 77018c1b4bba..5f9a27ca4b72 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -22,7 +22,7 @@ addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.8.1") libraryDependencies ++= Seq( "org.eclipse.jgit" % "org.eclipse.jgit" % "4.6.0.201612231935-r", - "org.slf4j" % "slf4j-nop" % "1.7.31", + "org.slf4j" % "slf4j-nop" % "1.7.32", "com.googlecode.java-diff-utils" % "diffutils" % "1.3.0" ) From 6fce59a5b283e90a94f20bdc02e8ce79bf3fbb34 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Tue, 14 Dec 2021 20:27:24 -0800 Subject: [PATCH 459/769] bye bye Gitter, hello Discord --- CONTRIBUTING.md | 11 +++++------ README.md | 4 ++-- 2 files changed, 7 insertions(+), 8 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 59c9675e690d..197f841d78db 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -10,9 +10,9 @@ In 2014, you -- the Scala community -- matched the core team at EPFL in number o We are super happy about this, and are eager to make your experience contributing to Scala productive and 
satisfying, so that we can keep up this growth. We can't do this alone (nor do we want to)! -This is why we're collecting these notes on how to contribute, and we hope you'll share your experience to improve the process for the next contributor! (Feel free to send a PR for this note, send your thoughts to scala/contributors (Gitter) or contributors.scala-lang.org (Discourse).) +This is why we're collecting these notes on how to contribute, and we hope you'll share your experience to improve the process for the next contributor! (Feel free to send a PR for this note, send your thoughts to \#scala-contributors (on [Discord](https://discord.com/invite/scala)) or contributors.scala-lang.org (Discourse).) -By the way, the team at Lightbend is: @lrytz, @retronym, @SethTisue, and @dwijnand. +By the way, the team at Lightbend is: @lrytz, @retronym, @SethTisue, and @dwijnand. ## What kind of PR are you submitting? @@ -285,8 +285,7 @@ See the [scala-jenkins-infra repo](https://github.com/scala/scala-jenkins-infra) ### Pass code review Your PR will need to be assigned to one or more reviewers. You can suggest reviewers -yourself; if you're not sure, see the list in [README.md](README.md) or ask on scala/contributors (Gitter) -or contributors.scala-lang.org (Discourse). +yourself; if you're not sure, see the list in [README.md](README.md) or ask on \#scala-contributors (on [Discord](https://discord.com/invite/scala)) or contributors.scala-lang.org (Discourse). To assign a reviewer, add a "review by @reviewer" to the PR description or in a comment on your PR. @@ -300,8 +299,8 @@ and `push -f` to the branch. This is to keep the git history clean. Additional c are OK if they stand on their own. Once all these conditions are met, we will merge your changes -- if we -agree with it! We are available on scala/contributors (Gitter) or -contributors.scala-lang.org (Discourse) to discuss changes beforehand, +agree with it! We are available on \#scala-contributors (on [Discord](https://discord.com/invite/scala)) +or contributors.scala-lang.org (Discourse) to discuss changes beforehand, before you put in the coding work. diff --git a/README.md b/README.md index 1fefc3f11305..e7613b0abed5 100644 --- a/README.md +++ b/README.md @@ -26,8 +26,8 @@ For more information on building and developing the core of Scala, read the rest # Get in touch! -In order to get in touch with other Scala contributors, join -[scala/contributors](https://gitter.im/scala/contributors) (Gitter) or post on +In order to get in touch with other Scala contributors, join the +\#scala-contributors channel on the [Scala Discord](https://discord.com/invite/scala) chat, or post on [contributors.scala-lang.org](https://contributors.scala-lang.org) (Discourse). 
If you need some help with your PR at any time, please feel free to @-mention anyone from the list below, and we will do our best to help you out: From 88ac5e40a8551345c55a1fafc44ea51eca7a8d96 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Tue, 14 Dec 2021 21:08:49 -0800 Subject: [PATCH 460/769] Backport nowarn advice tweak --- src/library/scala/annotation/nowarn.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/library/scala/annotation/nowarn.scala b/src/library/scala/annotation/nowarn.scala index 8fb0a5549956..889b81f8583f 100644 --- a/src/library/scala/annotation/nowarn.scala +++ b/src/library/scala/annotation/nowarn.scala @@ -29,7 +29,7 @@ package scala.annotation * def f = { 1; deprecated() } // show deprecation warning * }}} * - * To ensure that a `@nowarn` annotation actually suppresses a warning, enable `-Xlint:nowarn`. + * To ensure that a `@nowarn` annotation actually suppresses a warning, enable `-Xlint:unused` or `-Wunused:nowarn`. */ @nowarn("msg=subclassing ClassfileAnnotation does not\nmake your annotation visible at runtime") class nowarn(value: String = "") extends ClassfileAnnotation From c03d42029ee9a5f0d12fc138e0de7549cc6c8dc7 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Thu, 16 Dec 2021 11:14:50 +0100 Subject: [PATCH 461/769] Update biz.aQute.bndlib to 6.1.0 in 2.12.x --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 5f9a27ca4b72..7a22d89c508f 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -4,7 +4,7 @@ libraryDependencies += "org.apache.commons" % "commons-lang3" % "3.12.0" libraryDependencies += "org.pantsbuild" % "jarjar" % "1.7.2" -libraryDependencies += "biz.aQute.bnd" % "biz.aQute.bndlib" % "5.3.0" +libraryDependencies += "biz.aQute.bnd" % "biz.aQute.bndlib" % "6.1.0" enablePlugins(BuildInfoPlugin) From 85f8fa73a13fe36c09599733d8503329a46ac9fd Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Thu, 16 Dec 2021 11:14:55 +0100 Subject: [PATCH 462/769] Update jackson-annotations, jackson-core to 2.9.10 in 2.12.x --- build.sbt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/build.sbt b/build.sbt index cdc32dbd094f..768260149519 100644 --- a/build.sbt +++ b/build.sbt @@ -411,8 +411,8 @@ lazy val compilerOptionsExporter = Project("compilerOptionsExporter", file(".") .settings(disablePublishing) .settings( libraryDependencies ++= Seq( - "com.fasterxml.jackson.core" % "jackson-core" % "2.9.7", - "com.fasterxml.jackson.core" % "jackson-annotations" % "2.9.7", + "com.fasterxml.jackson.core" % "jackson-core" % "2.9.10", + "com.fasterxml.jackson.core" % "jackson-annotations" % "2.9.10", "com.fasterxml.jackson.core" % "jackson-databind" % "2.9.7", "com.fasterxml.jackson.dataformat" % "jackson-dataformat-yaml" % "2.9.7", "com.fasterxml.jackson.module" %% "jackson-module-scala" % "2.9.7" From 7eb8abdc212cfa5abb04a993df8b083c9527437c Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Thu, 16 Dec 2021 11:15:10 +0100 Subject: [PATCH 463/769] Update jackson-databind to 2.9.10.8 in 2.12.x --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index cdc32dbd094f..61d4b2bb1757 100644 --- a/build.sbt +++ b/build.sbt @@ -413,7 +413,7 @@ lazy val compilerOptionsExporter = Project("compilerOptionsExporter", file(".") libraryDependencies ++= Seq( "com.fasterxml.jackson.core" % "jackson-core" % "2.9.7", "com.fasterxml.jackson.core" % "jackson-annotations" % "2.9.7", - 
"com.fasterxml.jackson.core" % "jackson-databind" % "2.9.7", + "com.fasterxml.jackson.core" % "jackson-databind" % "2.9.10.8", "com.fasterxml.jackson.dataformat" % "jackson-dataformat-yaml" % "2.9.7", "com.fasterxml.jackson.module" %% "jackson-module-scala" % "2.9.7" ) From e99f6a541b6cd518d341b0aa1fb58c947bc61d78 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Thu, 16 Dec 2021 11:15:23 +0100 Subject: [PATCH 464/769] Update jackson-dataformat-yaml to 2.9.10 in 2.12.x --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index cdc32dbd094f..265545ea9597 100644 --- a/build.sbt +++ b/build.sbt @@ -414,7 +414,7 @@ lazy val compilerOptionsExporter = Project("compilerOptionsExporter", file(".") "com.fasterxml.jackson.core" % "jackson-core" % "2.9.7", "com.fasterxml.jackson.core" % "jackson-annotations" % "2.9.7", "com.fasterxml.jackson.core" % "jackson-databind" % "2.9.7", - "com.fasterxml.jackson.dataformat" % "jackson-dataformat-yaml" % "2.9.7", + "com.fasterxml.jackson.dataformat" % "jackson-dataformat-yaml" % "2.9.10", "com.fasterxml.jackson.module" %% "jackson-module-scala" % "2.9.7" ) ) From e8a02cca32e61bf777e67f7cd65905f014febc47 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Thu, 16 Dec 2021 11:15:35 +0100 Subject: [PATCH 465/769] Update jackson-module-scala to 2.9.10 in 2.12.x --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index cdc32dbd094f..71857d74dd7c 100644 --- a/build.sbt +++ b/build.sbt @@ -415,7 +415,7 @@ lazy val compilerOptionsExporter = Project("compilerOptionsExporter", file(".") "com.fasterxml.jackson.core" % "jackson-annotations" % "2.9.7", "com.fasterxml.jackson.core" % "jackson-databind" % "2.9.7", "com.fasterxml.jackson.dataformat" % "jackson-dataformat-yaml" % "2.9.7", - "com.fasterxml.jackson.module" %% "jackson-module-scala" % "2.9.7" + "com.fasterxml.jackson.module" %% "jackson-module-scala" % "2.9.10" ) ) From bfef99f02bb6a33e1426cb7a0dd89f09c32ecddc Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Thu, 16 Dec 2021 11:15:56 +0100 Subject: [PATCH 466/769] Update sbt-mima-plugin to 0.9.2 in 2.12.x --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 5f9a27ca4b72..ea4a0c22cb39 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -18,7 +18,7 @@ buildInfoKeys := Seq[BuildInfoKey](buildClasspath) buildInfoPackage := "scalabuild" -addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.8.1") +addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.9.2") libraryDependencies ++= Seq( "org.eclipse.jgit" % "org.eclipse.jgit" % "4.6.0.201612231935-r", From 883001a6ec136bfc39fea638987532a98170d751 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Thu, 16 Dec 2021 11:16:02 +0100 Subject: [PATCH 467/769] Update sbt-header to 5.6.0 in 2.12.x --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 5f9a27ca4b72..ad452a99be76 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -30,6 +30,6 @@ concurrentRestrictions in Global := Seq( Tags.limitAll(1) // workaround for https://github.com/sbt/sbt/issues/2970 ) -addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.0.0") +addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.6.0") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.2") From 5c531917712f4ab77419e92ba7e1d3b3d5195b78 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Thu, 16 Dec 2021 
11:16:26 +0100 Subject: [PATCH 468/769] Update ant to 1.9.16 in 2.12.x --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index cdc32dbd094f..d3f9c470652b 100644 --- a/build.sbt +++ b/build.sbt @@ -47,7 +47,7 @@ val jolDep = "org.openjdk.jol" % "jol-core" % " val asmDep = "org.scala-lang.modules" % "scala-asm" % versionProps("scala-asm.version") val jlineDep = "jline" % "jline" % versionProps("jline.version") val jansiDep = "org.fusesource.jansi" % "jansi" % "1.12" -val antDep = "org.apache.ant" % "ant" % "1.9.4" +val antDep = "org.apache.ant" % "ant" % "1.9.16" val testInterfaceDep = "org.scala-sbt" % "test-interface" % "1.0" val diffUtilsDep = "com.googlecode.java-diff-utils" % "diffutils" % "1.3.0" From b80376aa7d446dac8b5ac5d1f8c652e5b5df3e86 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Thu, 16 Dec 2021 11:16:31 +0100 Subject: [PATCH 469/769] Update org.eclipse.jgit to 4.6.1.201703071140-r in 2.12.x --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 5f9a27ca4b72..5f21dc5dc09b 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -21,7 +21,7 @@ buildInfoPackage := "scalabuild" addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.8.1") libraryDependencies ++= Seq( - "org.eclipse.jgit" % "org.eclipse.jgit" % "4.6.0.201612231935-r", + "org.eclipse.jgit" % "org.eclipse.jgit" % "4.6.1.201703071140-r", "org.slf4j" % "slf4j-nop" % "1.7.32", "com.googlecode.java-diff-utils" % "diffutils" % "1.3.0" ) From f6c3ae18ce520777b83e19207dd89572e2a035a7 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Thu, 16 Dec 2021 16:20:42 +0100 Subject: [PATCH 470/769] Update jol-core to 0.16 in 2.12.x --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index cdc32dbd094f..de90677dd037 100644 --- a/build.sbt +++ b/build.sbt @@ -43,7 +43,7 @@ val scalaParserCombinatorsDep = scalaDep("org.scala-lang.modules", "scala-par val junitDep = "junit" % "junit" % "4.12" val junitInterfaceDep = "com.novocode" % "junit-interface" % "0.11" % Test val scalacheckDep = "org.scalacheck" %% "scalacheck" % "1.14.3" % Test -val jolDep = "org.openjdk.jol" % "jol-core" % "0.13" +val jolDep = "org.openjdk.jol" % "jol-core" % "0.16" val asmDep = "org.scala-lang.modules" % "scala-asm" % versionProps("scala-asm.version") val jlineDep = "jline" % "jline" % versionProps("jline.version") val jansiDep = "org.fusesource.jansi" % "jansi" % "1.12" From 9efb9688b56ce385709161b5d820cf18cda65335 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Thu, 16 Dec 2021 16:21:25 +0100 Subject: [PATCH 471/769] Update sbt-jmh to 0.4.3 in 2.12.x --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 5f9a27ca4b72..5872ba32c7ca 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -32,4 +32,4 @@ concurrentRestrictions in Global := Seq( addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.0.0") -addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.2") +addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.3") From c5ec562fc52e21b773a713cf4095fcfd5381e933 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Thu, 16 Dec 2021 16:20:53 +0100 Subject: [PATCH 472/769] Update sbt to 1.5.7 in 2.12.x --- project/build.properties | 2 +- scripts/common | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/project/build.properties b/project/build.properties index 
bb3a9b7dc6d2..baf5ff3ec78b 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.5.6 +sbt.version=1.5.7 diff --git a/scripts/common b/scripts/common index 474161e3fd6d..aaaa4f4750ca 100644 --- a/scripts/common +++ b/scripts/common @@ -15,7 +15,7 @@ rm -rf "$WORKSPACE/resolutionScratch_" mkdir -p "$WORKSPACE/resolutionScratch_" SBT_CMD=${SBT_CMD-sbt} -SBT_CMD="$SBT_CMD -sbt-version 1.5.6" +SBT_CMD="$SBT_CMD -sbt-version 1.5.7" # repo to publish builds integrationRepoUrl=${integrationRepoUrl-"https://scala-ci.typesafe.com/artifactory/scala-integration/"} From e0228a168957336a76eb2eab218ad53b676b8f3d Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Thu, 16 Dec 2021 14:33:58 -0800 Subject: [PATCH 473/769] Revert "GroupedIterator improvements" This reverts commit d0474076619bcc64c3cf13a251afb4c056d679a6. Reverts PR #9818; see scala/community-build#1519 for details on the community build failures --- src/library/scala/collection/Iterator.scala | 80 ++++++++++++--------- 1 file changed, 46 insertions(+), 34 deletions(-) diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala index cc6503ac3b92..1970d3babb62 100644 --- a/src/library/scala/collection/Iterator.scala +++ b/src/library/scala/collection/Iterator.scala @@ -146,18 +146,19 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite } /** A flexible iterator for transforming an `Iterator[A]` into an - * `Iterator[Seq[A]]`, with configurable sequence size, step, and + * Iterator[Seq[A]], with configurable sequence size, step, and * strategy for dealing with elements which don't fit evenly. * * Typical uses can be achieved via methods `grouped` and `sliding`. */ class GroupedIterator[B >: A](self: Iterator[B], size: Int, step: Int) extends AbstractIterator[immutable.Seq[B]] { + require(size >= 1 && step >= 1, f"size=$size%d and step=$step%d, but both must be positive") - private[this] val group = new ArrayBuffer[B](size) // the group - private[this] var filled = false // whether the group is "hot" - private[this] var partial = true // whether we deliver short sequences - private[this] var pad: () => B = null // what to pad short sequences with + private[this] var buffer: ArrayBuffer[B] = ArrayBuffer() // the buffer + private[this] var filled = false // whether the buffer is "hot" + private[this] var _partial = true // whether we deliver short sequences + private[this] var pad: Option[() => B] = None // what to pad short sequences with /** Public functions which can be used to configure the iterator before use. * @@ -170,10 +171,9 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * @note This method is mutually exclusive with `withPartial(true)`. */ def withPadding(x: => B): this.type = { - pad = () => x + pad = Some(() => x) this } - /** Public functions which can be used to configure the iterator before use. * * Select whether the last segment may be returned with less than `size` @@ -186,9 +186,10 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * @note This method is mutually exclusive with `withPadding`. 
*/ def withPartial(x: Boolean): this.type = { - partial = x - // reset pad since otherwise it will take precedence - if (partial) pad = null + _partial = x + if (_partial) // reset pad since otherwise it will take precedence + pad = None + this } @@ -199,8 +200,8 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * so a subsequent self.hasNext would not test self after the * group was consumed. */ - private def takeDestructively(size: Int): ArrayBuffer[B] = { - val buf = new ArrayBuffer[B](size) + private def takeDestructively(size: Int): Seq[B] = { + val buf = new ArrayBuffer[B] var i = 0 // The order of terms in the following condition is important // here as self.hasNext could be blocking @@ -211,36 +212,45 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite buf } + private def padding(x: Int) = immutable.ArraySeq.untagged.fill(x)(pad.get()) private def gap = (step - size) max 0 private def go(count: Int) = { - val prevSize = group.size + val prevSize = buffer.size def isFirst = prevSize == 0 - val extension = takeDestructively(count) // If there is padding defined we insert it immediately // so the rest of the code can be oblivious - var shortBy = count - extension.size - if (pad != null) while (shortBy > 0) { - extension += pad() - shortBy -= 1 + val xs: Seq[B] = { + val res = takeDestructively(count) + // was: extra checks so we don't calculate length unless there's reason + // but since we took the group eagerly, just use the fast length + val shortBy = count - res.length + if (shortBy > 0 && pad.isDefined) res ++ padding(shortBy) else res } + lazy val len = xs.length + lazy val incomplete = len < count - val extSize = extension.size // if 0 elements are requested, or if the number of newly obtained // elements is less than the gap between sequences, we are done. 
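// Illustrative, hypothetical snippet (names invented) showing the user-visible
// semantics that the size/step/gap bookkeeping in this class implements.
object GroupedIteratorSketch {
  def demo(): Unit = {
    val xs = List(1, 2, 3, 4, 5)
    // grouped: a short trailing group is kept by default,
    assert(xs.iterator.grouped(2).toList == List(Seq(1, 2), Seq(3, 4), Seq(5)))
    // dropped after withPartial(false),
    assert(xs.iterator.grouped(2).withPartial(false).toList == List(Seq(1, 2), Seq(3, 4)))
    // or filled up after withPadding (mutually exclusive with withPartial(true)).
    assert(xs.iterator.grouped(2).withPadding(0).toList == List(Seq(1, 2), Seq(3, 4), Seq(5, 0)))
    // sliding uses the same machinery, here with size 3 and step 2.
    assert(xs.iterator.sliding(3, 2).toList == List(Seq(1, 2, 3), Seq(3, 4, 5)))
  }
}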
- def deliver(howMany: Int) = - (howMany > 0 && (isFirst || extSize > gap)) && { - if (!isFirst) group.dropInPlace(step min prevSize) - val available = if (isFirst) extSize else howMany min (extSize - gap) - group ++= extension.takeRightInPlace(available) + def deliver(howMany: Int) = { + (howMany > 0 && (isFirst || len > gap)) && { + if (!isFirst) + buffer dropInPlace (step min prevSize) + + val available = + if (isFirst) len + else howMany min (len - gap) + + buffer ++= (xs takeRight available) filled = true true } + } - if (extension.isEmpty) false // self ran out of elements - else if (partial) deliver(extSize min size) // if partial is true, we deliver regardless - else if (extSize < count) false // !partial && extSize < count means no more seqs - else if (isFirst) deliver(extSize) // first element + if (xs.isEmpty) false // self ran out of elements + else if (_partial) deliver(len min size) // if _partial is true, we deliver regardless + else if (incomplete) false // !_partial && incomplete means no more seqs + else if (isFirst) deliver(len) // first element else deliver(step min size) // the typical case } @@ -248,18 +258,20 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite private def fill(): Boolean = { if (!self.hasNext) false // the first time we grab size, but after that we grab step - else if (group.isEmpty) go(size) + else if (buffer.isEmpty) go(size) else go(step) } - def hasNext: Boolean = filled || fill() - + def hasNext = filled || fill() @throws[NoSuchElementException] def next(): immutable.Seq[B] = { - if (!filled) fill() - if (!filled) Iterator.empty.next() + if (!filled) + fill() + + if (!filled) + throw new NoSuchElementException("next on empty iterator") filled = false - immutable.ArraySeq.unsafeWrapArray(group.toArray[Any]).asInstanceOf[immutable.ArraySeq[B]] + immutable.ArraySeq.unsafeWrapArray(buffer.toArray[Any]).asInstanceOf[immutable.ArraySeq[B]] } } From a80c014e9e0372981434f12e0188a0749bf755e5 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Fri, 17 Dec 2021 03:06:47 +0100 Subject: [PATCH 474/769] Update sbt-mima-plugin to 1.0.1 in 2.12.x --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 33f1e616c72d..7abd38405aea 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -18,7 +18,7 @@ buildInfoKeys := Seq[BuildInfoKey](buildClasspath) buildInfoPackage := "scalabuild" -addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.9.2") +addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.0.1") libraryDependencies ++= Seq( "org.eclipse.jgit" % "org.eclipse.jgit" % "4.6.0.201612231935-r", From 158163a5ad0fc22fa205d98cc3d4ed09fb639190 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Fri, 17 Dec 2021 11:31:19 +0100 Subject: [PATCH 475/769] Update ant to 1.10.12 in 2.12.x --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index e8ccb5d2e8a1..16aafd377e68 100644 --- a/build.sbt +++ b/build.sbt @@ -47,7 +47,7 @@ val jolDep = "org.openjdk.jol" % "jol-core" % " val asmDep = "org.scala-lang.modules" % "scala-asm" % versionProps("scala-asm.version") val jlineDep = "jline" % "jline" % versionProps("jline.version") val jansiDep = "org.fusesource.jansi" % "jansi" % "1.12" -val antDep = "org.apache.ant" % "ant" % "1.9.16" +val antDep = "org.apache.ant" % "ant" % "1.10.12" val testInterfaceDep = "org.scala-sbt" % "test-interface" % "1.0" val diffUtilsDep = "com.googlecode.java-diff-utils" % "diffutils" % 
"1.3.0" From ff1c6993d7628eba1dc97f80eebda5c1206e231a Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Fri, 17 Dec 2021 11:31:36 +0100 Subject: [PATCH 476/769] Update org.eclipse.jgit to 4.11.9.201909030838-r in 2.12.x --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 95970efc5b5b..845d6b348b8c 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -21,7 +21,7 @@ buildInfoPackage := "scalabuild" addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.9.2") libraryDependencies ++= Seq( - "org.eclipse.jgit" % "org.eclipse.jgit" % "4.6.1.201703071140-r", + "org.eclipse.jgit" % "org.eclipse.jgit" % "4.11.9.201909030838-r", "org.slf4j" % "slf4j-nop" % "1.7.32", "com.googlecode.java-diff-utils" % "diffutils" % "1.3.0" ) From 015639366b562b18fb7839292ff47aeffae67a92 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Fri, 17 Dec 2021 09:01:08 -0800 Subject: [PATCH 477/769] Update jackson-annotations, jackson-core to 2.13.0 in 2.12.x --- build.sbt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/build.sbt b/build.sbt index fa31b81e3412..e9e173adc525 100644 --- a/build.sbt +++ b/build.sbt @@ -411,8 +411,8 @@ lazy val compilerOptionsExporter = Project("compilerOptionsExporter", file(".") .settings(disablePublishing) .settings( libraryDependencies ++= Seq( - "com.fasterxml.jackson.core" % "jackson-core" % "2.9.10", - "com.fasterxml.jackson.core" % "jackson-annotations" % "2.9.10", + "com.fasterxml.jackson.core" % "jackson-core" % "2.13.0", + "com.fasterxml.jackson.core" % "jackson-annotations" % "2.13.0", "com.fasterxml.jackson.core" % "jackson-databind" % "2.9.10.8", "com.fasterxml.jackson.dataformat" % "jackson-dataformat-yaml" % "2.9.10", "com.fasterxml.jackson.module" %% "jackson-module-scala" % "2.9.10" From 9272e6645b8a51d79151ff904f2016bdeba56640 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Fri, 17 Dec 2021 19:04:47 +0100 Subject: [PATCH 478/769] Update jackson-module-scala to 2.13.0 in 2.12.x --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index fa31b81e3412..89854f525ac4 100644 --- a/build.sbt +++ b/build.sbt @@ -415,7 +415,7 @@ lazy val compilerOptionsExporter = Project("compilerOptionsExporter", file(".") "com.fasterxml.jackson.core" % "jackson-annotations" % "2.9.10", "com.fasterxml.jackson.core" % "jackson-databind" % "2.9.10.8", "com.fasterxml.jackson.dataformat" % "jackson-dataformat-yaml" % "2.9.10", - "com.fasterxml.jackson.module" %% "jackson-module-scala" % "2.9.10" + "com.fasterxml.jackson.module" %% "jackson-module-scala" % "2.13.0" ) ) From d66bd0be171eebb2b56c8092a21495972c3c81a0 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Fri, 17 Dec 2021 12:52:35 -0800 Subject: [PATCH 479/769] Avoid Option allocation --- src/library/scala/collection/Iterator.scala | 32 +++++++++------------ 1 file changed, 14 insertions(+), 18 deletions(-) diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala index 1970d3babb62..99e095ba7fcf 100644 --- a/src/library/scala/collection/Iterator.scala +++ b/src/library/scala/collection/Iterator.scala @@ -146,7 +146,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite } /** A flexible iterator for transforming an `Iterator[A]` into an - * Iterator[Seq[A]], with configurable sequence size, step, and + * `Iterator[Seq[A]]`, with configurable sequence size, step, and * strategy for dealing with elements which don't 
fit evenly. * * Typical uses can be achieved via methods `grouped` and `sliding`. @@ -155,10 +155,10 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite require(size >= 1 && step >= 1, f"size=$size%d and step=$step%d, but both must be positive") - private[this] var buffer: ArrayBuffer[B] = ArrayBuffer() // the buffer + private[this] val buffer = ArrayBuffer.empty[B] // the buffer private[this] var filled = false // whether the buffer is "hot" - private[this] var _partial = true // whether we deliver short sequences - private[this] var pad: Option[() => B] = None // what to pad short sequences with + private[this] var partial = true // whether we deliver short sequences + private[this] var pad: () => B = null // what to pad short sequences with /** Public functions which can be used to configure the iterator before use. * @@ -171,7 +171,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * @note This method is mutually exclusive with `withPartial(true)`. */ def withPadding(x: => B): this.type = { - pad = Some(() => x) + pad = () => x this } /** Public functions which can be used to configure the iterator before use. @@ -186,10 +186,8 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite * @note This method is mutually exclusive with `withPadding`. */ def withPartial(x: Boolean): this.type = { - _partial = x - if (_partial) // reset pad since otherwise it will take precedence - pad = None - + partial = x + if (partial) pad = null // reset pad since otherwise it will take precedence this } @@ -212,7 +210,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite buf } - private def padding(x: Int) = immutable.ArraySeq.untagged.fill(x)(pad.get()) + private def padding(x: Int) = immutable.ArraySeq.untagged.fill(x)(pad()) private def gap = (step - size) max 0 private def go(count: Int) = { @@ -225,7 +223,7 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite // was: extra checks so we don't calculate length unless there's reason // but since we took the group eagerly, just use the fast length val shortBy = count - res.length - if (shortBy > 0 && pad.isDefined) res ++ padding(shortBy) else res + if (shortBy > 0 && pad != null) res ++ padding(shortBy) else res } lazy val len = xs.length lazy val incomplete = len < count @@ -248,8 +246,8 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite } if (xs.isEmpty) false // self ran out of elements - else if (_partial) deliver(len min size) // if _partial is true, we deliver regardless - else if (incomplete) false // !_partial && incomplete means no more seqs + else if (partial) deliver(len min size) // if partial is true, we deliver regardless + else if (incomplete) false // !partial && incomplete means no more seqs else if (isFirst) deliver(len) // first element else deliver(step min size) // the typical case } @@ -263,13 +261,11 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite } def hasNext = filled || fill() + @throws[NoSuchElementException] def next(): immutable.Seq[B] = { - if (!filled) - fill() - - if (!filled) - throw new NoSuchElementException("next on empty iterator") + if (!filled) fill() + if (!filled) Iterator.empty.next() filled = false immutable.ArraySeq.unsafeWrapArray(buffer.toArray[Any]).asInstanceOf[immutable.ArraySeq[B]] } From f549f701066ada2e986145e913da16fe80659dc7 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Fri, 
17 Dec 2021 13:19:03 -0800 Subject: [PATCH 480/769] Simplify grouped next/hasNext --- src/library/scala/collection/Iterator.scala | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala index 99e095ba7fcf..bb549795c26f 100644 --- a/src/library/scala/collection/Iterator.scala +++ b/src/library/scala/collection/Iterator.scala @@ -253,19 +253,17 @@ trait Iterator[+A] extends IterableOnce[A] with IterableOnceOps[A, Iterator, Ite } // fill() returns false if no more sequences can be produced - private def fill(): Boolean = { - if (!self.hasNext) false + private def fill(): Boolean = filled || self.hasNext && { // the first time we grab size, but after that we grab step - else if (buffer.isEmpty) go(size) - else go(step) + val need = if (buffer.isEmpty) size else step + go(need) } - def hasNext = filled || fill() + def hasNext = fill() @throws[NoSuchElementException] def next(): immutable.Seq[B] = { - if (!filled) fill() - if (!filled) Iterator.empty.next() + if (!fill()) Iterator.empty.next() filled = false immutable.ArraySeq.unsafeWrapArray(buffer.toArray[Any]).asInstanceOf[immutable.ArraySeq[B]] } From c60c2076192efefa8d2d97ba6011bffb5fc5406d Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Sat, 18 Dec 2021 18:33:06 -0800 Subject: [PATCH 481/769] upgrade all jackson libraries together because the Steward was opening annoying separate PRs --- build.sbt | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/build.sbt b/build.sbt index e658cb7ea2c8..088192420e28 100644 --- a/build.sbt +++ b/build.sbt @@ -410,13 +410,16 @@ lazy val compilerOptionsExporter = Project("compilerOptionsExporter", file(".") .settings(disableDocs) .settings(disablePublishing) .settings( - libraryDependencies ++= Seq( - "com.fasterxml.jackson.core" % "jackson-core" % "2.13.0", - "com.fasterxml.jackson.core" % "jackson-annotations" % "2.13.0", - "com.fasterxml.jackson.core" % "jackson-databind" % "2.9.10.8", - "com.fasterxml.jackson.dataformat" % "jackson-dataformat-yaml" % "2.9.10", - "com.fasterxml.jackson.module" %% "jackson-module-scala" % "2.13.0" - ) + libraryDependencies ++= { + val jacksonVersion = "2.13.1" + Seq( + "com.fasterxml.jackson.core" % "jackson-core" % jacksonVersion, + "com.fasterxml.jackson.core" % "jackson-annotations" % jacksonVersion, + "com.fasterxml.jackson.core" % "jackson-databind" % jacksonVersion, + "com.fasterxml.jackson.dataformat" % "jackson-dataformat-yaml" % jacksonVersion, + "com.fasterxml.jackson.module" %% "jackson-module-scala" % jacksonVersion, + ) + } ) lazy val compiler = configureAsSubproject(project) From 40266d519258b152c6488666197fcaf98c58ec02 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Mon, 20 Dec 2021 06:20:40 -0800 Subject: [PATCH 482/769] Tweak doc for ordering of SeqMap --- .../scala/collection/immutable/Map.scala | 12 ++++----- .../scala/collection/immutable/SeqMap.scala | 26 +++++++++---------- 2 files changed, 18 insertions(+), 20 deletions(-) diff --git a/src/library/scala/collection/immutable/Map.scala b/src/library/scala/collection/immutable/Map.scala index 415f01035136..e33ac07ce9a2 100644 --- a/src/library/scala/collection/immutable/Map.scala +++ b/src/library/scala/collection/immutable/Map.scala @@ -92,13 +92,11 @@ trait MapOps[K, +V, +CC[X, +Y] <: MapOps[X, Y, CC, _], +C <: MapOps[K, V, CC, C] @`inline` final override def -- (keys: IterableOnce[K]): C = removedAll(keys) /** Creates a new map obtained by updating this 
map with a given key/value pair. - * @param key the key - * @param value the value - * @tparam V1 the type of the added value - * @return A new map with the new key/value mapping added to this map. - * - * @inheritdoc - */ + * @param key the key + * @param value the value + * @tparam V1 the type of the added value + * @return A new map with the new key/value mapping added to this map. + */ def updated[V1 >: V](key: K, value: V1): CC[K, V1] /** diff --git a/src/library/scala/collection/immutable/SeqMap.scala b/src/library/scala/collection/immutable/SeqMap.scala index 013697d64cce..ff63ababe2a3 100644 --- a/src/library/scala/collection/immutable/SeqMap.scala +++ b/src/library/scala/collection/immutable/SeqMap.scala @@ -16,19 +16,19 @@ package immutable import scala.collection.mutable.{Builder, ReusableBuilder} -/** - * A generic trait for ordered immutable maps. Concrete classes have to provide - * functionality for the abstract methods in `SeqMap`. - * - * Note that when checking for equality [[SeqMap]] does not take into account - * ordering. - * - * @tparam K the type of the keys contained in this linked map. - * @tparam V the type of the values associated with the keys in this linked map. - * - * @define coll immutable seq map - * @define Coll `immutable.SeqMap` - */ +/** A generic trait for ordered immutable maps. Concrete classes have to provide + * functionality for the abstract methods in `SeqMap`. + * + * Methods that return a new map, such as [[removed]] and [[updated]], must preserve ordering. + * + * Note that when checking for equality, [[SeqMap]] does not take ordering into account. + * + * @tparam K the type of the keys contained in this linked map. + * @tparam V the type of the values associated with the keys in this linked map. + * + * @define coll immutable seq map + * @define Coll `immutable.SeqMap` + */ trait SeqMap[K, +V] extends Map[K, V] From 955ed27508d5da706ba158428cff1377b8e527fd Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Mon, 20 Dec 2021 15:46:29 -0800 Subject: [PATCH 483/769] [backport] cut down on warning noise in 2.12 build 2.13.x already has these same changes --- build.sbt | 3 +++ src/partest/scala/tools/partest/nest/UnsafeAccess.java | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 088192420e28..ee9b4d84f332 100644 --- a/build.sbt +++ b/build.sbt @@ -168,6 +168,8 @@ lazy val commonSettings = instanceSettings ++ clearSourceAndResourceDirectories (run / fork) := true, (run / connectInput) := true, (Compile / scalacOptions) += "-Ywarn-unused:imports", + // work around https://github.com/scala/bug/issues/11534 + Compile / scalacOptions += "-Wconf:cat=unchecked&msg=The outer reference in this type test cannot be checked at run time.:s", (Compile / doc / scalacOptions) ++= Seq( "-doc-footer", "epfl", "-diagrams", @@ -606,6 +608,7 @@ lazy val partest = configureAsSubproject(project) .settings( name := "scala-partest", description := "Scala Compiler Testing Tool", + Compile / javacOptions += "-XDenableSunApiLintControl", libraryDependencies ++= List(testInterfaceDep, diffUtilsDep, junitDep), pomDependencyExclusions ++= List((organization.value, "scala-repl-jline-embedded"), (organization.value, "scala-compiler-doc")), fixPom( diff --git a/src/partest/scala/tools/partest/nest/UnsafeAccess.java b/src/partest/scala/tools/partest/nest/UnsafeAccess.java index dadb6d189ca4..b28060d4f1d3 100644 --- a/src/partest/scala/tools/partest/nest/UnsafeAccess.java +++ b/src/partest/scala/tools/partest/nest/UnsafeAccess.java @@ -14,7 
+14,7 @@ import java.lang.reflect.Field; -@SuppressWarnings("unsafe") +@SuppressWarnings("sunapi") // also requires passing -XDenableSunApiLintControl to javac public class UnsafeAccess { public final static sun.misc.Unsafe U; From 4c96ccde321a26dab6a3c95eb91c3dfe60ebec1e Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Mon, 20 Dec 2021 18:51:42 -0800 Subject: [PATCH 484/769] [backport] eliminate warning when running partest --- build.sbt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/build.sbt b/build.sbt index 088192420e28..43bba618cccd 100644 --- a/build.sbt +++ b/build.sbt @@ -3,7 +3,7 @@ * * What you see below is very much work-in-progress. The following features are implemented: * - Compiling all classses for the compiler and library ("compile" in the respective subprojects) - * - Running JUnit ("junit/test"), ScalaCheck ("scalacheck/test"), and partest ("test/it:test") tests + * - Running JUnit ("junit/test"), ScalaCheck ("scalacheck/test"), and partest ("test/IntegrationTest/test") tests * - Creating build/quick with all compiled classes and launcher scripts ("dist/mkQuick") * - Creating build/pack with all JARs and launcher scripts ("dist/mkPack") * - Building all scaladoc sets ("doc") @@ -1143,7 +1143,7 @@ def generateServiceProviderResources(services: (String, String)*): Setting[_] = // Add tab completion to partest commands += Command("partest")(_ => PartestUtil.partestParser((ThisBuild / baseDirectory).value, (ThisBuild / baseDirectory).value / "test")) { (state, parsed) => - ("test/it:testOnly -- " + parsed) :: state + ("test/IntegrationTest/testOnly -- " + parsed) :: state } // Watch the test files also so ~partest triggers on test case changes From cec12b4b342f153dbb167f513f3990924904ebca Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Mon, 20 Dec 2021 18:52:00 -0800 Subject: [PATCH 485/769] JQuery 3.6.0 (was 3.5.1) --- project/ScaladocSettings.scala | 2 +- spec/_layouts/default.yml | 2 +- spec/_layouts/toc.yml | 2 +- src/intellij/scala.ipr.SAMPLE | 10 +++++----- .../scala/tools/nsc/doc/html/HtmlFactory.scala | 2 +- 5 files changed, 9 insertions(+), 9 deletions(-) diff --git a/project/ScaladocSettings.scala b/project/ScaladocSettings.scala index 1ac6ed7a1916..eb6fe3969863 100644 --- a/project/ScaladocSettings.scala +++ b/project/ScaladocSettings.scala @@ -7,7 +7,7 @@ object ScaladocSettings { // when this changes, the integrity check in HtmlFactory.scala also needs updating val webjarResources = Seq( - "org.webjars" % "jquery" % "3.5.1" + "org.webjars" % "jquery" % "3.6.0" ) def extractResourcesFromWebjar = Def.task { diff --git a/spec/_layouts/default.yml b/spec/_layouts/default.yml index 419581efd82a..5c78a1d09c38 100644 --- a/spec/_layouts/default.yml +++ b/spec/_layouts/default.yml @@ -16,7 +16,7 @@ } }); - +