diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs new file mode 100644 index 000000000000..118a334b383a --- /dev/null +++ b/.git-blame-ignore-revs @@ -0,0 +1 @@ +46a26945a172429740ebdd1fc83517130670080b diff --git a/.github/ISSUE_TEMPLATE/bug.md b/.github/ISSUE_TEMPLATE/bug.md index 6e53805900c4..8feba58c44d7 100644 --- a/.github/ISSUE_TEMPLATE/bug.md +++ b/.github/ISSUE_TEMPLATE/bug.md @@ -9,19 +9,33 @@ assignees: '' ## Compiler version -If you're not sure what version you're using, run `print scalaVersion` from sbt -(if you're running scalac manually, use `scalac -version` instead). +If you're not sure which version you're using, run `print scalaVersion` from sbt. +(If you're running scalac manually, use `scalac -version` instead.) + +If possible, check if your issue appears in the nightly version of the compiler! For example, in Scala CLI (the `scala`/`scala-cli` runner script), you can use `//> using scala 3.nightly` (or `-S 3.nightly` from the command line) to grab the latest one. ## Minimized code -```Scala -println("hello, world") +```scala +//> using scala 3.7.0 +//> using options -Wall -Werror +//> using dep com.outr::scribe:3.16.1 + +@main def test = println("hello, world") ``` ## Output diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index db2a66b8b234..c8b5617fc534 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -26,10 +26,10 @@ on: - cron: '0 3 * * *' # Every day at 3 AM workflow_dispatch: -# Cancels any in-progress runs within the same group identified by workflow name and GH reference (branch or tag) +# Cancels any in-progress runs within the same group identified by workflow name and GH reference (branch or tag) # For example it would: # - terminate previous PR CI execution after pushing more changes to the same PR branch -# - terminate previous on-push CI run after merging new PR to main +# - terminate previous on-push CI run after merging new PR to main concurrency: group: ${{ github.workflow }}-${{ github.ref }} cancel-in-progress: ${{ github.ref != 'refs/heads/main' }} @@ -692,7 +692,7 @@ jobs: - name: Publish Nightly if: "steps.not_yet_published.outcome == 'success'" run: | - ./project/scripts/sbtPublish ";project scala3-bootstrapped ;publishSigned ;sonatypeBundleRelease" + ./project/scripts/sbtPublish ";project scala3-bootstrapped ;publishSigned ;sonaRelease" nightly_documentation: runs-on: [self-hosted, Linux] @@ -862,7 +862,7 @@ jobs: scala3-${{ env.RELEASE_TAG }}.msi - name: Publish Release - run: ./project/scripts/sbtPublish ";project scala3-bootstrapped ;publishSigned ;sonatypeBundleUpload" + run: ./project/scripts/sbtPublish ";project scala3-bootstrapped ;publishSigned ;sonaUpload" open_issue_on_failure: diff --git a/.github/workflows/language-reference.yaml b/.github/workflows/language-reference.yaml index d79f4d029a77..61a2768c51da 100644 --- a/.github/workflows/language-reference.yaml +++ b/.github/workflows/language-reference.yaml @@ -70,22 +70,3 @@ jobs: fi cd .. - backport-to-main: - name: Create pull request with backport to main - permissions: - pull-requests: write # for repo-sync/pull-request to create a PR - runs-on: ubuntu-latest - if: github.event_name == 'push' || github.event_name == 'workflow_dispatch' - steps: - - uses: actions/checkout@v4 - - uses: repo-sync/pull-request@v2 - with: - destination_branch: main - pr_label: area:documentation - pr_title: Sync with the stable documentation branch - pr_body: | - This pull request is syncing the main with changes from language-reference-stable. 
- - It was created automatically after ${{ github.event.head_commit.id }} by @${{ github.event.head_commit.author.username }} - pr_assignee: ${{ github.event.head_commit.author.username }} - diff --git a/.github/workflows/lts-backport.yaml b/.github/workflows/lts-backport.yaml index 33cdc8d07f6c..2cc3b5d1df48 100644 --- a/.github/workflows/lts-backport.yaml +++ b/.github/workflows/lts-backport.yaml @@ -7,7 +7,8 @@ on: jobs: add-to-backporting-project: - if: "!contains(github.event.push.head_commit.message, '[Next only]')" + if: "!contains(github.event.push.head_commit.message, '[Next only]') && + github.repository == 'scala/scala3'" runs-on: ubuntu-latest steps: @@ -15,7 +16,7 @@ jobs: with: fetch-depth: 0 - uses: coursier/cache-action@v6 - - uses: VirtusLab/scala-cli-setup@v1.7.0 + - uses: VirtusLab/scala-cli-setup@v1.8.0 - run: scala-cli ./project/scripts/addToBackportingProject.scala -- ${{ github.sha }} env: GRAPHQL_API_TOKEN: ${{ secrets.GRAPHQL_API_TOKEN }} diff --git a/.github/workflows/publish-sdkman.yml b/.github/workflows/publish-sdkman.yml index c0f29db7f0b0..fbbada2a1a70 100644 --- a/.github/workflows/publish-sdkman.yml +++ b/.github/workflows/publish-sdkman.yml @@ -46,7 +46,7 @@ jobs: - platform: WINDOWS_64 archive : 'scala3-${{ inputs.version }}-x86_64-pc-win32.zip' steps: - - uses: sdkman/sdkman-release-action@a60691d56279724b4c9ff0399c0ae21d641ab75e + - uses: sdkman/sdkman-release-action@2800d4359ae097a99afea7e0370f0c6e726182a4 with: CONSUMER-KEY : ${{ secrets.CONSUMER-KEY }} CONSUMER-TOKEN : ${{ secrets.CONSUMER-TOKEN }} diff --git a/.jvmopts b/.jvmopts index a50abf36aa42..4df4f826d1db 100644 --- a/.jvmopts +++ b/.jvmopts @@ -1,5 +1,5 @@ -Xss1m --Xms512m --Xmx4096m +-Xms1024m +-Xmx8192m -XX:MaxInlineLevel=35 -XX:ReservedCodeCacheSize=512m diff --git a/changelogs/3.7.0-RC1.md b/changelogs/3.7.0-RC1.md new file mode 100644 index 000000000000..e895cfc324e0 --- /dev/null +++ b/changelogs/3.7.0-RC1.md @@ -0,0 +1,266 @@ +# Highlights of the release + +- Add `@scala.annotation.internal.preview` annotation and `-preview` flag. [#22317](https://github.com/scala/scala3/pull/22317) +- Make SIP-52 `@publicInBinary` a stable feature in 3.7 [#22591](https://github.com/scala/scala3/pull/22591) +- Make SIP-58 - `Named Tuples` a stable feature in 3.7 [#22753](https://github.com/scala/scala3/pull/22753) +- Make SIP-62 - `Better Fors` a preview feature in 3.7 [#22776](https://github.com/scala/scala3/pull/22776) +- Implement SIP-61 `@unroll` annotation as experimental [#21693](https://github.com/scala/scala3/pull/21693) +- Upgrade Scala 2 Standard Library to 2.13.16 [#22386](https://github.com/scala/scala3/pull/22386) +- Upgrade Scala.js to 1.18.1. 
[#22397](https://github.com/scala/scala3/pull/22397) +- Upgrade Scala CLI to 1.7.0 [#22735](https://github.com/scala/scala3/pull/22735) +- Expression Compiler is now part of Scala 3 compiler [#22597](https://github.com/scala/scala3/pull/22597) +- Quotes API: Added `apply` methods to import selectors [#22457](https://github.com/scala/scala3/pull/22457) +- Quotes API: Implement experimental `summonIgnoring` method [#22417](https://github.com/scala/scala3/pull/22417) +- Quotes API: Add class parameters, flags, and privateWithin and annotations to experimental `newClass` methods [#21880](https://github.com/scala/scala3/pull/21880) +- Experimental: Capture Calculus - Inferring `tracked` modifier [#21628](https://github.com/scala/scala3/pull/21628) +- Presentation Compiler: Show inferred type on holes in hover [#21423](https://github.com/scala/scala3/pull/21423) +- JVM Backend: Fix #15736 blocking Scala 3 on Android [#22632](https://github.com/scala/scala3/pull/22632) +- REPL: add flag to quit after evaluating init script [#22636](https://github.com/scala/scala3/pull/22636) +- REPL: implement :jar (deprecate :require) [#22343](https://github.com/scala/scala3/pull/22343) +- Linting: Improvements to -Wunused and -Wconf [#20894](https://github.com/scala/scala3/pull/20894) +- Implicit parameters warn at call site without `using` keyword [#22441](https://github.com/scala/scala3/pull/22441) +- Minimal support for dependent case classes [#21698](https://github.com/scala/scala3/pull/21698) + +# Other changes and fixes + +## Annotations + +- Lift arguments of explicitly constructed annotations [#22553](https://github.com/scala/scala3/pull/22553) +- Fix copy of annotation on `@main` methods [#22582](https://github.com/scala/scala3/pull/22582) +- `@publicInBinary` has now effect on secondary constructors [#22630](https://github.com/scala/scala3/pull/22630) +- Fix mapping of annotations [#22407](https://github.com/scala/scala3/pull/22407) + +## Backend: Scala.js + +- Emit `js.NewArray` IR nodes when possible. 
[#22446](https://github.com/scala/scala3/pull/22446) + +## Classpath + +- Fix empty ClassPath attribute in one or more classpath jars causes crash [#22462](https://github.com/scala/scala3/pull/22462) + +## Documentation + +- Improve the usage of inclusive language [#22360](https://github.com/scala/scala3/pull/22360) + +## Erasure + +- Handle type aliases in contextFunctionResultTypeAfter [#21517](https://github.com/scala/scala3/pull/21517) +- Align erasure of `Array[Nothing]` and `Array[Null]` with Scala 2 [#22517](https://github.com/scala/scala3/pull/22517) + +## Desugering + +- Under `betterFors` don't drop the trailing `map` if it would result in a different type (also drop `_ => ()`) [#22619](https://github.com/scala/scala3/pull/22619) + +## Experimental: Capture Checking + +- Canonicalize capture variable subtype comparisons [#22299](https://github.com/scala/scala3/pull/22299) +- Permit Capture Refs for Uninitialized Type and Term Params in BiTypeMap [#22451](https://github.com/scala/scala3/pull/22451) +- Fix maximal capability handling and expand aliases [#22341](https://github.com/scala/scala3/pull/22341) + +## Experimental: Modularity + +- Widen skolem types when adding parent refinements [#22488](https://github.com/scala/scala3/pull/22488) + +## Experimental: Global Initialization Checker + +- Refactor the abstract domain of global init checker to compile http4s [#22179](https://github.com/scala/scala3/pull/22179) +- Fix global init checking crash when using a value defined in by-name closure [#22625](https://github.com/scala/scala3/pull/22625) + +## Experimentals + +- Expand value references to packages to their underlying package objects [#22011](https://github.com/scala/scala3/pull/22011) + +## Implicits + +- Restrict implicit args to using [#22458](https://github.com/scala/scala3/pull/22458) + +## Linting + +- Suppress spurious Suppression [#22383](https://github.com/scala/scala3/pull/22383) +- CheckUnused checks span.exists before testing its parts [#22504](https://github.com/scala/scala3/pull/22504) +- Don't warn retainedBody [#22510](https://github.com/scala/scala3/pull/22510) +- Handle Typeable [#22663](https://github.com/scala/scala3/pull/22663) +- Nowarn public implicit val class params [#22664](https://github.com/scala/scala3/pull/22664) +- Exclude synthetic this.m, Any.m from import lookup [#22695](https://github.com/scala/scala3/pull/22695) +- Warn unused member of anonymous class [#22729](https://github.com/scala/scala3/pull/22729) +- Ignore params to default arg getters [#22749](https://github.com/scala/scala3/pull/22749) +- Lazy val def member is pattern var [#22750](https://github.com/scala/scala3/pull/22750) +- Restore resolving prefixes of implicit Ident [#22751](https://github.com/scala/scala3/pull/22751) +- No warning for parameter of overriding method [#22757](https://github.com/scala/scala3/pull/22757) +- Dealias before checking for member in lint [#22708](https://github.com/scala/scala3/pull/22708) +- Warn on bad extensions of aliases [#22362](https://github.com/scala/scala3/pull/22362) +- Warn universal extensions on opaque types [#22502](https://github.com/scala/scala3/pull/22502) +- Discourage default arg for extension receiver [#22492](https://github.com/scala/scala3/pull/22492) +- Rename on import is never wildcard [#22712](https://github.com/scala/scala3/pull/22712) +- Collect nowarn symbols instead of skipping them [#22766](https://github.com/scala/scala3/pull/22766) + +## Match Types + +- Handle NoType in TypeComparer.disjointnessBoundary 
[#21520](https://github.com/scala/scala3/pull/21520) + +## Named Tuples + +- Special case NamedTuple.From for arguments derived from Tuple [#22449](https://github.com/scala/scala3/pull/22449) +- Generate mirrors for named tuples [#22469](https://github.com/scala/scala3/pull/22469) + +## Opaque Types + +- Fix stack overflow errors when generating opaque type proxies [#22479](https://github.com/scala/scala3/pull/22479) +- Fix inline proxy generation for opaque types referencing other opaque types [#22381](https://github.com/scala/scala3/pull/22381) +- Fix opaque types leaking rhs when inlined and found in type params (and a related stale symbol issue) [#22655](https://github.com/scala/scala3/pull/22655) + +## Overloading + +- Make overload pruning based on result types less aggressive [#21744](https://github.com/scala/scala3/pull/21744) + +## Parser + +- Fix annotations being not expected in the middle of an array type by java parser [#22391](https://github.com/scala/scala3/pull/22391) +- No outdent at eof [#22435](https://github.com/scala/scala3/pull/22435) +- Allow observing an indent after conditional [#22611](https://github.com/scala/scala3/pull/22611) +- Correctly detect colon lambda eol indent for optional brace of argument [#22477](https://github.com/scala/scala3/pull/22477) + +## Pattern Matching + +- Avoid crash in uninhab check in Space [#22601](https://github.com/scala/scala3/pull/22601) +- Account for named tuples in space subtraction [#22658](https://github.com/scala/scala3/pull/22658) +- Check exhaustivity of any case class [#22604](https://github.com/scala/scala3/pull/22604) + +## Presentation Compiler + +- Add enum type param support in sourceSymbol [#18603](https://github.com/scala/scala3/pull/18603) +- Map name position to desugared version of named context bound [#22374](https://github.com/scala/scala3/pull/22374) +- Hover and go to definition for named tuples [#22202](https://github.com/scala/scala3/pull/22202) +- Completions: do not complete package [#20532](https://github.com/scala/scala3/pull/20532) +- Print parens for single method argument only if a direct tuple type [#21510](https://github.com/scala/scala3/pull/21510) +- Improvement: use heuristic to figure out `nameSpan` if `pointDelta` too big [#22484](https://github.com/scala/scala3/pull/22484) +- Fix inferredTypeEdits for symbols [#22485](https://github.com/scala/scala3/pull/22485) +- Fix: Only fallback to the definition of a synthetic valdef if it is zero extent [#22551](https://github.com/scala/scala3/pull/22551) +- Better LSP completions inside of backticks [#22555](https://github.com/scala/scala3/pull/22555) +- Don't search for members in pc info when irrelevant [#22674](https://github.com/scala/scala3/pull/22674) +- Backport from Metals [#22426](https://github.com/scala/scala3/pull/22426) +- Backport from Metals [#22491](https://github.com/scala/scala3/pull/22491) +- Backport from Metals [#22665](https://github.com/scala/scala3/pull/22665) + +## Runner + +- Upgrade Scala CLI to [1.7.0 highlights](https://github.com/VirtusLab/scala-cli/releases/tag/v1.7.0) + - Switch to scalameta/scalafmt images of scalafmt 3.9.1+ [#3502](https://github.com/VirtusLab/scala-cli/pull/3502) + - Support the `--test` command line option for `run` subcommand [#3519](https://github.com/VirtusLab/scala-cli/pull/3519) + - Support the `--test` command line option for `package` subcommand [#3519](https://github.com/VirtusLab/scala-cli/pull/3519) + - Detect objects with main class in scripts 
[#3479](https://github.com/VirtusLab/scala-cli/pull/3479) + - Support for Scala.js 1.18.2 [#3454](https://github.com/VirtusLab/scala-cli/pull/3454) + - Support for Scala Native 0.5.7 [#3527](https://github.com/VirtusLab/scala-cli/pull/3527) + - Add support for running a main method from the test scope [#3502](https://github.com/VirtusLab/scala-cli/pull/3502) + +## Quotes + +- Add a check for correct Array shape in quotes.reflect.ClassOfConstant [#22033](https://github.com/scala/scala3/pull/22033) +- Fix issue with static `this` references erroring in quoted code [#22618](https://github.com/scala/scala3/pull/22618) +- Fix #21721: make case TypeBlock(_,_) not match non-type Block [#21722](https://github.com/scala/scala3/pull/21722) +- Make Ref.apply() return trees usable in the largest scope possible [#22240](https://github.com/scala/scala3/pull/22240) +- Make sure Block does not incorrectly match a TypeBlock [#22716](https://github.com/scala/scala3/pull/22716) +- Do not approximate prefixes when using memberType in reflect API [#22448](https://github.com/scala/scala3/pull/22448) +- Bring back pattern match exhaustivity checking for macros [#22622](https://github.com/scala/scala3/pull/22622) + +## REPL + +- REPL: JLine 3.29.0 (was 3.27.1) [#22679](https://github.com/scala/scala3/pull/22679) +- Repl: emit warning for the `:sh` command [#22694](https://github.com/scala/scala3/pull/22694) +- Add warning for :kind command [#22572](https://github.com/scala/scala3/pull/22572) + +## Reporting + +- Filter help renders box border [#22434](https://github.com/scala/scala3/pull/22434) +- Register nowarn when inlining [#22682](https://github.com/scala/scala3/pull/22682) +- Rule out exports of member of the current class [#22545](https://github.com/scala/scala3/pull/22545) + +## Scaladoc + +- Render `@deprecated` correctly even when named arguments weren't used [#21925](https://github.com/scala/scala3/pull/21925) +- Remove DRI from Scaladoc warnings [#22330](https://github.com/scala/scala3/pull/22330) + +## SemanticDB + +- Don't add `()` to semanticdb symbol for java variables [#22573](https://github.com/scala/scala3/pull/22573) +- Fix compiler crash when using betasty with missing java classfiles [#22599](https://github.com/scala/scala3/pull/22599) + +## Transform + +- Check only stable qual for import prefix [#22633](https://github.com/scala/scala3/pull/22633) +- Treat static vals as enclosures in lambdalift [#22452](https://github.com/scala/scala3/pull/22452) +- Record calls to constructors in lambdaLift [#22487](https://github.com/scala/scala3/pull/22487) +- Only check logicalOwners for methods, and not for classes, when looking for proxies [#22356](https://github.com/scala/scala3/pull/22356) +- Add error-checking when fetching rhs of trees from TASTy [#22565](https://github.com/scala/scala3/pull/22565) + +## Typer + +- Root of Java select must be class or rooted package [#21800](https://github.com/scala/scala3/pull/21800) +- Check if a prefix is valid before selecting from a type [#22368](https://github.com/scala/scala3/pull/22368) +- Preserve hard unions in widenSingletons [#22369](https://github.com/scala/scala3/pull/22369) +- Constructor proxy is restricted if class is protected [#22563](https://github.com/scala/scala3/pull/22563) +- Constructor companion gets privateWithin [#22627](https://github.com/scala/scala3/pull/22627) +- Revert lambda cleanup [#22697](https://github.com/scala/scala3/pull/22697) +- Avoid infinite recursion when looking for suggestions 
[#22361](https://github.com/scala/scala3/pull/22361) +- Fix cyclic check, regardless of definition order [#22342](https://github.com/scala/scala3/pull/22342) +- Avoid inf recursion in provablyDisjointClasses [#22489](https://github.com/scala/scala3/pull/22489) + +## Value Classes + +- Allow private members when computing the denotation of a NamedType [#22549](https://github.com/scala/scala3/pull/22549) + +## Other changes + +- Remove locale dependent FileSystemException check [#21633](https://github.com/scala/scala3/pull/21633) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.6.4..3.7.0-RC1` these are: + +``` + 59 Martin Odersky + 43 Som Snytt + 33 Adrien Piquerez + 32 Hamza Remmal + 21 Wojciech Mazur + 19 aherlihy + 19 kasiaMarek + 15 Jan Chyb + 13 Dale Wijnand + 11 Kacper Korban + 10 EnzeXing + 7 Guillaume Martres + 7 Matt Bovel + 7 Oliver Bračevac + 7 noti0na1 + 6 Sébastien Doeraene + 5 HarrisL2 + 5 Jamie Thompson + 5 dependabot[bot] + 4 Joel Wilsson + 4 Seth Tisue + 3 Piotr Chabelski + 3 Roman Janusz + 3 anna herlihy + 2 David Hua + 1 Alec Theriault + 1 Daisy Li + 1 Daniel Thoma + 1 Dmitrii Naumenko + 1 Felix Herrmann + 1 He-Pin(kerr) + 1 João Ferreira + 1 Jędrzej Rochala + 1 Katarzyna Marek + 1 Kenji Yoshida + 1 Niklas Fiekas + 1 Rocco Mathijn Andela + 1 Vadim Chelyshov + 1 Yichen Xu + 1 adpi2 + 1 fan-tom + 1 philwalk + 1 rochala +``` diff --git a/changelogs/3.7.0-RC2.md b/changelogs/3.7.0-RC2.md new file mode 100644 index 000000000000..d97aaf5c9812 --- /dev/null +++ b/changelogs/3.7.0-RC2.md @@ -0,0 +1,27 @@ +# Backported changes + +- Backport "Check trailing blank line at EOF for OUTDENT" to 3.7.0 [#22942](https://github.com/scala/scala3/pull/22942) +- Backport "Fail compilation if multiple conflicting top-level private defs/vals are in the same package" to 3.7 [#22932](https://github.com/scala/scala3/pull/22932) +- Backport "Deprecate `Yno-kind-polymorphism`" to 3.7 [#22931](https://github.com/scala/scala3/pull/22931) +- Backport "Revert unconditional lint of Inlined expansion" to 3.7 [#22930](https://github.com/scala/scala3/pull/22930) +- Backport "Bump Scala CLI to v1.7.1 (was v1.7.0)" to 3.7 [#22929](https://github.com/scala/scala3/pull/22929) +- Backport "Fix #22794: Emit the definition of Arrays.newArray even though it's a primitive." 
to 3.7.0 [#22801](https://github.com/scala/scala3/pull/22801) + +# Reverted changes + +- Revert "Make overload pruning based on result types less aggressive" in 3.7.0 [#22940](https://github.com/scala/scala3/pull/22940) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.7.0-RC1..3.7.0-RC2` these are: + +``` + 4 Som Snytt + 4 Wojciech Mazur + 1 Jan Chyb + 1 Piotr Chabelski + 1 Sébastien Doeraene + 1 Yichen Xu +``` diff --git a/changelogs/3.7.0-RC3.md b/changelogs/3.7.0-RC3.md new file mode 100644 index 000000000000..3408fbf56d59 --- /dev/null +++ b/changelogs/3.7.0-RC3.md @@ -0,0 +1,16 @@ +# Backported changes + +- Backport "Two fixes to NamedTuple pattern matching" to 3.7.0 [#22995](https://github.com/scala/scala3/pull/22995) +- Backport "changes to scala.caps in preparation to make Capability stable" to 3.7.0 [#22967](https://github.com/scala/scala3/pull/22967) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.7.0-RC2..3.7.0-RC3` these are: + +``` + 4 Martin Odersky + 2 Wojciech Mazur + 1 Natsu Kagami +``` diff --git a/changelogs/3.7.0-RC4.md b/changelogs/3.7.0-RC4.md new file mode 100644 index 000000000000..2b39d025541f --- /dev/null +++ b/changelogs/3.7.0-RC4.md @@ -0,0 +1,15 @@ +# Backported changes + +- Backport "Upgrade to Scala.js 1.19.0." to 3.7 [#23035](https://github.com/scala/scala3/pull/23035) + + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.7.0-RC3..3.7.0-RC4` these are: + +``` + 2 Sébastien Doeraene + 1 Tomasz Godzik +``` diff --git a/changelogs/3.7.0.md b/changelogs/3.7.0.md new file mode 100644 index 000000000000..8cfe9c75116f --- /dev/null +++ b/changelogs/3.7.0.md @@ -0,0 +1,279 @@ +# Highlights of the release + +- Add `@scala.annotation.internal.preview` annotation and `-preview` flag. 
[#22317](https://github.com/scala/scala3/pull/22317) +- Make SIP-52 `@publicInBinary` a stable feature in 3.7 [#22591](https://github.com/scala/scala3/pull/22591) +- Make SIP-58 - `Named Tuples` a stable feature in 3.7 [#22753](https://github.com/scala/scala3/pull/22753) +- Make SIP-62 - `Better Fors` a preview feature in 3.7 [#22776](https://github.com/scala/scala3/pull/22776) +- Implement SIP-61 `@unroll` annotation as experimental [#21693](https://github.com/scala/scala3/pull/21693) +- Upgrade Scala 2 Standard Library to 2.13.16 [#22386](https://github.com/scala/scala3/pull/22386) +- Upgrade Scala.js to 1.19.0 [#23026](https://github.com/scala/scala3/pull/23026) +- Upgrade Scala CLI to 1.7.1 [#22843](https://github.com/scala/scala3/pull/22843) +- Expression Compiler is now part of Scala 3 compiler [#22597](https://github.com/scala/scala3/pull/22597) +- Quotes API: Added `apply` methods to import selectors [#22457](https://github.com/scala/scala3/pull/22457) +- Quotes API: Implement experimental `summonIgnoring` method [#22417](https://github.com/scala/scala3/pull/22417) +- Quotes API: Add class parameters, flags, and privateWithin and annotations to experimental `newClass` methods [#21880](https://github.com/scala/scala3/pull/21880) +- Experimental: Capture Calculus - Inferring `tracked` modifier [#21628](https://github.com/scala/scala3/pull/21628) +- Presentation Compiler: Show inferred type on holes in hover [#21423](https://github.com/scala/scala3/pull/21423) +- JVM Backend: Fix #15736 blocking Scala 3 on Android [#22632](https://github.com/scala/scala3/pull/22632) +- REPL: add flag to quit after evaluating init script [#22636](https://github.com/scala/scala3/pull/22636) +- REPL: implement :jar (deprecate :require) [#22343](https://github.com/scala/scala3/pull/22343) +- Linting: Improvements to -Wunused and -Wconf [#20894](https://github.com/scala/scala3/pull/20894) +- Implicit parameters warn at call site without `using` keyword [#22441](https://github.com/scala/scala3/pull/22441) +- Support for dependent case classes [#21698](https://github.com/scala/scala3/pull/21698) +- Deprecate `Yno-kind-polymorphism` [#22814](https://github.com/scala/scala3/pull/22814) + +# Other changes and fixes + +## Annotations + +- Lift arguments of explicitly constructed annotations [#22553](https://github.com/scala/scala3/pull/22553) +- Fix copy of annotation on `@main` methods [#22582](https://github.com/scala/scala3/pull/22582) +- `@publicInBinary` has now effect on secondary constructors [#22630](https://github.com/scala/scala3/pull/22630) +- Fix mapping of annotations [#22407](https://github.com/scala/scala3/pull/22407) + +## Backend: Scala.js + +- Emit `js.NewArray` IR nodes when possible. 
[#22446](https://github.com/scala/scala3/pull/22446) +- Fix #22794: Emit the definition of Arrays.newArray even though it's a primitive [#22797](https://github.com/scala/scala3/pull/22797) + +## Classpath + +- Fix empty ClassPath attribute in one or more classpath jars causes crash [#22462](https://github.com/scala/scala3/pull/22462) + +## Documentation + +- Improve the usage of inclusive language [#22360](https://github.com/scala/scala3/pull/22360) + +## Erasure + +- Handle type aliases in contextFunctionResultTypeAfter [#21517](https://github.com/scala/scala3/pull/21517) +- Align erasure of `Array[Nothing]` and `Array[Null]` with Scala 2 [#22517](https://github.com/scala/scala3/pull/22517) + +## Desugering + +- Under `betterFors` don't drop the trailing `map` if it would result in a different type (also drop `_ => ()`) [#22619](https://github.com/scala/scala3/pull/22619) + +## Experimental: Capture Checking + +- Canonicalize capture variable subtype comparisons [#22299](https://github.com/scala/scala3/pull/22299) +- Permit Capture Refs for Uninitialized Type and Term Params in BiTypeMap [#22451](https://github.com/scala/scala3/pull/22451) +- Fix maximal capability handling and expand aliases [#22341](https://github.com/scala/scala3/pull/22341) + +## Experimental: Modularity + +- Widen skolem types when adding parent refinements [#22488](https://github.com/scala/scala3/pull/22488) + +## Experimental: Global Initialization Checker + +- Refactor the abstract domain of global init checker to compile http4s [#22179](https://github.com/scala/scala3/pull/22179) +- Fix global init checking crash when using a value defined in by-name closure [#22625](https://github.com/scala/scala3/pull/22625) + +## Experimentals + +- Expand value references to packages to their underlying package objects [#22011](https://github.com/scala/scala3/pull/22011) + +## Implicits + +- Restrict implicit args to using [#22458](https://github.com/scala/scala3/pull/22458) + +## Linting + +- Suppress spurious Suppression [#22383](https://github.com/scala/scala3/pull/22383) +- CheckUnused checks span.exists before testing its parts [#22504](https://github.com/scala/scala3/pull/22504) +- Don't warn retainedBody [#22510](https://github.com/scala/scala3/pull/22510) +- Handle Typeable [#22663](https://github.com/scala/scala3/pull/22663) +- Nowarn public implicit val class params [#22664](https://github.com/scala/scala3/pull/22664) +- Exclude synthetic this.m, Any.m from import lookup [#22695](https://github.com/scala/scala3/pull/22695) +- Warn unused member of anonymous class [#22729](https://github.com/scala/scala3/pull/22729) +- Ignore params to default arg getters [#22749](https://github.com/scala/scala3/pull/22749) +- Lazy val def member is pattern var [#22750](https://github.com/scala/scala3/pull/22750) +- Restore resolving prefixes of implicit Ident [#22751](https://github.com/scala/scala3/pull/22751) +- No warning for parameter of overriding method [#22757](https://github.com/scala/scala3/pull/22757) +- Dealias before checking for member in lint [#22708](https://github.com/scala/scala3/pull/22708) +- Warn on bad extensions of aliases [#22362](https://github.com/scala/scala3/pull/22362) +- Warn universal extensions on opaque types [#22502](https://github.com/scala/scala3/pull/22502) +- Discourage default arg for extension receiver [#22492](https://github.com/scala/scala3/pull/22492) +- Rename on import is never wildcard [#22712](https://github.com/scala/scala3/pull/22712) +- Collect nowarn symbols instead of skipping them 
[#22766](https://github.com/scala/scala3/pull/22766) +- Revert unconditional lint of Inlined expansion [#22815](https://github.com/scala/scala3/pull/22815) + +## Match Types + +- Handle NoType in TypeComparer.disjointnessBoundary [#21520](https://github.com/scala/scala3/pull/21520) + +## Named Tuples + +- Special case NamedTuple.From for arguments derived from Tuple [#22449](https://github.com/scala/scala3/pull/22449) +- Generate mirrors for named tuples [#22469](https://github.com/scala/scala3/pull/22469) +- Two fixes to NamedTuple pattern matching [#22953](https://github.com/scala/scala3/pull/22953) + +## Opaque Types + +- Fix stack overflow errors when generating opaque type proxies [#22479](https://github.com/scala/scala3/pull/22479) +- Fix inline proxy generation for opaque types referencing other opaque types [#22381](https://github.com/scala/scala3/pull/22381) +- Fix opaque types leaking rhs when inlined and found in type params (and a related stale symbol issue) [#22655](https://github.com/scala/scala3/pull/22655) + +## Overloading + +- Make overload pruning based on result types less aggressive [#21744](https://github.com/scala/scala3/pull/21744) +- Fail compilation if multiple conflicting top-level private defs/vals are in the same package [#22759](https://github.com/scala/scala3/pull/22759) + +## Parser + +- Fix annotations being not expected in the middle of an array type by java parser [#22391](https://github.com/scala/scala3/pull/22391) +- No outdent at eof [#22435](https://github.com/scala/scala3/pull/22435) +- Allow observing an indent after conditional [#22611](https://github.com/scala/scala3/pull/22611) +- Correctly detect colon lambda eol indent for optional brace of argument [#22477](https://github.com/scala/scala3/pull/22477) + +## Pattern Matching + +- Avoid crash in uninhab check in Space [#22601](https://github.com/scala/scala3/pull/22601) +- Account for named tuples in space subtraction [#22658](https://github.com/scala/scala3/pull/22658) +- Check exhaustivity of any case class [#22604](https://github.com/scala/scala3/pull/22604) + +## Presentation Compiler + +- Add enum type param support in sourceSymbol [#18603](https://github.com/scala/scala3/pull/18603) +- Map name position to desugared version of named context bound [#22374](https://github.com/scala/scala3/pull/22374) +- Hover and go to definition for named tuples [#22202](https://github.com/scala/scala3/pull/22202) +- Completions: do not complete package [#20532](https://github.com/scala/scala3/pull/20532) +- Print parens for single method argument only if a direct tuple type [#21510](https://github.com/scala/scala3/pull/21510) +- Improvement: use heuristic to figure out `nameSpan` if `pointDelta` too big [#22484](https://github.com/scala/scala3/pull/22484) +- Fix inferredTypeEdits for symbols [#22485](https://github.com/scala/scala3/pull/22485) +- Fix: Only fallback to the definition of a synthetic valdef if it is zero extent [#22551](https://github.com/scala/scala3/pull/22551) +- Better LSP completions inside of backticks [#22555](https://github.com/scala/scala3/pull/22555) +- Don't search for members in pc info when irrelevant [#22674](https://github.com/scala/scala3/pull/22674) +- Backport from Metals [#22426](https://github.com/scala/scala3/pull/22426) +- Backport from Metals [#22491](https://github.com/scala/scala3/pull/22491) +- Backport from Metals [#22665](https://github.com/scala/scala3/pull/22665) + +## Runner + +- Upgrade Scala CLI to [1.7.1 
highlights](https://github.com/VirtusLab/scala-cli/releases/tag/v1.7.1) + - Switch to scalameta/scalafmt images of scalafmt 3.9.1+ [#3502](https://github.com/VirtusLab/scala-cli/pull/3502) + - Support the `--test` command line option for `run` subcommand [#3519](https://github.com/VirtusLab/scala-cli/pull/3519) + - Support the `--test` command line option for `package` subcommand [#3519](https://github.com/VirtusLab/scala-cli/pull/3519) + - Detect objects with main class in scripts [#3479](https://github.com/VirtusLab/scala-cli/pull/3479) + - Support for Scala.js 1.18.2 [#3454](https://github.com/VirtusLab/scala-cli/pull/3454) + - Support for Scala Native 0.5.7 [#3527](https://github.com/VirtusLab/scala-cli/pull/3527) + - Add support for running a main method from the test scope [#3502](https://github.com/VirtusLab/scala-cli/pull/3502) + +## Quotes + +- Add a check for correct Array shape in quotes.reflect.ClassOfConstant [#22033](https://github.com/scala/scala3/pull/22033) +- Fix issue with static `this` references erroring in quoted code [#22618](https://github.com/scala/scala3/pull/22618) +- Fix #21721: make case TypeBlock(_,_) not match non-type Block [#21722](https://github.com/scala/scala3/pull/21722) +- Make Ref.apply() return trees usable in the largest scope possible [#22240](https://github.com/scala/scala3/pull/22240) +- Make sure Block does not incorrectly match a TypeBlock [#22716](https://github.com/scala/scala3/pull/22716) +- Do not approximate prefixes when using memberType in reflect API [#22448](https://github.com/scala/scala3/pull/22448) +- Bring back pattern match exhaustivity checking for macros [#22622](https://github.com/scala/scala3/pull/22622) + +## REPL + +- REPL: JLine 3.29.0 (was 3.27.1) [#22679](https://github.com/scala/scala3/pull/22679) +- Repl: emit warning for the `:sh` command [#22694](https://github.com/scala/scala3/pull/22694) +- Add warning for :kind command [#22572](https://github.com/scala/scala3/pull/22572) +- Check trailing blank line at EOF for OUTDENT [#22855](https://github.com/scala/scala3/pull/22855) + +## Reporting + +- Filter help renders box border [#22434](https://github.com/scala/scala3/pull/22434) +- Register nowarn when inlining [#22682](https://github.com/scala/scala3/pull/22682) +- Rule out exports of member of the current class [#22545](https://github.com/scala/scala3/pull/22545) + +## Standard Library + +- Changes in preparation to make `caps.Capability` stable [#22849](https://github.com/scala/scala3/pull/22849) +- Mitigate change in status of scala.caps [#22956](https://github.com/scala/scala3/pull/22956) + +## Scaladoc + +- Render `@deprecated` correctly even when named arguments weren't used [#21925](https://github.com/scala/scala3/pull/21925) +- Remove DRI from Scaladoc warnings [#22330](https://github.com/scala/scala3/pull/22330) + +## SemanticDB + +- Don't add `()` to semanticdb symbol for java variables [#22573](https://github.com/scala/scala3/pull/22573) +- Fix compiler crash when using betasty with missing java classfiles [#22599](https://github.com/scala/scala3/pull/22599) + +## Transform + +- Check only stable qual for import prefix [#22633](https://github.com/scala/scala3/pull/22633) +- Treat static vals as enclosures in lambdalift [#22452](https://github.com/scala/scala3/pull/22452) +- Record calls to constructors in lambdaLift [#22487](https://github.com/scala/scala3/pull/22487) +- Only check logicalOwners for methods, and not for classes, when looking for proxies [#22356](https://github.com/scala/scala3/pull/22356) +- 
Add error-checking when fetching rhs of trees from TASTy [#22565](https://github.com/scala/scala3/pull/22565) + +## Typer + +- Root of Java select must be class or rooted package [#21800](https://github.com/scala/scala3/pull/21800) +- Check if a prefix is valid before selecting from a type [#22368](https://github.com/scala/scala3/pull/22368) +- Preserve hard unions in widenSingletons [#22369](https://github.com/scala/scala3/pull/22369) +- Constructor proxy is restricted if class is protected [#22563](https://github.com/scala/scala3/pull/22563) +- Constructor companion gets privateWithin [#22627](https://github.com/scala/scala3/pull/22627) +- Revert lambda cleanup [#22697](https://github.com/scala/scala3/pull/22697) +- Avoid infinite recursion when looking for suggestions [#22361](https://github.com/scala/scala3/pull/22361) +- Fix cyclic check, regardless of definition order [#22342](https://github.com/scala/scala3/pull/22342) +- Avoid inf recursion in provablyDisjointClasses [#22489](https://github.com/scala/scala3/pull/22489) + +## Value Classes + +- Allow private members when computing the denotation of a NamedType [#22549](https://github.com/scala/scala3/pull/22549) + +## Other changes + +- Remove locale dependent FileSystemException check [#21633](https://github.com/scala/scala3/pull/21633) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.6.4..3.7.0` these are: + +``` + 63 Martin Odersky + 47 Som Snytt + 33 Adrien Piquerez + 32 Hamza Remmal + 29 Wojciech Mazur + 19 aherlihy + 19 kasiaMarek + 16 Jan Chyb + 13 Dale Wijnand + 11 Kacper Korban + 10 EnzeXing + 9 Sébastien Doeraene + 7 Guillaume Martres + 7 Matt Bovel + 7 Oliver Bračevac + 7 noti0na1 + 5 HarrisL2 + 5 Jamie Thompson + 5 dependabot[bot] + 4 Joel Wilsson + 4 Piotr Chabelski + 4 Seth Tisue + 3 Roman Janusz + 3 anna herlihy + 2 David Hua + 2 Tomasz Godzik + 2 Yichen Xu + 1 Alec Theriault + 1 Daisy Li + 1 Daniel Thoma + 1 Dmitrii Naumenko + 1 Felix Herrmann + 1 He-Pin(kerr) + 1 João Ferreira + 1 Jędrzej Rochala + 1 Katarzyna Marek + 1 Kenji Yoshida + 1 Natsu Kagami + 1 Niklas Fiekas + 1 Rocco Mathijn Andela + 1 Vadim Chelyshov + 1 adpi2 + 1 fan-tom + 1 philwalk + 1 rochala +``` diff --git a/changelogs/3.7.1-RC1.md b/changelogs/3.7.1-RC1.md new file mode 100644 index 000000000000..8f61c0d2bf32 --- /dev/null +++ b/changelogs/3.7.1-RC1.md @@ -0,0 +1,157 @@ +# Highlights of the release + +- Support for JDK 25 [#23004](https://github.com/scala/scala3/pull/23004) +- Warn if interpolator uses toString [#20578](https://github.com/scala/scala3/pull/20578) +- Warn if match in block is not used for PartialFunction [#23002](https://github.com/scala/scala3/pull/23002) + +# Other changes and fixes + +## Annotations + +- Approximate annotated types in `wildApprox` [#22893](https://github.com/scala/scala3/pull/22893) +- Keep unused annot on params [#23037](https://github.com/scala/scala3/pull/23037) + +## Erasure + +- Disallow context function types as value-class parameters [#23015](https://github.com/scala/scala3/pull/23015) + +## Experimental: Capture Checking + +- Two fixes to handling of abstract types with cap bounds [#22838](https://github.com/scala/scala3/pull/22838) +- Drop idempotent type maps [#22910](https://github.com/scala/scala3/pull/22910) +- Fix setup of class constructors [#22980](https://github.com/scala/scala3/pull/22980) + +## Named Tuples + +- Call dealias after stripping type variables for tupleElementTypesUpTo 
[#23005](https://github.com/scala/scala3/pull/23005) +- Avoid loosing denotations of named types during `integrate` [#22839](https://github.com/scala/scala3/pull/22839) + +## Experimental: Unroll + +- Fix #22833: allow unroll annotation in methods of final class [#22926](https://github.com/scala/scala3/pull/22926) + +## Experimental: Referencable Package Objects + +- Add experimental.packageObjectValues language setting [#23001](https://github.com/scala/scala3/pull/23001) + +## Exports + +- Respect export alias for default arg forwarder [#21109](https://github.com/scala/scala3/pull/21109) + +## Extension Methods + +- Extension check checks for no parens not empty parens [#22825](https://github.com/scala/scala3/pull/22825) + +## GADTs + +- Fix: Prevent GADT reasoning in pattern alternatives [#22853](https://github.com/scala/scala3/pull/22853) + +## Linting + +- Dealias when looking into imports [#22889](https://github.com/scala/scala3/pull/22889) +- Process Export for unused check [#22984](https://github.com/scala/scala3/pull/22984) +- Drill into QuotePattern bindings symbol info [#22987](https://github.com/scala/scala3/pull/22987) +- No warn implicit param of overriding method [#22901](https://github.com/scala/scala3/pull/22901) +- No warn for evidence params of marker traits such as NotGiven [#22985](https://github.com/scala/scala3/pull/22985) + +## Initialization + +- Check for tasty error in template trees. [#22867](https://github.com/scala/scala3/pull/22867) + +## Metaprogramming: Compile-time + +- Fix issue with certain synthetics missing in compiletime.typechecks [#22978](https://github.com/scala/scala3/pull/22978) + +## Pattern Matching + +- Fix existing GADT constraints with introduced pattern-bound symbols [#22928](https://github.com/scala/scala3/pull/22928) + +## Pickling + +- Fix fromProduct synthesized code for parameter-dependent case classes [#22961](https://github.com/scala/scala3/pull/22961) + +## Presentation Compiler + +- Completions for requests just before string [#22894](https://github.com/scala/scala3/pull/22894) +- Fix: go to def should lead to all: apply, object and class [#22771](https://github.com/scala/scala3/pull/22771) +- Ignore ending `$` when looking at end marker names [#22798](https://github.com/scala/scala3/pull/22798) +- Feature: Skip auto importing symbols we know are wrong in current context [#22813](https://github.com/scala/scala3/pull/22813) +- Show the Autofill completion case as what would be auto-filled [#22819](https://github.com/scala/scala3/pull/22819) +- Bugfix: Fix issues with annotations not detected [#22878](https://github.com/scala/scala3/pull/22878) +- Improvement: Rework IndexedContext to reuse the previously calculated scopes [#22898](https://github.com/scala/scala3/pull/22898) +- Pc: Properly adjust indentation when inlining blocks [#22915](https://github.com/scala/scala3/pull/22915) +- Improvement: Support using directives in worksheets [#22957](https://github.com/scala/scala3/pull/22957) +- Fix: show hover for synthetics if explicitly used [#22973](https://github.com/scala/scala3/pull/22973) +- Pc: fix: inline value when def indentation equals 2 [#22990](https://github.com/scala/scala3/pull/22990) + +## Rewrites + +- Fix insertion of `using` in applications with trailing lambda syntax [#22937](https://github.com/scala/scala3/pull/22937) +- Test chars safely when highlighting [#22918](https://github.com/scala/scala3/pull/22918) + +## Reporting + +- Print infix operations in infix form 
[#22854](https://github.com/scala/scala3/pull/22854) + +## Scaladoc + +- Chore: add support for 'abstract override' modifier [#22802](https://github.com/scala/scala3/pull/22802) +- Scaladoc: fix generation of unique header ids [#22779](https://github.com/scala/scala3/pull/22779) + +## Typer + +- Disallow context bounds in type lambdas [#22659](https://github.com/scala/scala3/pull/22659) +- Refuse trailing type parameters in extractors [#22699](https://github.com/scala/scala3/pull/22699) +- Fix #22724: Revert the PolyType case in #21744 [#22820](https://github.com/scala/scala3/pull/22820) +- Fix isGenericArrayElement for higher-kinded types [#22938](https://github.com/scala/scala3/pull/22938) +- Tighten condition to preserve denotation in IntegrateMap [#23060](https://github.com/scala/scala3/pull/23060) + +## Transform + +- Mix in the `productPrefix` hash statically in case class `hashCode` [#22865](https://github.com/scala/scala3/pull/22865) + +## Value Classes + +- Fix #21918: Disallow value classes extending type aliases of AnyVal [#23021](https://github.com/scala/scala3/pull/23021) + + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.7.0..3.7.1-RC1` these are: + +``` + 135 Martin Odersky + 27 Som Snytt + 13 Matt Bovel + 10 Wojciech Mazur + 9 Hamza Remmal + 5 Quentin Bernet + 5 Tomasz Godzik + 4 aherlihy + 3 HarrisL2 + 3 Jan Chyb + 3 Natsu Kagami + 3 Ondrej Lhotak + 3 Sébastien Doeraene + 2 Piotr Chabelski + 2 Yichen Xu + 2 Yoonjae Jeon + 2 kasiaMarek + 1 Aleksey Troitskiy + 1 Daisy Li + 1 Dale Wijnand + 1 Jan-Pieter van den Heuvel + 1 Jędrzej Rochala + 1 Kacper Korban + 1 Katarzyna Marek + 1 Lukas Rytz + 1 Mikołaj Fornal + 1 Nikita Glushchenko + 1 Oliver Bračevac + 1 Ondřej Lhoták + 1 dependabot[bot] + 1 noti0na1 + 1 philippus +``` diff --git a/changelogs/3.7.1-RC2.md b/changelogs/3.7.1-RC2.md new file mode 100644 index 000000000000..6a9b9d88bb79 --- /dev/null +++ b/changelogs/3.7.1-RC2.md @@ -0,0 +1,25 @@ +# Backported chnages + +- Backport "chore: filter allowed source versions by import and by settings" to 3.7.1 (#23231) +- Backport "Bump Scala CLI to v1.8.0 (was v1.7.1)" to 3.7.1 (#23230) +- Backport "Mention extension in unused param warning" to 3.7.1 (#23229) +- Backport "Revert recent changes to opaque type proxy generation" to 3.7.1 (#23228) +- Backport "Remove premature caching of lookups for unused lint" to 3.7.1 (#23227) + +# Reverted changes + +- Revert "Make overload pruning based on result types less aggressive (#21744)" in 3.7.1-RC2 (#23239) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.7.1-RC1..3.7.1-RC2` these are: + +``` + 4 Hamza Remmal + 4 Som Snytt + 3 Jan Chyb + 3 Wojciech Mazur + 1 Piotr Chabelski +``` diff --git a/changelogs/3.7.1.md b/changelogs/3.7.1.md new file mode 100644 index 000000000000..4ffb178aea89 --- /dev/null +++ b/changelogs/3.7.1.md @@ -0,0 +1,171 @@ +# Highlights of the release + +- Support for JDK 25 [#23004](https://github.com/scala/scala3/pull/23004) +- Warn if interpolator uses toString [#20578](https://github.com/scala/scala3/pull/20578) +- Warn if match in block is not used for PartialFunction [#23002](https://github.com/scala/scala3/pull/23002) + +# Other changes and fixes + +## Annotations + +- Approximate annotated types in `wildApprox` [#22893](https://github.com/scala/scala3/pull/22893) +- Keep unused annot on params 
[#23037](https://github.com/scala/scala3/pull/23037) + +## Erasure + +- Disallow context function types as value-class parameters [#23015](https://github.com/scala/scala3/pull/23015) + +## Experimental: Capture Checking + +- Two fixes to handling of abstract types with cap bounds [#22838](https://github.com/scala/scala3/pull/22838) +- Drop idempotent type maps [#22910](https://github.com/scala/scala3/pull/22910) +- Fix setup of class constructors [#22980](https://github.com/scala/scala3/pull/22980) + +## Experimental: Unroll + +- Fix #22833: allow unroll annotation in methods of final class [#22926](https://github.com/scala/scala3/pull/22926) + +## Experimental: Referencable Package Objects + +- Add experimental.packageObjectValues language setting [#23001](https://github.com/scala/scala3/pull/23001) + +## Exports + +- Respect export alias for default arg forwarder [#21109](https://github.com/scala/scala3/pull/21109) + +## Extension Methods + +- Extension check checks for no parens not empty parens [#22825](https://github.com/scala/scala3/pull/22825) + +## GADTs + +- Fix: Prevent GADT reasoning in pattern alternatives [#22853](https://github.com/scala/scala3/pull/22853) + +## Linting + +- Dealias when looking into imports [#22889](https://github.com/scala/scala3/pull/22889) +- Process Export for unused check [#22984](https://github.com/scala/scala3/pull/22984) +- Drill into QuotePattern bindings symbol info [#22987](https://github.com/scala/scala3/pull/22987) +- No warn implicit param of overriding method [#22901](https://github.com/scala/scala3/pull/22901) +- No warn for evidence params of marker traits such as NotGiven [#22985](https://github.com/scala/scala3/pull/22985) +- Mention extension in unused param warning [#23132](https://github.com/scala/scala3/pull/23132) +- Remove premature caching of lookups for unused lint [#22982](https://github.com/scala/scala3/pull/22982) +- Enclosing package p.q not visible as q [#23069](https://github.com/scala/scala3/pull/23069) + +## Inline + +- Revert recent changes to opaque type proxy generation [#23059](https://github.com/scala/scala3/pull/23059) + +## Initialization + +- Check for tasty error in template trees. 
[#22867](https://github.com/scala/scala3/pull/22867) + +## Metaprogramming: Compile-time + +- Fix issue with certain synthetics missing in compiletime.typechecks [#22978](https://github.com/scala/scala3/pull/22978) + +## Named Tuples + +- Call dealias after stripping type variables for tupleElementTypesUpTo [#23005](https://github.com/scala/scala3/pull/23005) +- Avoid loosing denotations of named types during `integrate` [#22839](https://github.com/scala/scala3/pull/22839) + +## Pattern Matching + +- Fix existing GADT constraints with introduced pattern-bound symbols [#22928](https://github.com/scala/scala3/pull/22928) + +## Pickling + +- Fix fromProduct synthesized code for parameter-dependent case classes [#22961](https://github.com/scala/scala3/pull/22961) + +## Presentation Compiler + +- Completions for requests just before string [#22894](https://github.com/scala/scala3/pull/22894) +- Fix: go to def should lead to all: apply, object and class [#22771](https://github.com/scala/scala3/pull/22771) +- Ignore ending `$` when looking at end marker names [#22798](https://github.com/scala/scala3/pull/22798) +- Feature: Skip auto importing symbols we know are wrong in current context [#22813](https://github.com/scala/scala3/pull/22813) +- Show the Autofill completion case as what would be auto-filled [#22819](https://github.com/scala/scala3/pull/22819) +- Bugfix: Fix issues with annotations not detected [#22878](https://github.com/scala/scala3/pull/22878) +- Improvement: Rework IndexedContext to reuse the previously calculated scopes [#22898](https://github.com/scala/scala3/pull/22898) +- Pc: Properly adjust indentation when inlining blocks [#22915](https://github.com/scala/scala3/pull/22915) +- Improvement: Support using directives in worksheets [#22957](https://github.com/scala/scala3/pull/22957) +- Fix: show hover for synthetics if explicitly used [#22973](https://github.com/scala/scala3/pull/22973) +- Pc: fix: inline value when def indentation equals 2 [#22990](https://github.com/scala/scala3/pull/22990) + +## Rewrites + +- Fix insertion of `using` in applications with trailing lambda syntax [#22937](https://github.com/scala/scala3/pull/22937) +- Test chars safely when highlighting [#22918](https://github.com/scala/scala3/pull/22918) + +## Reporting + +- Print infix operations in infix form [#22854](https://github.com/scala/scala3/pull/22854) + +## Runner + +- Bump Scala CLI to v1.8.0 (was v1.7.1) [#23168](https://github.com/scala/scala3/pull/23168) + +## Scaladoc + +- Chore: add support for 'abstract override' modifier [#22802](https://github.com/scala/scala3/pull/22802) +- Scaladoc: fix generation of unique header ids [#22779](https://github.com/scala/scala3/pull/22779) + +## Settings + +- Filter allowed source versions by import and by settings [#23215](https://github.com/scala/scala3/pull/23215) + +## Typer + +- Disallow context bounds in type lambdas [#22659](https://github.com/scala/scala3/pull/22659) +- Refuse trailing type parameters in extractors [#22699](https://github.com/scala/scala3/pull/22699) +- Fix #22724: Revert the PolyType case in #21744 [#22820](https://github.com/scala/scala3/pull/22820) +- Fix isGenericArrayElement for higher-kinded types [#22938](https://github.com/scala/scala3/pull/22938) +- Tighten condition to preserve denotation in IntegrateMap [#23060](https://github.com/scala/scala3/pull/23060) + +## Transform + +- Mix in the `productPrefix` hash statically in case class `hashCode` [#22865](https://github.com/scala/scala3/pull/22865) + +## Value Classes + +- 
Fix #21918: Disallow value classes extending type aliases of AnyVal [#23021](https://github.com/scala/scala3/pull/23021) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.7.0..3.7.1` these are: + +``` + 135 Martin Odersky + 31 Som Snytt + 14 Wojciech Mazur + 13 Hamza Remmal + 13 Matt Bovel + 6 Jan Chyb + 5 Quentin Bernet + 5 Tomasz Godzik + 4 aherlihy + 3 HarrisL2 + 3 Natsu Kagami + 3 Ondrej Lhotak + 3 Piotr Chabelski + 3 Sébastien Doeraene + 2 Yichen Xu + 2 Yoonjae Jeon + 2 kasiaMarek + 1 Aleksey Troitskiy + 1 Daisy Li + 1 Dale Wijnand + 1 Jan-Pieter van den Heuvel + 1 Jędrzej Rochala + 1 Kacper Korban + 1 Katarzyna Marek + 1 Lukas Rytz + 1 Mikołaj Fornal + 1 Nikita Glushchenko + 1 Oliver Bračevac + 1 Ondřej Lhoták + 1 dependabot[bot] + 1 noti0na1 + 1 philippus +``` diff --git a/community-build/community-projects/cats b/community-build/community-projects/cats index 771c6c802f59..683f28dd0da4 160000 --- a/community-build/community-projects/cats +++ b/community-build/community-projects/cats @@ -1 +1 @@ -Subproject commit 771c6c802f59c72dbc1be1898081c9c882ddfeb0 +Subproject commit 683f28dd0da42e20c4bbf1515c7a7839c3d3c7a9 diff --git a/community-build/community-projects/scalatest b/community-build/community-projects/scalatest index d6eeedbfc1e0..ab674686d089 160000 --- a/community-build/community-projects/scalatest +++ b/community-build/community-projects/scalatest @@ -1 +1 @@ -Subproject commit d6eeedbfc1e04f2eff55506f07f93f448cc21407 +Subproject commit ab674686d089f13da2e29c3b78fe6c3ab0211189 diff --git a/community-build/community-projects/utest b/community-build/community-projects/utest index f4a9789e2750..f828696abf2f 160000 --- a/community-build/community-projects/utest +++ b/community-build/community-projects/utest @@ -1 +1 @@ -Subproject commit f4a9789e2750523feee4a3477efb42eb15424fc7 +Subproject commit f828696abf2fd554d37e8020fc5b4aaa2d143325 diff --git a/community-build/src/scala/dotty/communitybuild/CommunityBuildRunner.scala b/community-build/src/scala/dotty/communitybuild/CommunityBuildRunner.scala index b3065fefe87f..6aaaedb8a3dd 100644 --- a/community-build/src/scala/dotty/communitybuild/CommunityBuildRunner.scala +++ b/community-build/src/scala/dotty/communitybuild/CommunityBuildRunner.scala @@ -13,8 +13,7 @@ object CommunityBuildRunner: * is necessary since we run tests each time on a fresh * Docker container. We run the update on Docker container * creation time to create the cache of the dependencies - * and avoid network overhead. See https://github.com/lampepfl/dotty-drone - * for more infrastructural details. + * and avoid network overhead. 
*/ extension (self: CommunityProject) def run()(using suite: CommunityBuildRunner): Unit = diff --git a/compiler/src/dotty/tools/MainGenericCompiler.scala b/compiler/src/dotty/tools/MainGenericCompiler.scala index aa924a237f73..2c3f6f97e79e 100644 --- a/compiler/src/dotty/tools/MainGenericCompiler.scala +++ b/compiler/src/dotty/tools/MainGenericCompiler.scala @@ -1,7 +1,5 @@ package dotty.tools -import scala.language.unsafeNulls - import scala.annotation.tailrec import scala.io.Source import scala.util.Try diff --git a/compiler/src/dotty/tools/MainGenericRunner.scala b/compiler/src/dotty/tools/MainGenericRunner.scala index bf477f019cba..b32630a5d63b 100644 --- a/compiler/src/dotty/tools/MainGenericRunner.scala +++ b/compiler/src/dotty/tools/MainGenericRunner.scala @@ -1,7 +1,5 @@ package dotty.tools -import scala.language.unsafeNulls - import scala.annotation.tailrec import scala.io.Source import scala.util.Try @@ -160,7 +158,7 @@ object MainGenericRunner { .withScriptArgs(tail*) .noSave // -save not useful here case arg :: tail => - val line = Try(Source.fromFile(arg).getLines.toList).toOption.flatMap(_.headOption) + val line = Try(Source.fromFile(arg).getLines().toList).toOption.flatMap(_.headOption) lazy val hasScalaHashbang = { val s = line.getOrElse("") ; s.startsWith("#!") && s.contains("scala") } if arg.endsWith(".scala") || arg.endsWith(".sc") || hasScalaHashbang then settings diff --git a/compiler/src/dotty/tools/backend/jvm/AsmUtils.scala b/compiler/src/dotty/tools/backend/jvm/AsmUtils.scala index e6393ce82054..8a71a09aa7ab 100644 --- a/compiler/src/dotty/tools/backend/jvm/AsmUtils.scala +++ b/compiler/src/dotty/tools/backend/jvm/AsmUtils.scala @@ -2,8 +2,6 @@ package dotty.tools package backend package jvm -import scala.language.unsafeNulls - import scala.tools.asm.tree.{AbstractInsnNode} import java.io.PrintWriter import scala.tools.asm.util.{TraceClassVisitor, TraceMethodVisitor, Textifier} diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeAsmCommon.scala b/compiler/src/dotty/tools/backend/jvm/BCodeAsmCommon.scala index e1ff94be6362..5b1a8e1683f0 100644 --- a/compiler/src/dotty/tools/backend/jvm/BCodeAsmCommon.scala +++ b/compiler/src/dotty/tools/backend/jvm/BCodeAsmCommon.scala @@ -2,8 +2,6 @@ package dotty.tools package backend package jvm -import scala.language.unsafeNulls - import dotty.tools.dotc.core.Flags.* import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.report @@ -79,7 +77,7 @@ final class BCodeAsmCommon[I <: DottyBackendInterface](val interface: I) { enclosingClass(classSym.originalOwner.originalLexicallyEnclosingClass) } - /*final*/ case class EnclosingMethodEntry(owner: String, name: String, methodDescriptor: String) + /*final*/ case class EnclosingMethodEntry(owner: String, name: String | Null, methodDescriptor: String | Null) /** * Data for emitting an EnclosingMethod attribute. None if `classSym` is a member class (not diff --git a/compiler/src/dotty/tools/backend/jvm/BTypesFromSymbols.scala b/compiler/src/dotty/tools/backend/jvm/BTypesFromSymbols.scala index 97934935f352..817d0be54d26 100644 --- a/compiler/src/dotty/tools/backend/jvm/BTypesFromSymbols.scala +++ b/compiler/src/dotty/tools/backend/jvm/BTypesFromSymbols.scala @@ -285,7 +285,7 @@ class BTypesFromSymbols[I <: DottyBackendInterface](val int: I, val frontendAcce // tests/run/serialize.scala and https://github.com/typelevel/cats-effect/pull/2360). 
val privateFlag = !sym.isClass && (sym.is(Private) || (sym.isPrimaryConstructor && sym.owner.isTopLevelModuleClass)) - val finalFlag = sym.is(Final) && !toDenot(sym).isClassConstructor && !sym.is(Mutable, butNot = Accessor) && !sym.enclosingClass.is(Trait) + val finalFlag = sym.is(Final) && !toDenot(sym).isClassConstructor && !sym.isMutableVar && !sym.enclosingClass.is(Trait) import asm.Opcodes.* import GenBCodeOps.addFlagIf diff --git a/compiler/src/dotty/tools/backend/jvm/BackendUtils.scala b/compiler/src/dotty/tools/backend/jvm/BackendUtils.scala index ae423b6b80dd..dac019e985ef 100644 --- a/compiler/src/dotty/tools/backend/jvm/BackendUtils.scala +++ b/compiler/src/dotty/tools/backend/jvm/BackendUtils.scala @@ -8,7 +8,6 @@ import scala.collection.mutable import scala.jdk.CollectionConverters.* import dotty.tools.dotc.report -import scala.language.unsafeNulls /** * This component hosts tools and utilities used in the backend that require access to a `BTypes` @@ -186,6 +185,7 @@ object BackendUtils { 21 -> asm.Opcodes.V21, 22 -> asm.Opcodes.V22, 23 -> asm.Opcodes.V23, - 24 -> asm.Opcodes.V24 + 24 -> asm.Opcodes.V24, + 25 -> asm.Opcodes.V25 ) } diff --git a/compiler/src/dotty/tools/backend/jvm/ClassfileWriters.scala b/compiler/src/dotty/tools/backend/jvm/ClassfileWriters.scala index 81929c11fdcf..26531fbfe569 100644 --- a/compiler/src/dotty/tools/backend/jvm/ClassfileWriters.scala +++ b/compiler/src/dotty/tools/backend/jvm/ClassfileWriters.scala @@ -18,8 +18,6 @@ import dotty.tools.io.PlainFile.toPlainFile import BTypes.InternalName import dotty.tools.io.JarArchive -import scala.language.unsafeNulls - /** !!! This file is now copied in `dotty.tools.io.FileWriters` in a more general way that does not rely upon * `PostProcessorFrontendAccess`, this should probably be changed to wrap that class instead. * @@ -54,11 +52,11 @@ class ClassfileWriters(frontendAccess: PostProcessorFrontendAccess) { def close(): Unit protected def classRelativePath(className: InternalName, suffix: String = ".class"): String = - className.replace('.', '/').nn + suffix + className.replace('.', '/') + suffix } object ClassfileWriter { - private def getDirectory(dir: String): Path = Paths.get(dir).nn + private def getDirectory(dir: String): Path = Paths.get(dir) def apply(): ClassfileWriter = { val jarManifestMainClass: Option[String] = compilerSettings.mainClass.orElse { @@ -137,7 +135,7 @@ class ClassfileWriters(frontendAccess: PostProcessorFrontendAccess) { new JarEntryWriter(jarFile, jarManifestMainClass, jarCompressionLevel) } else if (file.isVirtual) new VirtualFileWriter(file) - else if (file.isDirectory) new DirEntryWriter(file.file.toPath.nn) + else if (file.isDirectory) new DirEntryWriter(file.file.nn.toPath) else throw new IllegalStateException(s"don't know how to handle an output of $file [${file.getClass}]") } @@ -151,7 +149,7 @@ class ClassfileWriters(frontendAccess: PostProcessorFrontendAccess) { val jarWriter: JarOutputStream = { import scala.util.Properties.* val manifest = new Manifest - val attrs = manifest.getMainAttributes.nn + val attrs = manifest.getMainAttributes attrs.put(MANIFEST_VERSION, "1.0") attrs.put(ScalaCompilerVersion, versionNumberString) mainClass.foreach(c => attrs.put(MAIN_CLASS, c)) @@ -184,7 +182,7 @@ class ClassfileWriters(frontendAccess: PostProcessorFrontendAccess) { // important detail here, even on Windows, Zinc expects the separator within the jar // to be the system default, (even if in the actual jar file the entry always uses '/'). 
// see https://github.com/sbt/zinc/blob/dcddc1f9cfe542d738582c43f4840e17c053ce81/internal/compiler-bridge/src/main/scala/xsbt/JarUtils.scala#L47 - val pathInJar = + val pathInJar = if File.separatorChar == '/' then relativePath else relativePath.replace('/', File.separatorChar) PlainFile.toPlainFile(Paths.get(s"${file.absolutePath}!$pathInJar")) @@ -293,5 +291,5 @@ class ClassfileWriters(frontendAccess: PostProcessorFrontendAccess) { } /** Can't output a file due to the state of the file system. */ - class FileConflictException(msg: String, cause: Throwable = null) extends IOException(msg, cause) + class FileConflictException(msg: String, cause: Throwable | Null = null) extends IOException(msg, cause) } diff --git a/compiler/src/dotty/tools/backend/jvm/CodeGen.scala b/compiler/src/dotty/tools/backend/jvm/CodeGen.scala index d4843cd56639..be86f704fa41 100644 --- a/compiler/src/dotty/tools/backend/jvm/CodeGen.scala +++ b/compiler/src/dotty/tools/backend/jvm/CodeGen.scala @@ -1,6 +1,5 @@ package dotty.tools.backend.jvm -import scala.language.unsafeNulls import dotty.tools.dotc.CompilationUnit import dotty.tools.dotc.ast.Trees.{PackageDef, ValDef} @@ -71,7 +70,7 @@ class CodeGen(val int: DottyBackendInterface, val primitives: DottyPrimitives)( val tastyAttrNode = if (mirrorClassNode ne null) mirrorClassNode else mainClassNode genTastyAndSetAttributes(sym, tastyAttrNode) - def registerGeneratedClass(classNode: ClassNode, isArtifact: Boolean): Unit = + def registerGeneratedClass(classNode: ClassNode | Null, isArtifact: Boolean): Unit = if classNode ne null then generatedClasses += GeneratedClass(classNode, sourceClassName = sym.javaClassName, @@ -131,7 +130,7 @@ class CodeGen(val int: DottyBackendInterface, val primitives: DottyPrimitives)( } clsFile => { val className = cls.name.replace('/', '.') - if (ctx.compilerCallback != null) + if (ctx.compilerCallback ne null) ctx.compilerCallback.onClassGenerated(sourceFile, convertAbstractFile(clsFile), className) ctx.withIncCallback: cb => diff --git a/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala b/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala index cab17b31c3f3..1cd83dba707a 100644 --- a/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala +++ b/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala @@ -1,6 +1,5 @@ package dotty.tools.backend.jvm -import scala.language.unsafeNulls import dotty.tools.dotc.ast.tpd import dotty.tools.dotc.core.Flags.* @@ -44,14 +43,14 @@ class DottyBackendInterface(val superCallsMap: ReadOnlyMap[Symbol, List[ClassSym object DesugaredSelect extends DeconstructorCommon[tpd.Tree] { - var desugared: tpd.Select = null + var desugared: tpd.Select | Null = null override def isEmpty: Boolean = desugared eq null - def _1: Tree = desugared.qualifier + def _1: Tree = desugared.nn.qualifier - def _2: Name = desugared.name + def _2: Name = desugared.nn.name override def unapply(s: tpd.Tree): this.type = { s match { @@ -69,17 +68,17 @@ class DottyBackendInterface(val superCallsMap: ReadOnlyMap[Symbol, List[ClassSym } object ArrayValue extends DeconstructorCommon[tpd.JavaSeqLiteral] { - def _1: Type = field.tpe match { + def _1: Type = field.nn.tpe match { case JavaArrayType(elem) => elem case _ => - report.error(em"JavaSeqArray with type ${field.tpe} reached backend: $field", ctx.source.atSpan(field.span)) + report.error(em"JavaSeqArray with type ${field.nn.tpe} reached backend: $field", ctx.source.atSpan(field.nn.span)) UnspecifiedErrorType } - def _2: List[Tree] = 
field.elems + def _2: List[Tree] = field.nn.elems } - abstract class DeconstructorCommon[T >: Null <: AnyRef] { - var field: T = null + abstract class DeconstructorCommon[T <: AnyRef] { + var field: T | Null = null def get: this.type = this def isEmpty: Boolean = field eq null def isDefined = !isEmpty diff --git a/compiler/src/dotty/tools/backend/jvm/GeneratedClassHandler.scala b/compiler/src/dotty/tools/backend/jvm/GeneratedClassHandler.scala index bf2ae9a131aa..70db11fc7029 100644 --- a/compiler/src/dotty/tools/backend/jvm/GeneratedClassHandler.scala +++ b/compiler/src/dotty/tools/backend/jvm/GeneratedClassHandler.scala @@ -14,7 +14,6 @@ import scala.util.control.NonFatal import dotty.tools.dotc.core.Phases import dotty.tools.dotc.core.Decorators.em -import scala.language.unsafeNulls import scala.compiletime.uninitialized /** @@ -189,4 +188,4 @@ final private class CompilationUnitInPostProcess(private var classes: List[Gener var task: Future[Unit] = uninitialized val bufferedReporting = new PostProcessorFrontendAccess.BufferingBackendReporting() -} \ No newline at end of file +} diff --git a/compiler/src/dotty/tools/backend/jvm/PostProcessor.scala b/compiler/src/dotty/tools/backend/jvm/PostProcessor.scala index 9f172806a3b5..e0910460ab0e 100644 --- a/compiler/src/dotty/tools/backend/jvm/PostProcessor.scala +++ b/compiler/src/dotty/tools/backend/jvm/PostProcessor.scala @@ -36,7 +36,7 @@ class PostProcessor(val frontendAccess: PostProcessorFrontendAccess, val bTypes: setInnerClasses(classNode) serializeClass(classNode) catch - case e: java.lang.RuntimeException if e.getMessage != null && e.getMessage.nn.contains("too large!") => + case e: java.lang.RuntimeException if e.getMessage != null && e.getMessage.contains("too large!") => backendReporting.error(em"Could not write class $internalName because it exceeds JVM code size limits. 
${e.getMessage}") null case ex: Throwable => @@ -58,8 +58,8 @@ class PostProcessor(val frontendAccess: PostProcessorFrontendAccess, val bTypes: } private def warnCaseInsensitiveOverwrite(clazz: GeneratedClass) = { - val name = clazz.classNode.name.nn - val lowerCaseJavaName = name.nn.toLowerCase + val name = clazz.classNode.name + val lowerCaseJavaName = name.toLowerCase val clsPos = clazz.position caseInsensitively.putIfAbsent(lowerCaseJavaName, (name, clsPos)) match { case null => () @@ -71,7 +71,7 @@ class PostProcessor(val frontendAccess: PostProcessorFrontendAccess, val bTypes: val locationAddendum = if pos1.source.path == pos2.source.path then "" else s" (defined in ${pos2.source.file.name})" - def nicify(name: String): String = name.replace('/', '.').nn + def nicify(name: String): String = name.replace('/', '.') if name1 == name2 then backendReporting.error( em"${nicify(name1)} and ${nicify(name2)} produce classes that overwrite one another", pos1) diff --git a/compiler/src/dotty/tools/backend/jvm/PostProcessorFrontendAccess.scala b/compiler/src/dotty/tools/backend/jvm/PostProcessorFrontendAccess.scala index e1b2120fa848..3e28d2a949cb 100644 --- a/compiler/src/dotty/tools/backend/jvm/PostProcessorFrontendAccess.scala +++ b/compiler/src/dotty/tools/backend/jvm/PostProcessorFrontendAccess.scala @@ -146,7 +146,7 @@ object PostProcessorFrontendAccess { override def backendReporting: BackendReporting = { val local = localReporter.get() if local eq null then directBackendReporting - else local.nn + else local } override object directBackendReporting extends BackendReporting { diff --git a/compiler/src/dotty/tools/backend/jvm/Primitives.scala b/compiler/src/dotty/tools/backend/jvm/Primitives.scala index c9ddfeab24e1..b173736b946c 100644 --- a/compiler/src/dotty/tools/backend/jvm/Primitives.scala +++ b/compiler/src/dotty/tools/backend/jvm/Primitives.scala @@ -150,13 +150,12 @@ object Primitives { case object XOR extends LogicalOp /** Signals the beginning of a series of concatenations. - * On the JVM platform, it should create a new StringBuffer - */ + * On the JVM platform, it should create a new StringBuilder. + */ case object StartConcat extends Primitive - /** - * type: (buf) => STR - * jvm : It should turn the StringBuffer into a String. + /** type: (buf) => STR + * jvm : It should turn the StringBuilder into a String. 
*/ case object EndConcat extends Primitive diff --git a/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala b/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala index deaad2d51dbc..8ffc9637a001 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala @@ -26,7 +26,7 @@ import dotty.tools.dotc.util.SourcePosition import dotty.tools.dotc.report import dotty.tools.sjs.ir -import dotty.tools.sjs.ir.{ClassKind, Position, Names => jsNames, Trees => js, Types => jstpe} +import dotty.tools.sjs.ir.{ClassKind, Position, Trees => js, Types => jstpe, WellKnownNames => jswkn} import dotty.tools.sjs.ir.Names.{ClassName, LocalName, MethodName, SimpleMethodName} import dotty.tools.sjs.ir.OriginalName import dotty.tools.sjs.ir.OriginalName.NoOriginalName @@ -133,7 +133,7 @@ class JSCodeGen()(using genCtx: Context) { def currentThisType: jstpe.Type = { currentThisTypeNullable match { case tpe @ jstpe.ClassType(cls, _) => - jstpe.BoxedClassToPrimType.getOrElse(cls, tpe.toNonNullable) + jswkn.BoxedClassToPrimType.getOrElse(cls, tpe.toNonNullable) case tpe @ jstpe.AnyType => // We are in a JS class, in which even `this` is nullable tpe @@ -424,7 +424,7 @@ class JSCodeGen()(using genCtx: Context) { val staticInitializerStats = reflectInit ::: staticModuleInit if (staticInitializerStats.nonEmpty) - List(genStaticConstructorWithStats(ir.Names.StaticInitializerName, js.Block(staticInitializerStats))) + List(genStaticConstructorWithStats(jswkn.StaticInitializerName, js.Block(staticInitializerStats))) else Nil } @@ -453,7 +453,7 @@ class JSCodeGen()(using genCtx: Context) { originalName, ClassKind.Class, None, - Some(js.ClassIdent(ir.Names.ObjectClass)), + Some(js.ClassIdent(jswkn.ObjectClass)), Nil, None, None, @@ -574,7 +574,7 @@ class JSCodeGen()(using genCtx: Context) { if (staticFields.nonEmpty) { generatedMethods += - genStaticConstructorWithStats(ir.Names.ClassInitializerName, genLoadModule(companionModuleClass)) + genStaticConstructorWithStats(jswkn.ClassInitializerName, genLoadModule(companionModuleClass)) } (staticFields, staticExports) @@ -1000,7 +1000,7 @@ class JSCodeGen()(using genCtx: Context) { val fqcnArg = js.StringLiteral(sym.fullName.toString) val runtimeClassArg = js.ClassOf(toTypeRef(sym.info)) val loadModuleFunArg = - js.Closure(arrow = true, Nil, Nil, None, genLoadModule(sym), Nil) + js.Closure(js.ClosureFlags.arrow, Nil, Nil, None, jstpe.AnyType, genLoadModule(sym), Nil) val stat = genApplyMethod( genLoadModule(jsdefn.ReflectModule), @@ -1035,7 +1035,7 @@ class JSCodeGen()(using genCtx: Context) { val paramTypesArray = js.JSArrayConstr(parameterTypes) - val newInstanceFun = js.Closure(arrow = true, Nil, formalParams, None, { + val newInstanceFun = js.Closure(js.ClosureFlags.arrow, Nil, formalParams, None, jstpe.AnyType, { js.New(encodeClassName(sym), encodeMethodSym(ctor), actualParams) }, Nil) @@ -1557,7 +1557,7 @@ class JSCodeGen()(using genCtx: Context) { def jsParams = params.map(genParamDef(_)) - if (primitives.isPrimitive(sym)) { + if (primitives.isPrimitive(sym) && sym != defn.newArrayMethod) { None } else if (sym.is(Deferred) && currentClassSym.isNonNativeJSClass) { // scala-js/#4409: Do not emit abstract methods in non-native JS classes @@ -2389,7 +2389,7 @@ class JSCodeGen()(using genCtx: Context) { // Make new class def with static members val newClassDef = { implicit val pos = originalClassDef.pos - val parent = js.ClassIdent(jsNames.ObjectClass) + val parent = js.ClassIdent(jswkn.ObjectClass) 
js.ClassDef(originalClassDef.name, originalClassDef.originalName, ClassKind.AbstractJSType, None, Some(parent), interfaces = Nil, jsSuperClass = None, jsNativeLoadSpec = None, fields = Nil, @@ -2427,7 +2427,7 @@ class JSCodeGen()(using genCtx: Context) { js.VarRef(selfIdent.name)(jstpe.AnyType) def memberLambda(params: List[js.ParamDef], restParam: Option[js.ParamDef], body: js.Tree)(implicit pos: ir.Position): js.Closure = - js.Closure(arrow = false, captureParams = Nil, params, restParam, body, captureValues = Nil) + js.Closure(js.ClosureFlags.function, captureParams = Nil, params, restParam, jstpe.AnyType, body, captureValues = Nil) val fieldDefinitions = jsFieldDefs.toList.map { fdef => implicit val pos = fdef.pos @@ -2539,7 +2539,8 @@ class JSCodeGen()(using genCtx: Context) { beforeSuper ::: superCall ::: afterSuper } - val closure = js.Closure(arrow = true, jsClassCaptures, Nil, None, + // Wrap everything in a lambda, for namespacing + val closure = js.Closure(js.ClosureFlags.arrow, jsClassCaptures, Nil, None, jstpe.AnyType, js.Block(inlinedCtorStats, selfRef), jsSuperClassValue :: args) js.JSFunctionApply(closure, Nil) } @@ -3350,7 +3351,7 @@ class JSCodeGen()(using genCtx: Context) { // Sanity check: we can handle Ints and Strings (including `null`s), but nothing else genSelector.tpe match { - case jstpe.IntType | jstpe.ClassType(jsNames.BoxedStringClass, _) | jstpe.NullType | jstpe.NothingType => + case jstpe.IntType | jstpe.ClassType(jswkn.BoxedStringClass, _) | jstpe.NullType | jstpe.NothingType => // ok case _ => abortMatch(s"Invalid selector type ${genSelector.tpe}") @@ -3514,6 +3515,8 @@ class JSCodeGen()(using genCtx: Context) { atPhase(elimRepeatedPhase)(samMethod.info.paramInfoss.flatten.exists(_.isRepeatedParam)) } } + val isFunctionXXL = + funInterfaceSym.name == tpnme.FunctionXXL && funInterfaceSym.owner == defn.ScalaRuntimePackageClass val formalParamNames = sym.info.paramNamess.flatten.drop(envSize) val formalParamTypes = sym.info.paramInfoss.flatten.drop(envSize) @@ -3523,8 +3526,11 @@ class JSCodeGen()(using genCtx: Context) { val formalAndActualParams = formalParamNames.lazyZip(formalParamTypes).lazyZip(formalParamRepeateds).map { (name, tpe, repeated) => + val formalTpe = + if (isFunctionXXL) jstpe.ArrayType(ObjectArrayTypeRef, nullable = true) + else jstpe.AnyType val formalParam = js.ParamDef(freshLocalIdent(name), - OriginalName(name.toString), jstpe.AnyType, mutable = false) + OriginalName(name.toString), formalTpe, mutable = false) val actualParam = if (repeated) genJSArrayToVarArgs(formalParam.ref)(tree.sourcePos) else unbox(formalParam.ref, tpe) @@ -3559,10 +3565,11 @@ class JSCodeGen()(using genCtx: Context) { if (isThisFunction) { val thisParam :: otherParams = formalParams: @unchecked js.Closure( - arrow = false, + js.ClosureFlags.function, formalCaptures, otherParams, restParam, + jstpe.AnyType, js.Block( js.VarDef(thisParam.name, thisParam.originalName, thisParam.ptpe, mutable = false, @@ -3570,23 +3577,32 @@ class JSCodeGen()(using genCtx: Context) { genBody), actualCaptures) } else { - val closure = js.Closure(arrow = true, formalCaptures, formalParams, restParam, genBody, actualCaptures) + val closure = js.Closure(js.ClosureFlags.typed, formalCaptures, + formalParams, restParam, jstpe.AnyType, genBody, actualCaptures) if (!funInterfaceSym.exists || defn.isFunctionClass(funInterfaceSym)) { val formalCount = formalParams.size - val cls = ClassName("scala.scalajs.runtime.AnonFunction" + formalCount) - val ctorName = MethodName.constructor( - 
jstpe.ClassRef(ClassName("scala.scalajs.js.Function" + formalCount)) :: Nil) - js.New(cls, js.MethodIdent(ctorName), List(closure)) - } else if (funInterfaceSym.name == tpnme.FunctionXXL && funInterfaceSym.owner == defn.ScalaRuntimePackageClass) { - val cls = ClassName("scala.scalajs.runtime.AnonFunctionXXL") - val ctorName = MethodName.constructor( - jstpe.ClassRef(ClassName("scala.scalajs.js.Function1")) :: Nil) - js.New(cls, js.MethodIdent(ctorName), List(closure)) + val descriptor = js.NewLambda.Descriptor( + superClass = encodeClassName(defn.AbstractFunctionClass(formalCount)), + interfaces = Nil, + methodName = MethodName(applySimpleMethodName, List.fill(formalCount)(jswkn.ObjectRef), jswkn.ObjectRef), + paramTypes = List.fill(formalCount)(jstpe.AnyType), + resultType = jstpe.AnyType + ) + js.NewLambda(descriptor, closure)(encodeClassType(defn.FunctionSymbol(formalCount)).toNonNullable) + } else if (isFunctionXXL) { + val descriptor = js.NewLambda.Descriptor( + superClass = jswkn.ObjectClass, + interfaces = List(encodeClassName(defn.FunctionXXLClass)), + methodName = MethodName(applySimpleMethodName, List(ObjectArrayTypeRef), jswkn.ObjectRef), + paramTypes = List(jstpe.ArrayType(ObjectArrayTypeRef, nullable = true)), + resultType = jstpe.AnyType + ) + js.NewLambda(descriptor, closure)(encodeClassType(funInterfaceSym).toNonNullable) } else { assert(funInterfaceSym.isJSType, s"Invalid functional interface $funInterfaceSym reached the back-end") - closure + closure.copy(flags = js.ClosureFlags.arrow) } } } @@ -3699,8 +3715,8 @@ class JSCodeGen()(using genCtx: Context) { } private def genThrowClassCastException()(implicit pos: Position): js.Tree = { - js.UnaryOp(js.UnaryOp.Throw, js.New(jsNames.ClassCastExceptionClass, - js.MethodIdent(jsNames.NoArgConstructorName), Nil)) + js.UnaryOp(js.UnaryOp.Throw, js.New(jswkn.ClassCastExceptionClass, + js.MethodIdent(jswkn.NoArgConstructorName), Nil)) } /** Gen JS code for an isInstanceOf test (for reference types only) */ @@ -3987,7 +4003,7 @@ class JSCodeGen()(using genCtx: Context) { case arg: js.JSGlobalRef => js.JSTypeOfGlobalRef(arg) case _ => js.JSUnaryOp(js.JSUnaryOp.typeof, arg) } - js.AsInstanceOf(typeofExpr, jstpe.ClassType(jsNames.BoxedStringClass, nullable = true)) + js.AsInstanceOf(typeofExpr, jstpe.ClassType(jswkn.BoxedStringClass, nullable = true)) case STRICT_EQ => // js.special.strictEquals(arg1, arg2) @@ -4235,7 +4251,7 @@ class JSCodeGen()(using genCtx: Context) { "literal classOf[T] expressions (typically compiler-generated). 
" + "Other uses are not supported in Scala.js.", otherTree.sourcePos) - (jstpe.AnyType, jstpe.ClassRef(jsNames.ObjectClass)) + (jstpe.AnyType, jstpe.ClassRef(jswkn.ObjectClass)) } // Gen the actual args, downcasting them to the formal param types @@ -4870,16 +4886,17 @@ object JSCodeGen { private val JSObjectClassName = ClassName("scala.scalajs.js.Object") private val JavaScriptExceptionClassName = ClassName("scala.scalajs.js.JavaScriptException") - private val ObjectClassRef = jstpe.ClassRef(ir.Names.ObjectClass) + private val ObjectArrayTypeRef = jstpe.ArrayTypeRef(jswkn.ObjectRef, 1) + private val applySimpleMethodName = SimpleMethodName("apply") private val newSimpleMethodName = SimpleMethodName("new") - private val selectedValueMethodName = MethodName("selectedValue", Nil, ObjectClassRef) + private val selectedValueMethodName = MethodName("selectedValue", Nil, jswkn.ObjectRef) private val JLRArrayNewInstanceMethodName = - MethodName("newInstance", List(jstpe.ClassRef(jsNames.ClassClass), jstpe.ArrayTypeRef(jstpe.IntRef, 1)), ObjectClassRef) + MethodName("newInstance", List(jstpe.ClassRef(jswkn.ClassClass), jstpe.ArrayTypeRef(jstpe.IntRef, 1)), jswkn.ObjectRef) - private val ObjectArgConstructorName = MethodName.constructor(List(ObjectClassRef)) + private val ObjectArgConstructorName = MethodName.constructor(List(jswkn.ObjectRef)) private val thisOriginalName = OriginalName("this") diff --git a/compiler/src/dotty/tools/backend/sjs/JSEncoding.scala b/compiler/src/dotty/tools/backend/sjs/JSEncoding.scala index b3d28b73d81c..959a05fd6c43 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSEncoding.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSEncoding.scala @@ -16,7 +16,7 @@ import StdNames.* import dotty.tools.dotc.transform.sjs.JSSymUtils.* import dotty.tools.sjs.ir -import dotty.tools.sjs.ir.{Trees => js, Types => jstpe} +import dotty.tools.sjs.ir.{Trees => js, Types => jstpe, WellKnownNames => jswkn} import dotty.tools.sjs.ir.Names.{LocalName, LabelName, SimpleFieldName, FieldName, SimpleMethodName, MethodName, ClassName} import dotty.tools.sjs.ir.OriginalName import dotty.tools.sjs.ir.OriginalName.NoOriginalName @@ -235,7 +235,7 @@ object JSEncoding { def encodeDynamicImportForwarderIdent(params: List[Symbol])(using Context, ir.Position): js.MethodIdent = { val paramTypeRefs = params.map(sym => paramOrResultTypeRef(sym.info)) - val resultTypeRef = jstpe.ClassRef(ir.Names.ObjectClass) + val resultTypeRef = jstpe.ClassRef(jswkn.ObjectClass) val methodName = MethodName(dynamicImportForwarderSimpleName, paramTypeRefs, resultTypeRef) js.MethodIdent(methodName) } @@ -282,7 +282,7 @@ object JSEncoding { * - scala.Null to scala.runtime.Null$. 
*/ if (sym1 == defn.BoxedUnitClass) - ir.Names.BoxedUnitClass + jswkn.BoxedUnitClass else if (sym1 == defn.NothingClass) ScalaRuntimeNothingClassName else if (sym1 == defn.NullClass) @@ -326,6 +326,9 @@ object JSEncoding { case typeRef: jstpe.ArrayTypeRef => jstpe.ArrayType(typeRef, nullable = true) + + case typeRef: jstpe.TransientTypeRef => + throw AssertionError(s"Unexpected transient type ref $typeRef for ${typeRefInternal._2}") } } @@ -359,7 +362,7 @@ object JSEncoding { */ def nonClassTypeRefToTypeRef(sym: Symbol): (jstpe.TypeRef, Symbol) = { //assert(sym.isType && isCompilingArray, sym) - (jstpe.ClassRef(ir.Names.ObjectClass), defn.ObjectClass) + (jstpe.ClassRef(jswkn.ObjectClass), defn.ObjectClass) } tp.widenDealias match { diff --git a/compiler/src/dotty/tools/backend/sjs/JSExportsGen.scala b/compiler/src/dotty/tools/backend/sjs/JSExportsGen.scala index 42205f9b70c2..425710c6be9a 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSExportsGen.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSExportsGen.scala @@ -22,8 +22,7 @@ import TypeErasure.ErasedValueType import dotty.tools.dotc.util.{SourcePosition, SrcPos} import dotty.tools.dotc.report -import dotty.tools.sjs.ir.{Position, Names => jsNames, Trees => js, Types => jstpe} -import dotty.tools.sjs.ir.Names.DefaultModuleID +import dotty.tools.sjs.ir.{Position, Names => jsNames, Trees => js, Types => jstpe, WellKnownNames => jswkn} import dotty.tools.sjs.ir.OriginalName.NoOriginalName import dotty.tools.sjs.ir.Position.NoPosition import dotty.tools.sjs.ir.Trees.OptimizerHints @@ -87,7 +86,7 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { symForAnnot.annotations.collect { case annot if annot.symbol == jsdefn.JSExportTopLevelAnnot => val jsName = annot.argumentConstantString(0).get - val moduleID = annot.argumentConstantString(1).getOrElse(DefaultModuleID) + val moduleID = annot.argumentConstantString(1).getOrElse(jswkn.DefaultModuleID) TopLevelExportInfo(moduleID, jsName)(annot.tree.sourcePos) } } @@ -947,8 +946,8 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { case jstpe.FloatType => PrimitiveTypeTest(jstpe.FloatType, 7) case jstpe.DoubleType => PrimitiveTypeTest(jstpe.DoubleType, 8) - case jstpe.ClassType(Names.BoxedUnitClass, _) => PrimitiveTypeTest(jstpe.UndefType, 0) - case jstpe.ClassType(Names.BoxedStringClass, _) => PrimitiveTypeTest(jstpe.StringType, 9) + case jstpe.ClassType(jswkn.BoxedUnitClass, _) => PrimitiveTypeTest(jstpe.UndefType, 0) + case jstpe.ClassType(jswkn.BoxedStringClass, _) => PrimitiveTypeTest(jstpe.StringType, 9) case jstpe.ClassType(_, _) => InstanceOfTypeTest(tpe) case jstpe.ArrayType(_, _) => InstanceOfTypeTest(tpe) diff --git a/compiler/src/dotty/tools/dotc/Bench.scala b/compiler/src/dotty/tools/dotc/Bench.scala index cbc490919cfe..26259dcaa28b 100644 --- a/compiler/src/dotty/tools/dotc/Bench.scala +++ b/compiler/src/dotty/tools/dotc/Bench.scala @@ -30,7 +30,7 @@ object Bench extends Driver: println(s"time elapsed: ${times(curRun)}ms") if ctx.settings.Xprompt.value || waitAfter == curRun + 1 then print("hit to continue >") - System.in.nn.read() + System.in.read() reporter def extractNumArg(args: Array[String], name: String, default: Int = 1): (Int, Array[String]) = { diff --git a/compiler/src/dotty/tools/dotc/CompilationUnit.scala b/compiler/src/dotty/tools/dotc/CompilationUnit.scala index 9c985ecd84b3..b627e149e5fb 100644 --- a/compiler/src/dotty/tools/dotc/CompilationUnit.scala +++ b/compiler/src/dotty/tools/dotc/CompilationUnit.scala @@ -158,10 +158,10 @@ 
object CompilationUnit { unit1 } - /** Create a compilation unit corresponding to an in-memory String. + /** Create a compilation unit corresponding to an in-memory String. * Used for `compiletime.testing.typeChecks`. */ - def apply(name: String, source: String)(using Context): CompilationUnit = { + def apply(name: String, source: String): CompilationUnit = { val src = SourceFile.virtual(name = name, content = source, maybeIncomplete = false) new CompilationUnit(src, null) } diff --git a/compiler/src/dotty/tools/dotc/Driver.scala b/compiler/src/dotty/tools/dotc/Driver.scala index 98abe2ac6c38..a401bb689db0 100644 --- a/compiler/src/dotty/tools/dotc/Driver.scala +++ b/compiler/src/dotty/tools/dotc/Driver.scala @@ -38,7 +38,7 @@ class Driver { finish(compiler, run) catch case ex: FatalError => - report.error(ex.getMessage.nn) // signals that we should fail compilation. + report.error(ex.getMessage) // signals that we should fail compilation. case ex: Throwable if ctx.usedBestEffortTasty => report.bestEffortError(ex, "Some best-effort tasty files were not able to be read.") throw ex @@ -117,7 +117,7 @@ class Driver { .distinct val ctx1 = ctx.fresh val fullClassPath = - (newEntries :+ ctx.settings.classpath.value).mkString(java.io.File.pathSeparator.nn) + (newEntries :+ ctx.settings.classpath.value).mkString(java.io.File.pathSeparator) ctx1.setSetting(ctx1.settings.classpath, fullClassPath) else ctx diff --git a/compiler/src/dotty/tools/dotc/Run.scala b/compiler/src/dotty/tools/dotc/Run.scala index d0fe07303e41..e6f117c9f328 100644 --- a/compiler/src/dotty/tools/dotc/Run.scala +++ b/compiler/src/dotty/tools/dotc/Run.scala @@ -42,7 +42,8 @@ import dotty.tools.dotc.util.chaining.* import java.util.{Timer, TimerTask} /** A compiler run. Exports various methods to compile source files */ -class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with ConstraintRunInfo { +class Run(comp: Compiler, ictx: Context) +extends ImplicitRunInfo, ConstraintRunInfo, cc.CaptureRunInfo { /** Default timeout to stop looking for further implicit suggestions, in ms. 
* This is usually for the first import suggestion; subsequent suggestions @@ -235,7 +236,7 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint try trackProgress(_.cancel()) finally - Thread.currentThread().nn.interrupt() + Thread.currentThread().interrupt() private def doAdvancePhase(currentPhase: Phase, wasRan: Boolean)(using Context): Unit = trackProgress: progress => @@ -519,6 +520,7 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint /** Print summary of warnings and errors encountered */ def printSummary(): Unit = { printMaxConstraint() + printMaxPath() val r = runContext.reporter if !r.errorsReported then profile.printSummary() @@ -529,6 +531,7 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint override def reset(): Unit = { super[ImplicitRunInfo].reset() super[ConstraintRunInfo].reset() + super[CaptureRunInfo].reset() myCtx = null myUnits = Nil myUnitsCached = Nil diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index 1c86ba069433..8401d255155c 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -642,7 +642,7 @@ object desugar { .withMods(mods & (GivenOrImplicit | Erased | hasDefault | Tracked) | Param) } - /** Desugar type def (not param): Under x.moduliity this can expand + /** Desugar type def (not param): Under x.modularity this can expand * context bounds, which are expanded to evidence ValDefs. These will * ultimately map to deferred givens. */ @@ -652,6 +652,8 @@ object desugar { tdef, evidenceBuf, (tdef.mods.flags.toTermFlags & AccessFlags) | Lazy | DeferredGivenFlags, inventGivenName, Nil) + if tdef.mods.flags.is(Into, butNot = Opaque) then + report.error(ModifierNotAllowedForDefinition(Into), flagSourcePos(tdef, Into)) if evidenceBuf.isEmpty then result else Thicket(result :: evidenceBuf.toList) /** The expansion of a class definition. See inline comments for what is involved */ @@ -703,7 +705,7 @@ object desugar { def isNonEnumCase = !isEnumCase && (isCaseClass || isCaseObject) val isValueClass = parents.nonEmpty && isAnyVal(parents.head) // This is not watertight, but `extends AnyVal` will be replaced by `inline` later. 
- val caseClassInScala2Library = isCaseClass && ctx.settings.YcompileScala2Library.value + val caseClassInScala2Library = isCaseClass && Feature.shouldBehaveAsScala2 val originalTparams = constr1.leadingTypeParams val originalVparamss = asTermOnly(constr1.trailingParamss) @@ -922,7 +924,7 @@ object desugar { val copyRestParamss = derivedVparamss.tail.nestedMap(vparam => cpy.ValDef(vparam)(rhs = EmptyTree)) var flags = Synthetic | constr1.mods.flags & copiedAccessFlags - if ctx.settings.YcompileScala2Library.value then flags &~= Private + if Feature.shouldBehaveAsScala2 then flags &~= Private DefDef( nme.copy, joinParams(derivedTparams, copyFirstParams :: copyRestParamss), @@ -983,7 +985,7 @@ object desugar { else { val appMods = var flags = Synthetic | constr1.mods.flags & copiedAccessFlags - if ctx.settings.YcompileScala2Library.value then flags &~= Private + if Feature.shouldBehaveAsScala2 then flags &~= Private Modifiers(flags).withPrivateWithin(constr1.mods.privateWithin) val appParamss = derivedVparamss.nestedZipWithConserve(constrVparamss)((ap, cp) => @@ -1066,7 +1068,7 @@ object desugar { paramss // drop leading () that got inserted by class // TODO: drop this once we do not silently insert empty class parameters anymore case paramss => paramss - val finalFlag = if ctx.settings.YcompileScala2Library.value then EmptyFlags else Final + val finalFlag = if Feature.shouldBehaveAsScala2 then EmptyFlags else Final // implicit wrapper is typechecked in same scope as constructor, so // we can reuse the constructor parameters; no derived params are needed. DefDef( @@ -1868,18 +1870,18 @@ object desugar { /** Map n-ary function `(x1: T1, ..., xn: Tn) => body` where n != 1 to unary function as follows: * * (x$1: (T1, ..., Tn)) => { - * def x1: T1 = x$1._1 + * val x1: T1 = x$1._1 * ... - * def xn: Tn = x$1._n + * val xn: Tn = x$1._n * body * } * * or if `isGenericTuple` * * (x$1: (T1, ... Tn) => { - * def x1: T1 = x$1.apply(0) + * val x1: T1 = x$1.apply(0) * ... - * def xn: Tn = x$1.apply(n-1) + * val xn: Tn = x$1.apply(n-1) * body * } * @@ -1953,9 +1955,9 @@ object desugar { /** Create tree for for-comprehension `` or * `` where mapName and flatMapName are chosen * corresponding to whether this is a for-do or a for-yield. - * If sourceVersion >= 3.7 are enabled, the creation performs the following rewrite rules: + * If betterFors are enabled, the creation performs the following rewrite rules: * - * 1. if sourceVersion >= 3.7: + * 1. if betterFors is enabled: * * for () do E ==> E * or @@ -1986,13 +1988,13 @@ object desugar { * ==> * for (P <- G.withFilter (P => E); ...) ... * - * 6. For any N, if sourceVersion >= 3.7: + * 6. For any N, if betterFors is enabled: * * for (P <- G; P_1 = E_1; ... P_N = E_N; P1 <- G1; ...) ... * ==> * G.flatMap (P => for (P_1 = E_1; ... P_N = E_N; ...)) * - * 7. For any N, if sourceVersion >= 3.7: + * 7. For any N, if betterFors is enabled: * * for (P <- G; P_1 = E_1; ... P_N = E_N) ... * ==> @@ -2013,7 +2015,7 @@ object desugar { * If any of the P_i are variable patterns, the corresponding `x_i @ P_i` is not generated * and the variable constituting P_i is used instead of x_i * - * 9. For any N, if sourceVersion >= 3.7: + * 9. For any N, if betterFors is enabled: * * for (P_1 = E_1; ... P_N = E_N; ...) 
* ==> @@ -2157,7 +2159,7 @@ object desugar { case _ => false def markTrailingMap(aply: Apply, gen: GenFrom, selectName: TermName): Unit = - if sourceVersion.isAtLeast(`3.7`) + if sourceVersion.enablesBetterFors && selectName == mapName && gen.checkMode != GenCheckMode.Filtered // results of withFilter have the wrong type && (deepEquals(gen.pat, body) || deepEquals(body, Tuple(Nil))) @@ -2165,7 +2167,7 @@ object desugar { aply.putAttachment(TrailingForMap, ()) enums match { - case Nil if sourceVersion.isAtLeast(`3.7`) => body + case Nil if sourceVersion.enablesBetterFors => body case (gen: GenFrom) :: Nil => val aply = Apply(rhsSelect(gen, mapName), makeLambda(gen, body)) markTrailingMap(aply, gen, mapName) @@ -2174,7 +2176,7 @@ object desugar { val cont = makeFor(mapName, flatMapName, rest, body) Apply(rhsSelect(gen, flatMapName), makeLambda(gen, cont)) case (gen: GenFrom) :: rest - if sourceVersion.isAtLeast(`3.7`) + if sourceVersion.enablesBetterFors && rest.dropWhile(_.isInstanceOf[GenAlias]).headOption.forall(e => e.isInstanceOf[GenFrom]) // possible aliases followed by a generator or end of for && !rest.takeWhile(_.isInstanceOf[GenAlias]).exists(a => isNestedGivenPattern(a.asInstanceOf[GenAlias].pat)) => val cont = makeFor(mapName, flatMapName, rest, body) @@ -2202,9 +2204,9 @@ object desugar { makeFor(mapName, flatMapName, vfrom1 :: rest1, body) case (gen: GenFrom) :: test :: rest => val filtered = Apply(rhsSelect(gen, nme.withFilter), makeLambda(gen, test)) - val genFrom = GenFrom(gen.pat, filtered, if sourceVersion.isAtLeast(`3.7`) then GenCheckMode.Filtered else GenCheckMode.Ignore) + val genFrom = GenFrom(gen.pat, filtered, if sourceVersion.enablesBetterFors then GenCheckMode.Filtered else GenCheckMode.Ignore) makeFor(mapName, flatMapName, genFrom :: rest, body) - case GenAlias(_, _) :: _ if sourceVersion.isAtLeast(`3.7`) => + case GenAlias(_, _) :: _ if sourceVersion.enablesBetterFors => val (valeqs, rest) = enums.span(_.isInstanceOf[GenAlias]) val pats = valeqs.map { case GenAlias(pat, _) => pat } val rhss = valeqs.map { case GenAlias(_, rhs) => rhs } @@ -2262,15 +2264,14 @@ object desugar { New(ref(defn.RepeatedAnnot.typeRef), Nil :: Nil)) else if op.name == nme.CC_REACH then Apply(ref(defn.Caps_reachCapability), t :: Nil) + else if op.name == nme.CC_READONLY then + Apply(ref(defn.Caps_readOnlyCapability), t :: Nil) else assert(ctx.mode.isExpr || ctx.reporter.errorsReported || ctx.mode.is(Mode.Interactive), ctx.mode) Select(t, op.name) case PrefixOp(op, t) => - if op.name == tpnme.into then - Annotated(t, New(ref(defn.IntoAnnot.typeRef), Nil :: Nil)) - else - val nspace = if (ctx.mode.is(Mode.Type)) tpnme else nme - Select(t, nspace.UNARY_PREFIX ++ op.name) + val nspace = if (ctx.mode.is(Mode.Type)) tpnme else nme + Select(t, nspace.UNARY_PREFIX ++ op.name) case ForDo(enums, body) => makeFor(nme.foreach, nme.foreach, enums, body) orElse tree case ForYield(enums, body) => diff --git a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala index 32ab8378ae16..b0a085d596a0 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala @@ -9,6 +9,8 @@ import Annotations.Annotation import NameKinds.ContextBoundParamName import typer.ConstFold import reporting.trace +import config.Feature +import util.SrcPos import Decorators.* import Constants.Constant @@ -264,6 +266,19 @@ trait TreeInfo[T <: Untyped] { self: Trees.Instance[T] => case _ => false } + /** Expression was written `e: 
Unit` to quell warnings. Looks into adapted tree. */ + def isAscribedToUnit(tree: Tree): Boolean = + import typer.Typer.AscribedToUnit + tree.hasAttachment(AscribedToUnit) + || { + def loop(tree: Tree): Boolean = tree match + case Apply(fn, _) => fn.hasAttachment(AscribedToUnit) || loop(fn) + case TypeApply(fn, _) => fn.hasAttachment(AscribedToUnit) || loop(fn) + case Block(_, expr) => expr.hasAttachment(AscribedToUnit) || loop(expr) + case _ => false + loop(tree) + } + /** Does this CaseDef catch Throwable? */ def catchesThrowable(cdef: CaseDef)(using Context): Boolean = catchesAllOf(cdef, defn.ThrowableType) @@ -466,7 +481,7 @@ trait UntypedTreeInfo extends TreeInfo[Untyped] { self: Trees.Instance[Untyped] */ private def defKind(tree: Tree)(using Context): FlagSet = unsplice(tree) match { case EmptyTree | _: Import => NoInitsInterface - case tree: TypeDef if ctx.settings.YcompileScala2Library.value => + case tree: TypeDef if Feature.shouldBehaveAsScala2 => if (tree.isClassDef) EmptyFlags else NoInitsInterface case tree: TypeDef => if (tree.isClassDef) NoInits else NoInitsInterface case tree: DefDef => @@ -479,7 +494,7 @@ trait UntypedTreeInfo extends TreeInfo[Untyped] { self: Trees.Instance[Untyped] NoInitsInterface else if tree.mods.is(Given) && tree.paramss.isEmpty then EmptyFlags // might become a lazy val: TODO: check whether we need to suppress NoInits once we have new lazy val impl - else if ctx.settings.YcompileScala2Library.value then + else if Feature.shouldBehaveAsScala2 then EmptyFlags else NoInits @@ -522,6 +537,10 @@ trait UntypedTreeInfo extends TreeInfo[Untyped] { self: Trees.Instance[Untyped] if id.span == result.span.startPos => Some(result) case _ => None end ImpureByNameTypeTree + + /** The position of the modifier associated with given flag in this definition. */ + def flagSourcePos(mdef: DefTree, flag: FlagSet): SrcPos = + mdef.mods.mods.find(_.flags == flag).getOrElse(mdef).srcPos } trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => @@ -759,7 +778,7 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => */ def isVariableOrGetter(tree: Tree)(using Context): Boolean = { def sym = tree.symbol - def isVar = sym.is(Mutable) + def isVar = sym.isMutableVarOrAccessor def isGetter = mayBeVarGetter(sym) && sym.owner.info.member(sym.name.asTermName.setterName).exists diff --git a/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala b/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala index 98d9a0ca85f6..414b27101b7d 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala @@ -56,7 +56,7 @@ class TreeTypeMap( /** Replace occurrences of `This(oldOwner)` in some prefix of a type * by the corresponding `This(newOwner)`. 
*/ - private val mapOwnerThis = new TypeMap with cc.CaptureSet.IdempotentCaptRefMap { + private val mapOwnerThis = new TypeMap { private def mapPrefix(from: List[Symbol], to: List[Symbol], tp: Type): Type = from match { case Nil => tp case (cls: ClassSymbol) :: from1 => mapPrefix(from1, to.tail, tp.substThis(cls, to.head.thisType)) diff --git a/compiler/src/dotty/tools/dotc/ast/Trees.scala b/compiler/src/dotty/tools/dotc/ast/Trees.scala index fdefc14aadd6..c6cde66374b3 100644 --- a/compiler/src/dotty/tools/dotc/ast/Trees.scala +++ b/compiler/src/dotty/tools/dotc/ast/Trees.scala @@ -34,6 +34,9 @@ object Trees { val SyntheticUnit: Property.StickyKey[Unit] = Property.StickyKey() + /** Property key for marking capture-set variables and members */ + val CaptureVar: Property.StickyKey[Unit] = Property.StickyKey() + /** Trees take a parameter indicating what the type of their `tpe` field * is. Two choices: `Type` or `Untyped`. * Untyped trees have type `Tree[Untyped]`. @@ -741,11 +744,11 @@ object Trees { } /** A tree representing a quote pattern `'{ type binding1; ...; body }` or `'[ type binding1; ...; body ]`. - * `QuotePattern`s are created the type checker when typing an `untpd.Quote` in a pattern context. + * `QuotePattern`s are created by the type checker when typing an `untpd.Quote` in a pattern context. * * `QuotePattern`s are checked are encoded into `unapply`s in the `staging` phase. * - * The `bindings` contain the list of quote pattern type variable definitions (`Bind`s) in the oreder in + * The `bindings` contain the list of quote pattern type variable definitions (`Bind`s) in the order in * which they are defined in the source. * * @param bindings Type variable definitions (`Bind` tree) diff --git a/compiler/src/dotty/tools/dotc/ast/tpd.scala b/compiler/src/dotty/tools/dotc/ast/tpd.scala index ae3ed9fcad3b..2abae103780f 100644 --- a/compiler/src/dotty/tools/dotc/ast/tpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/tpd.scala @@ -842,14 +842,6 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { Closure(tree: Tree)(env, meth, tpt) } - // This is a more fault-tolerant copier that does not cause errors when - // function types in applications are undefined. - // This was called `Inliner.InlineCopier` before 3.6.3. - class ConservativeTreeCopier() extends TypedTreeCopier: - override def Apply(tree: Tree)(fun: Tree, args: List[Tree])(using Context): Apply = - if fun.tpe.widen.exists then super.Apply(tree)(fun, args) - else untpd.cpy.Apply(tree)(fun, args).withTypeUnchecked(tree.tpe) - override def skipTransform(tree: Tree)(using Context): Boolean = tree.tpe.isError implicit class TreeOps[ThisTree <: tpd.Tree](private val tree: ThisTree) extends AnyVal { @@ -1551,7 +1543,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { * @param selectorPredicate A test to find the selector to use. * @return The symbols imported. 
*/ - def importedSymbols(imp: Import, + def importedSymbols(imp: ImportOrExport, selectorPredicate: untpd.ImportSelector => Boolean = util.common.alwaysTrue) (using Context): List[Symbol] = imp.selectors.find(selectorPredicate) match diff --git a/compiler/src/dotty/tools/dotc/ast/untpd.scala b/compiler/src/dotty/tools/dotc/ast/untpd.scala index 2acfc4cf86e3..e9a6e148ce86 100644 --- a/compiler/src/dotty/tools/dotc/ast/untpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/untpd.scala @@ -206,6 +206,8 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { case class Var()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Mutable) + case class Mut()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Mutable) + case class Implicit()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Implicit) case class Given()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Given) @@ -234,6 +236,8 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { case class Tracked()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Tracked) + case class Into()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Into) + /** Used under pureFunctions to mark impure function types `A => B` in `FunctionWithMods` */ case class Impure()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Impure) } @@ -332,6 +336,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { def isEnumCase: Boolean = isEnum && is(Case) def isEnumClass: Boolean = isEnum && !is(Case) + def isMutableVar: Boolean = is(Mutable) && mods.exists(_.isInstanceOf[Mod.Var]) } @sharable val EmptyModifiers: Modifiers = Modifiers() @@ -518,6 +523,9 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { def scalaUnit(implicit src: SourceFile): Select = scalaDot(tpnme.Unit) def scalaAny(implicit src: SourceFile): Select = scalaDot(tpnme.Any) + def capsInternalDot(name: Name)(using SourceFile): Select = + Select(Select(scalaDot(nme.caps), nme.internal), name) + def captureRoot(using Context): Select = Select(scalaDot(nme.caps), nme.CAPTURE_ROOT) @@ -525,15 +533,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { Annotated(parent, New(scalaAnnotationDot(annotName), List(refs))) def makeCapsOf(tp: RefTree)(using Context): Tree = - TypeApply(Select(scalaDot(nme.caps), nme.capsOf), tp :: Nil) - - // Capture set variable `[C^]` becomes: `[C >: CapSet <: CapSet^{cap}]` - def makeCapsBound()(using Context): TypeBoundsTree = - TypeBoundsTree( - Select(scalaDot(nme.caps), tpnme.CapSet), - makeRetaining( - Select(scalaDot(nme.caps), tpnme.CapSet), - Nil, tpnme.retainsCap)) + TypeApply(capsInternalDot(nme.capsOf), tp :: Nil) def makeConstructor(tparams: List[TypeDef], vparamss: List[List[ValDef]], rhs: Tree = EmptyTree)(using Context): DefDef = DefDef(nme.CONSTRUCTOR, joinParams(tparams, vparamss), TypeTree(), rhs) @@ -567,12 +567,6 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { ValDef(nme.syntheticParamName(n), if (tpt == null) TypeTree() else tpt, EmptyTree) .withFlags(flags) - def isInto(t: Tree)(using Context): Boolean = t match - case PrefixOp(Ident(tpnme.into), _) => true - case Function(_, res) => isInto(res) - case Parens(t) => isInto(t) - case _ => false - def lambdaAbstract(params: List[ValDef] | List[TypeDef], tpt: Tree)(using Context): Tree = params match case Nil => tpt diff --git a/compiler/src/dotty/tools/dotc/cc/CCState.scala b/compiler/src/dotty/tools/dotc/cc/CCState.scala new file mode 
100644 index 000000000000..5feae257b97e --- /dev/null +++ b/compiler/src/dotty/tools/dotc/cc/CCState.scala @@ -0,0 +1,166 @@ +package dotty.tools +package dotc +package cc + +import core.* +import CaptureSet.VarState +import collection.mutable +import reporting.Message +import Contexts.Context +import Types.MethodType +import Symbols.Symbol + +/** Capture checking state, which is known to other capture checking components */ +class CCState: + import CCState.* + + // ------ Error diagnostics ----------------------------- + + /** Warnings relating to upper approximations of capture sets with + * existentially bound variables. + */ + val approxWarnings: mutable.ListBuffer[Message] = mutable.ListBuffer() + + // ------ Level handling --------------------------- + + private var curLevel: Level = outermostLevel + + /** The level of the current environment. Levels start at 0 and increase for + * each nested function or class. -1 means the level is undefined. + */ + def currentLevel(using Context): Level = curLevel + + /** Perform `op` in the next inner level */ + inline def inNestedLevel[T](inline op: T)(using Context): T = + val saved = curLevel + curLevel = curLevel.nextInner + try op finally curLevel = saved + + /** Perform `op` in the next inner level unless `p` holds. */ + inline def inNestedLevelUnless[T](inline p: Boolean)(inline op: T)(using Context): T = + val saved = curLevel + if !p then curLevel = curLevel.nextInner + try op finally curLevel = saved + + /** A map recording the level of a symbol */ + private val mySymLevel: mutable.Map[Symbol, Level] = mutable.Map() + + def symLevel(sym: Symbol): Level = mySymLevel.getOrElse(sym, undefinedLevel) + + def recordLevel(sym: Symbol)(using Context): Unit = mySymLevel(sym) = curLevel + + // ------ BiTypeMap adjustment ----------------------- + + private var myMapFutureElems = true + + /** When mapping a capture set with a BiTypeMap, should we create a BiMapped set + * so that future elements can also be mapped, and elements added to the BiMapped + * are back-propagated? Turned off when creating capture set variables for the + * first time, since we then do not want to change the binder to the original type + * without capture sets when back propagating. Error case where this shows: + * pos-customargs/captures/lists.scala, method m2c. 
+ */ + def mapFutureElems(using Context) = myMapFutureElems + + /** Don't map future elements in this `op` */ + inline def withoutMappedFutureElems[T](op: => T)(using Context): T = + val saved = mapFutureElems + myMapFutureElems = false + try op finally myMapFutureElems = saved + + // ------ Iteration count of capture checking run + + private var iterCount = 1 + + def iterationId = iterCount + + def nextIteration[T](op: => T): T = + iterCount += 1 + try op finally iterCount -= 1 + + // ------ Global counters ----------------------- + + /** Next CaptureSet.Var id */ + var varId = 0 + + /** Next root id */ + var rootId = 0 + + // ------ VarState singleton objects ------------ + // See CaptureSet.VarState creation methods for documentation + + object Separate extends VarState.Separating + object HardSeparate extends VarState.Separating + object Unrecorded extends VarState.Unrecorded + object ClosedUnrecorded extends VarState.ClosedUnrecorded + + // ------ Context info accessed from companion object when isCaptureCheckingOrSetup is true + + private var openExistentialScopes: List[MethodType] = Nil + + private var capIsRoot: Boolean = false + + private var collapseFresh: Boolean = false + +object CCState: + + opaque type Level = Int + + val undefinedLevel: Level = -1 + + val outermostLevel: Level = 0 + + extension (x: Level) + def isDefined: Boolean = x >= 0 + def <= (y: Level) = (x: Int) <= y + def nextInner: Level = if isDefined then x + 1 else x + + /** If we are currently in capture checking or setup, and `mt` is a method + * type that is not a prefix of a curried method, perform `op` assuming + * a fresh enclosing existential scope `mt`, otherwise perform `op` directly. + */ + inline def inNewExistentialScope[T](mt: MethodType)(op: => T)(using Context): T = + if isCaptureCheckingOrSetup then + val ccs = ccState + val saved = ccs.openExistentialScopes + if mt.marksExistentialScope then ccs.openExistentialScopes = mt :: ccs.openExistentialScopes + try op finally ccs.openExistentialScopes = saved + else + op + + /** The currently opened existential scopes */ + def openExistentialScopes(using Context): List[MethodType] = ccState.openExistentialScopes + + /** Run `op` under the assumption that `cap` can subsume all other capabilties + * except Result capabilities. Every use of this method should be scrutinized + * for whether it introduces an unsoundness hole. + */ + inline def withCapAsRoot[T](op: => T)(using Context): T = + if isCaptureCheckingOrSetup then + val ccs = ccState + val saved = ccs.capIsRoot + ccs.capIsRoot = true + try op finally ccs.capIsRoot = saved + else op + + /** Is `caps.cap` a root capability that is allowed to subsume other capabilities? */ + def capIsRoot(using Context): Boolean = ccState.capIsRoot + + /** Run `op` under the assumption that all FreshCap instances are equal + * to each other and to GlobalCap. + * Needed to make override checking of types containing fresh work. + * Asserted in override checking, tested in maxSubsumes. + * Is this sound? Test case is neg-custom-args/captures/leaked-curried.scala. + */ + inline def withCollapsedFresh[T](op: => T)(using Context): T = + if isCaptureCheckingOrSetup then + val ccs = ccState + val saved = ccs.collapseFresh + ccs.collapseFresh = true + try op finally ccs.collapseFresh = saved + else op + + /** Should all FreshCap instances be treated as equal to GlobalCap? 
*/ + def collapseFresh(using Context): Boolean = ccState.collapseFresh + +end CCState diff --git a/compiler/src/dotty/tools/dotc/cc/Capability.scala b/compiler/src/dotty/tools/dotc/cc/Capability.scala new file mode 100644 index 000000000000..95f8f180b339 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/cc/Capability.scala @@ -0,0 +1,861 @@ +package dotty.tools +package dotc +package cc + +import core.* +import Types.*, Symbols.*, Contexts.*, Decorators.* +import util.{SimpleIdentitySet, EqHashMap} +import typer.ErrorReporting.Addenda +import util.common.alwaysTrue +import scala.collection.mutable +import CCState.* +import Periods.{NoRunId, RunWidth} +import compiletime.uninitialized +import StdNames.nme +import CaptureSet.VarState +import Annotations.Annotation +import Flags.* +import config.Printers.capt +import CCState.{Level, undefinedLevel} +import annotation.constructorOnly +import ast.tpd +import printing.{Printer, Showable} +import printing.Texts.Text +import reporting.Message +import NameOps.isImpureFunction +import annotation.internal.sharable + +/** Capabilities are members of capture sets. They partially overlap with types + * as shown in the trait hierarchy below. + * + * Capability --+-- RootCapabilty -----+-- GlobalCap + * | +-- FreshCap + * | +-- ResultCap + * | + * +-- CoreCapability ----+-- ObjectCapability --+-- TermRef + * | | +-- ThisType + * | | +-- TermParamRef + * | | + * | +-- SetCapability -----+-- TypeRef + * | +-- TypeParamRef + * | + * +-- DerivedCapability -+-- ReadOnly + * +-- Reach + * +-- Maybe + * + * All CoreCapabilities are Types, or, more specifically instances of TypeProxy. + */ +object Capabilities: + opaque type Validity = Int + def validId(runId: Int, iterId: Int): Validity = + runId + (iterId << RunWidth) + def currentId(using Context): Validity = validId(ctx.runId, ccState.iterationId) + val invalid: Validity = validId(NoRunId, 0) + + @sharable private var nextRootId = 0 + + /** The base trait of all root capabilities */ + trait RootCapability extends Capability: + val rootId = nextRootId + nextRootId += 1 + def descr(using Context): String + + /** The base trait of all capabilties represented as types */ + trait CoreCapability extends TypeProxy, Capability: + override def toText(printer: Printer): Text = printer.toText(this) + + trait ObjectCapability extends CoreCapability + + trait SetCapability extends CoreCapability + + trait DerivedCapability extends Capability: + def underlying: Capability + + /** If `x` is a capability, its maybe capability `x?`. `x?` stands for a capability + * `x` that might or might not be part of a capture set. We have `{} <: {x?} <: {x}`. + * Maybe capabilities cannot be propagated between sets. If `a <: b` and `a` + * acquires `x?` then `x` is propagated to `b` as a conservative approximation. + * + * Maybe capabilities should only arise for capture sets that appear in invariant + * position in their surrounding type. They are similar to TypeBounds types, but + * restricted to capture sets. For instance, + * + * Array[C^{x?}] + * + * should be morally equivalent to + * + * Array[_ >: C^{} <: C^{x}] + * + * but it has fewer issues with type inference. + */ + case class Maybe(underlying: Capability) extends DerivedCapability + + /** The readonly capability `x.rd`. We have {x.rd} <: {x}. + * + * Read-only capabilities cannot wrap maybe capabilities + * but they can wrap reach capabilities. We have + * (x?).readOnly = (x.rd)? 
+ */ + case class ReadOnly(underlying: ObjectCapability | RootCapability | Reach) + extends DerivedCapability: + assert(!underlying.isInstanceOf[Maybe]) + + /** If `x` is a capability, its reach capability `x*`. `x*` stands for all + * capabilities reachable through `x`. + * We have `{x} <: {x*} <: dcs(x)}` where the deep capture set `dcs(x)` of `x` + * is the union of all capture sets that appear in covariant position in the + * type of `x`. If `x` and `y` are different variables then `{x*}` and `{y*}` + * are unrelated. + * + * Reach capabilities cannot wrap read-only capabilities or maybe capabilities. + * We have + * (x.rd).reach = x*.rd + * (x.rd)? = (x*)? + */ + case class Reach(underlying: ObjectCapability) extends DerivedCapability: + assert(!underlying.isInstanceOf[Maybe | ReadOnly]) + + /** The global root capability referenced as `caps.cap` + * `cap` does not subsume other capabilities, except in arguments of + * `withCapAsRoot` calls. + */ + @sharable // We override below all operations that access internal capability state + object GlobalCap extends RootCapability: + def descr(using Context) = "the universal root capability" + override val maybe = Maybe(this) + override val readOnly = ReadOnly(this) + override def reach = unsupported("cap.reach") + override def singletonCaptureSet(using Context) = CaptureSet.universal + override def captureSetOfInfo(using Context) = singletonCaptureSet + override def cached[C <: DerivedCapability](newRef: C): C = unsupported("cached") + override def invalidateCaches() = () + + /** The class of "fresh" roots. These do subsume other capabilties in scope. + * They track with hidden sets which other capabilities were subsumed. + * Hidden sets are inspected by separation checking. + * @param owner the owner of the context in which the FreshCap was created + * @param origin an indication where and why the FreshCap was created, used + * for diagnostics + */ + case class FreshCap private (owner: Symbol, origin: Origin)(using @constructorOnly ctx: Context) extends RootCapability: + val hiddenSet = CaptureSet.HiddenSet(owner, this: @unchecked) + // fails initialization check without the @unchecked + + override def equals(that: Any) = that match + case that: FreshCap => this eq that + case _ => false + + def descr(using Context) = + val originStr = origin match + case Origin.InDecl(sym) if sym.exists => + origin.explanation + case _ => + i" created in ${hiddenSet.owner.sanitizedDescription}${origin.explanation}" + i"a fresh root capability$originStr" + + object FreshCap: + def apply(origin: Origin)(using Context): FreshCap | GlobalCap.type = + FreshCap(ctx.owner, origin) + + /** A root capability associated with a function type. These are conceptually + * existentially quantified over the function's result type. + * @param binder The function type with which the capability is associated. + * It is a MethodicType since we also have ResultCaps that are + * associated with the ExprTypes of parameterless functions. + * Currently we never create results over PolyTypes. TODO change this? + * Setup: + * + * In the setup phase, `cap` instances in the result of a dependent function type + * or method type such as `(x: T): C^{cap}` are converted to `ResultCap(binder)` instances, + * where `binder` refers to the method type. Most other cap instances are mapped to + * Fresh instances instead. For example the `cap` in the result of `T => C^{cap}` + * is mapped to a Fresh instance. 
+ * + * If one needs to use a dependent function type yet one still wants to map `cap` to + * a fresh instance instead of an existential root, one can achieve that by the use + * of a type alias. For instance, the following type creates an existential for `^`: + * + * (x: A) => (C^{x}, D^) + * + * By contrast, this variant creates a fresh instance instead: + * + * type F[X] = (x: A) => (C^{x}, X) + * F[D^] + * + * The trick is that the argument D^ is mapped to D^{fresh} before the `F` alias + * is expanded. + */ + case class ResultCap(binder: MethodicType) extends RootCapability: + + private var myOrigin: RootCapability = GlobalCap + private var variants: SimpleIdentitySet[ResultCap] = SimpleIdentitySet.empty + + /** Every ResultCap capability has an origin. This is + * - A FreshCap capability `f`, if the current capability was created as a mirror + * of `f` in the ToResult map. + * - Another ResultCap capability `r`, if the current capability was created + * via a chain of `derivedResult` calls from an original ResultCap `r` + * (which was not created using `derivedResult`). + * - GlobalCap otherwise + */ + def origin: RootCapability = myOrigin + + /** Initialize origin of this capability to a FreshCap instance (or to GlobalCap + * if separation checks are turned off). + * @pre The capability's origin was not yet set. + */ + def setOrigin(freshOrigin: FreshCap | GlobalCap.type): Unit = + assert(myOrigin eq GlobalCap) + myOrigin = freshOrigin + + /** If the current capability was created via a chain of `derivedResult` calls + * from an original ResultCap `r`, that `r`. Otherwise `this`. + */ + def primaryResultCap: ResultCap = origin match + case origin: ResultCap => origin + case _ => this + + def originalBinder: MethodicType = primaryResultCap.binder + + /** A ResultCap with given `binder1` derived from this capability. + * This is typically done as a result of a SubstBinding map. + * ResultCaps so created are cached, so that for every pair + * of a ResultCap `r` and a binder `b`, there exists at most one ResultCap + * instance that is derived transitively from `r` and has binder `b`. + */ + def derivedResult(binder1: MethodicType): ResultCap = + if binder1 eq binder then this + else + val primary = primaryResultCap + primary.variants.iterator.find(_.binder eq binder1) match + case Some(rcap) => rcap + case None => + val rcap = ResultCap(binder1) + rcap.myOrigin = primary + primary.variants += rcap + rcap + + def descr(using Context) = + i"a root capability associated with the result type of $binder" + end ResultCap + + /** A trait for references in CaptureSets. These can be NamedTypes, ThisTypes or ParamRefs, + * as well as three kinds of AnnotatedTypes representing readOnly, reach, and maybe capabilities. + * If there are several annotations they come with an order: + * `*` first, `.rd` next, `?` last.
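+ * + * For illustration: a fully derived capability over a reference `x` is represented as `Maybe(ReadOnly(Reach(x)))` (printed roughly as `x*.rd?`); the constructors never nest the other way around.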
+ */ + trait Capability extends Showable: + + private var myCaptureSet: CaptureSet | Null = uninitialized + private var myCaptureSetValid: Validity = invalid + private var mySingletonCaptureSet: CaptureSet.Const | Null = null + private var myDerived: List[DerivedCapability] = Nil + + protected def cached[C <: DerivedCapability](newRef: C): C = + def recur(refs: List[DerivedCapability]): C = refs match + case ref :: refs1 => + if ref.getClass == newRef.getClass then ref.asInstanceOf[C] else recur(refs1) + case Nil => + myDerived = newRef :: myDerived + newRef + recur(myDerived) + + def maybe: Maybe = this match + case self: Maybe => self + case _ => cached(Maybe(this)) + + def readOnly: ReadOnly | Maybe = this match + case Maybe(ref1) => Maybe(ref1.readOnly) + case self: ReadOnly => self + case self: (ObjectCapability | RootCapability | Reach) => cached(ReadOnly(self)) + + def reach: Reach | ReadOnly | Maybe = this match + case Maybe(ref1) => Maybe(ref1.reach) + case ReadOnly(ref1) => ReadOnly(ref1.reach.asInstanceOf[Reach]) + case self: Reach => self + case self: ObjectCapability => cached(Reach(self)) + + /** Is this a maybe reference of the form `x?`? */ + final def isMaybe(using Context): Boolean = this ne stripMaybe + + /** Is this a read-only reference of the form `x.rd` or `x.rd?` or a + * capture set variable with only read-ony references in its upper bound? + */ + final def isReadOnly(using Context): Boolean = this match + case tp: SetCapability => tp.captureSetOfInfo.isReadOnly + case _ => this ne stripReadOnly + + /** Is this a reach reference of the form `x*` or a readOnly or maybe variant + * of a reach reference? + */ + final def isReach(using Context): Boolean = this ne stripReach + + final def stripMaybe(using Context): Capability = this match + case Maybe(ref1) => ref1 + case _ => this + + final def stripReadOnly(using Context): Capability = this match + case ReadOnly(ref1) => ref1 + case Maybe(ref1) => ref1.stripReadOnly.maybe + case _ => this + + final def stripReach(using Context): Capability = this match + case Reach(ref1) => ref1 + case ReadOnly(ref1) => ref1.stripReach.readOnly + case Maybe(ref1) => ref1.stripReach.maybe + case _ => this + + /** Is this reference the generic root capability `cap` or a Fresh instance? */ + final def isCapOrFresh(using Context): Boolean = this match + case GlobalCap | _: FreshCap => true + case _ => false + + /** Is this reference a root capability or a derived version of one? + * These capabilities have themselves as their captureSetOfInfo. + */ + final def isTerminalCapability(using Context): Boolean = + core.isInstanceOf[RootCapability] + + /** Is the reference tracked? This is true if it can be tracked and the capture + * set of the underlying type is not always empty. + */ + final def isTracked(using Context): Boolean = this.core match + case _: RootCapability => true + case tp: CoreCapability => tp.isTrackableRef && !captureSetOfInfo.isAlwaysEmpty + + /** An exclusive capability is a capability that derives + * indirectly from a maximal capability without going through + * a read-only capability first. 
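+ * For instance (illustration): a fresh root capability is exclusive, as is a non-read-only reference whose underlying capture set is exclusive, whereas a read-only capability `x.rd` never is.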
+ */ + final def isExclusive(using Context): Boolean = + !isReadOnly && (isTerminalCapability || captureSetOfInfo.isExclusive) + + final def isWellformed(using Context): Boolean = this match + case self: CoreCapability => self.isTrackableRef + case _ => true + + /** The non-derived capability underlying this capability */ + final def core: CoreCapability | RootCapability = this match + case self: (CoreCapability | RootCapability) => self + case self: DerivedCapability => self.underlying.core + + /** The type underlying this capability, NoType for root capabilities */ + final def coreType: CoreCapability | NoType.type = core match + case c: CoreCapability => c + case _ => NoType + + /** The first element of this path type, skipping selections + * and qualifiers. Note that class parameter references are of + * the form this.C but their pathroot is still this.C, not this. + */ + final def pathRoot(using Context): Capability = this match + case _: RootCapability => this + case self: DerivedCapability => self.underlying.pathRoot + case self: CoreCapability => self.dealias match + case tp1: (TermRef | TypeRef) => // can't use NamedType here since it is not a capability + if tp1.symbol.maybeOwner.isClass && !tp1.symbol.is(TypeParam) then + tp1.prefix match + case pre: Capability => pre.pathRoot + case _ => tp1 + else tp1 + case tp1: CoreCapability => tp1 + case _ => self + + /** The logical owner of the root of this class: + * - If this path starts with `C.this`, the class `C`. + * - If it starts with a reference `r`, `r`'s owner. + * - If it starts with cap, the `scala.caps` package class. + * - If it starts with a fresh instance, its owner. + * - If it starts with a ParamRef or a ResultCap, NoSymbol. + */ + final def pathOwner(using Context): Symbol = pathRoot match + case tp1: ThisType => tp1.cls + case tp1: NamedType => tp1.symbol.owner + case GlobalCap => defn.CapsModule.moduleClass + case tp1: FreshCap => tp1.ccOwner + case _ => NoSymbol + + final def isParamPath(using Context): Boolean = this match + case tp1: NamedType => + tp1.prefix match + case _: ThisType | NoPrefix => + tp1.symbol.is(Param) || tp1.symbol.is(ParamAccessor) + case prefix: CoreCapability => prefix.isParamPath + case _ => false + case _: ParamRef => true + case _ => false + + final def ccOwner(using Context): Symbol = this match + case self: ThisType => self.cls + case TermRef(prefix: Capability, _) => prefix.ccOwner + case self: NamedType => self.symbol + case self: DerivedCapability => self.underlying.ccOwner + case self: FreshCap => self.hiddenSet.owner + case _ /* : GlobalCap | ResultCap | ParamRef */ => NoSymbol + + /** The symbol that represents the level closest-enclosing ccOwner. + * Symbols representing levels are + * - class symbols, but not inner (non-static) module classes + * - method symbols, but not accessors or constructors + */ + final def levelOwner(using Context): Symbol = + def adjust(owner: Symbol): Symbol = + if !owner.exists + || owner.isClass && (!owner.is(Flags.Module) || owner.isStatic) + || owner.is(Flags.Method, butNot = Flags.Accessor) && !owner.isConstructor + then owner + else adjust(owner.owner) + adjust(ccOwner) + + /** Tests whether the capability derives from capability class `cls`. 
*/ + def derivesFromCapTrait(cls: ClassSymbol)(using Context): Boolean = this match + case Reach(ref1) => ref1.widen.derivesFromCapTraitDeeply(cls) + case self: DerivedCapability => self.underlying.derivesFromCapTrait(cls) + case self: CoreCapability => self.superType.derivesFromCapTrait(cls) + case _ => false + + def derivesFromCapability(using Context): Boolean = derivesFromCapTrait(defn.Caps_Capability) + def derivesFromMutable(using Context): Boolean = derivesFromCapTrait(defn.Caps_Mutable) + def derivesFromSharedCapability(using Context): Boolean = derivesFromCapTrait(defn.Caps_SharedCapability) + + /** The capture set consisting of exactly this reference */ + def singletonCaptureSet(using Context): CaptureSet.Const = + if mySingletonCaptureSet == null then + mySingletonCaptureSet = CaptureSet(this) + mySingletonCaptureSet.uncheckedNN + + /** The capture set of the type underlying this reference */ + def captureSetOfInfo(using Context): CaptureSet = + if myCaptureSetValid == currentId then myCaptureSet.nn + else if myCaptureSet.asInstanceOf[AnyRef] eq CaptureSet.Pending then CaptureSet.empty + else + myCaptureSet = CaptureSet.Pending + val computed = CaptureSet.ofInfo(this) + def isProvisional = this.core match + case core: TypeProxy => !core.underlying.exists || core.underlying.isProvisional + case _ => false + if !isCaptureChecking || ctx.mode.is(Mode.IgnoreCaptures) || isProvisional then + myCaptureSet = null + else + myCaptureSet = computed + myCaptureSetValid = currentId + computed + + def invalidateCaches() = + myCaptureSetValid = invalid + + /** x subsumes x + * x =:= y ==> x subsumes y + * x subsumes y ==> x subsumes y.f + * x subsumes y ==> x* subsumes y, x subsumes y? + * x subsumes y ==> x* subsumes y*, x? subsumes y? + * x: x1.type /\ x1 subsumes y ==> x subsumes y + * X = CapSet^cx, exists rx in cx, rx subsumes y ==> X subsumes y + * Y = CapSet^cy, forall ry in cy, x subsumes ry ==> x subsumes Y + * X: CapSet^c1...CapSet^c2, (CapSet^c1) subsumes y ==> X subsumes y + * Y: CapSet^c1...CapSet^c2, x subsumes (CapSet^c2) ==> x subsumes Y + * Contains[X, y] ==> X subsumes y + */ + final def subsumes(y: Capability)(using ctx: Context)(using vs: VarState = VarState.Separate): Boolean = + + /** Are `x` and `y` capabilities such that x subsumes y? */ + def subsumingRefs(x: Type | Capability, y: Type | Capability): Boolean = x match + case x: Capability => y match + case y: Capability => x.subsumes(y) + case _ => false + case _ => false + + /** Perform `test` on all object capabilities in `info` */ + def viaInfo(info: Type)(test: Type => Boolean): Boolean = info.dealias match + case info: ObjectCapability => test(info) + case CapturingType(parent, _) => viaInfo(parent)(test) + case info: AndType => viaInfo(info.tp1)(test) || viaInfo(info.tp2)(test) + case info: OrType => viaInfo(info.tp1)(test) && viaInfo(info.tp2)(test) + case _ => false + + try (this eq y) + || maxSubsumes(y, canAddHidden = !vs.isOpen) + || y.match + case y: TermRef => + y.prefix.match + case ypre: Capability => + this.subsumes(ypre) + || this.match + case x @ TermRef(xpre: Capability, _) if x.symbol == y.symbol => + // To show `{x.f} <:< {y.f}`, it is important to prove `x` and `y` + // are equvalent, which means `x =:= y` in terms of subtyping, + // not just `{x} =:= {y}` in terms of subcapturing. + // It is possible to construct two singleton types `x` and `y`, + // which subsume each other, but are not equal references. + // See `tests/neg-custom-args/captures/path-prefix.scala` for example. 
+ withMode(Mode.IgnoreCaptures): + TypeComparer.isSameRef(xpre, ypre) + case _ => + false + case _ => false + || viaInfo(y.info)(subsumingRefs(this, _)) + case Maybe(y1) => this.stripMaybe.subsumes(y1) + case ReadOnly(y1) => this.stripReadOnly.subsumes(y1) + case y: TypeRef if y.derivesFrom(defn.Caps_CapSet) => + // The upper and lower bounds don't have to be in the form of `CapSet^{...}`. + // They can be other capture set variables, which are bounded by `CapSet`, + // like `def test[X^, Y^, Z >: X <: Y]`. + y.info match + case TypeBounds(_, hi @ CapturingType(parent, refs)) => + refs.elems.forall(this.subsumes) + case TypeBounds(_, hi: Capability) => + this.subsumes(hi) + case _ => + y.captureSetOfInfo.elems.forall(this.subsumes) + case _ => false + || this.match + case Reach(x1) => x1.subsumes(y.stripReach) + case x: TermRef => viaInfo(x.info)(subsumingRefs(_, y)) + case x: TypeRef if assumedContainsOf(x).contains(y) => true + case x: TypeRef if x.derivesFrom(defn.Caps_CapSet) => + x.info match + case TypeBounds(CapturingType(_, lorefs), _) => + lorefs.elems.exists(_.subsumes(y)) + case TypeBounds(lo: Capability, _) => + lo.subsumes(y) + case _ => + x.captureSetOfInfo.elems.exists(_.subsumes(y)) + case _ => false + catch case ex: AssertionError => + println(i"error while subsumes $this >> $y") + throw ex + end subsumes + + /** This is a maximal capability that subsumes `y` in given context and VarState. + * @param canAddHidden If true we allow maximal capabilities to subsume all other capabilities. + * We add those capabilities to the hidden set if this is a Fresh instance. + * If false we only accept `y` elements that are already in the + * hidden set of this Fresh instance. The idea is that in a VarState that + * accepts additions we first run `maxSubsumes` with `canAddHidden = false` + * so that new variables get added to the sets. If that fails, we run + * the test again with canAddHidden = true as a last effort before we + * fail a comparison. + */ + def maxSubsumes(y: Capability, canAddHidden: Boolean)(using ctx: Context)(using vs: VarState = VarState.Separate): Boolean = + (this eq y) + || this.match + case x: FreshCap => + def levelOK = + if ccConfig.useFreshLevels && !CCState.collapseFresh then + val yOwner = y.levelOwner + yOwner.isStaticOwner || x.ccOwner.isContainedIn(yOwner) + else y.core match + case ResultCap(_) | _: ParamRef => false + case _ => true + + vs.ifNotSeen(this)(x.hiddenSet.elems.exists(_.subsumes(y))) + || levelOK + && canAddHidden + && vs.addHidden(x.hiddenSet, y) + case x: ResultCap => + val result = y match + case y: ResultCap => vs.unify(x, y) + case _ => y.derivesFromSharedCapability + if !result then + TypeComparer.addErrorNote(CaptureSet.ExistentialSubsumesFailure(x, y)) + result + case GlobalCap => + y match + case GlobalCap => true + case _: ResultCap => false + case _: FreshCap if CCState.collapseFresh => true + case _ => + y.derivesFromSharedCapability + || canAddHidden && vs != VarState.HardSeparate && CCState.capIsRoot + case _ => + y match + case ReadOnly(y1) => this.stripReadOnly.maxSubsumes(y1, canAddHidden) + case _ => false + + /** `x covers y` if we should retain `y` when computing the overlap of + * two footprints which have `x` respectively `y` as elements. + * We assume that .rd have already been stripped on both sides. + * We have: + * + * x covers x + * x covers y ==> x covers y.f + * x covers y ==> x* covers y*, x? covers y? + * TODO what other clauses from subsumes do we need to port here? 
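+ * For instance (illustration): since `x covers x` and covering extends to selections, a path capability `x` covers `x.f`; likewise `x*` covers `y*` and `x?` covers `y?` whenever `x` covers `y`.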
+ */ + final def covers(y: Capability)(using Context): Boolean = + (this eq y) + || y.match + case y @ TermRef(ypre: Capability, _) => + this.covers(ypre) + case Reach(y1) => + this match + case Reach(x1) => x1.covers(y1) + case _ => false + case Maybe(y1) => + this match + case Maybe(x1) => x1.covers(y1) + case _ => false + case y: FreshCap => + y.hiddenSet.superCaps.exists(this covers _) + case _ => + false + + def assumedContainsOf(x: TypeRef)(using Context): SimpleIdentitySet[Capability] = + CaptureSet.assumedContains.getOrElse(x, SimpleIdentitySet.empty) + + def toText(printer: Printer): Text = printer.toTextCapability(this) + end Capability + + /** The place of - and cause for - creating a fresh capability. Used for + * error diagnostics + */ + enum Origin: + case InDecl(sym: Symbol) + case TypeArg(tp: Type) + case UnsafeAssumePure + case Formal(pref: ParamRef, app: tpd.Apply) + case ResultInstance(methType: Type, meth: Symbol) + case UnapplyInstance(info: MethodType) + case NewMutable(tp: Type) + case NewCapability(tp: Type) + case LambdaExpected(respt: Type) + case LambdaActual(restp: Type) + case OverriddenType(member: Symbol) + case DeepCS(ref: TypeRef) + case Unknown + + def explanation(using Context): String = this match + case InDecl(sym: Symbol) => + if sym.is(Method) then i" in the result type of $sym" + else if sym.exists then i" in the type of $sym" + else "" + case TypeArg(tp: Type) => + i" of type argument $tp" + case UnsafeAssumePure => + " when instantiating argument of unsafeAssumePure" + case Formal(pref, app) => + val meth = app.symbol + if meth.exists + then i" when checking argument to parameter ${pref.paramName} of $meth" + else "" + case ResultInstance(mt, meth) => + val methDescr = if meth.exists then i"$meth's type " else "" + i" when instantiating $methDescr$mt" + case UnapplyInstance(info) => + i" when instantiating argument of unapply with type $info" + case NewMutable(tp) => + i" when constructing mutable $tp" + case NewCapability(tp) => + i" when constructing Capability instance $tp" + case LambdaExpected(respt) => + i" when instantiating expected result type $respt of lambda" + case LambdaActual(restp: Type) => + i" when instantiating result type $restp of lambda" + case OverriddenType(member: Symbol) => + i" when instantiating upper bound of member overridden by $member" + case DeepCS(ref: TypeRef) => + i" when computing deep capture set of $ref" + case Unknown => + "" + end Origin + + // ---------- Maps between different kinds of root capabilities ----------------- + + + /** Map each occurrence of cap to a different Fresh instance + * Exception: CapSet^ stays as it is. 
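+ * For instance (illustration, with hypothetical types `A` and `B`): in `(A^{cap}, B^{cap})` the two occurrences of `cap` are mapped to two distinct FreshCap instances, whereas an occurrence of `CapSet^` is left unchanged.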
+ */ + class CapToFresh(origin: Origin)(using Context) extends BiTypeMap, FollowAliasesMap: + thisMap => + + override def apply(t: Type) = + if variance <= 0 then t + else t match + case t @ CapturingType(_, _) => + mapOver(t) + case t @ AnnotatedType(parent, ann) => + val parent1 = this(parent) + if ann.symbol.isRetains && ann.tree.toCaptureSet.containsCap then + this(CapturingType(parent1, ann.tree.toCaptureSet)) + else + t.derivedAnnotatedType(parent1, ann) + case _ => + mapFollowingAliases(t) + + override def mapCapability(c: Capability, deep: Boolean): Capability = c match + case GlobalCap => FreshCap(origin) + case _ => super.mapCapability(c, deep) + + override def fuse(next: BiTypeMap)(using Context) = next match + case next: Inverse => assert(false); Some(IdentityTypeMap) + case _ => None + + override def toString = "CapToFresh" + + class Inverse extends BiTypeMap, FollowAliasesMap: + def apply(t: Type): Type = t match + case t @ CapturingType(_, refs) => mapOver(t) + case _ => mapFollowingAliases(t) + + override def mapCapability(c: Capability, deep: Boolean): Capability = c match + case _: FreshCap => GlobalCap + case _ => super.mapCapability(c, deep) + + def inverse = thisMap + override def toString = thisMap.toString + ".inverse" + + lazy val inverse = Inverse() + + end CapToFresh + + /** Maps cap to fresh. CapToFresh is a BiTypeMap since we don't want to + * freeze a set when it is mapped. On the other hand, we do not want Fresh + * values to flow back to cap since that would fail disallowRootCapability + * tests elsewhere. We therefore use `withoutMappedFutureElems` to prevent + * the map being installed for future use. + */ + def capToFresh(tp: Type, origin: Origin)(using Context): Type = + ccState.withoutMappedFutureElems: + CapToFresh(origin)(tp) + + /** Maps fresh to cap */ + def freshToCap(tp: Type)(using Context): Type = + CapToFresh(Origin.Unknown).inverse(tp) + + /** Map top-level free existential variables one-to-one to Fresh instances */ + def resultToFresh(tp: Type, origin: Origin)(using Context): Type = + val subst = new TypeMap: + val seen = EqHashMap[ResultCap, FreshCap | GlobalCap.type]() + var localBinders: SimpleIdentitySet[MethodType] = SimpleIdentitySet.empty + + def apply(t: Type): Type = t match + case t: MethodType => + // skip parameters + val saved = localBinders + if t.marksExistentialScope then localBinders = localBinders + t + try t.derivedLambdaType(resType = this(t.resType)) + finally localBinders = saved + case t: PolyType => + // skip parameters + t.derivedLambdaType(resType = this(t.resType)) + case _ => + mapOver(t) + + override def mapCapability(c: Capability, deep: Boolean) = c match + case c @ ResultCap(binder) => + if localBinders.contains(binder) then c // keep bound references + else seen.getOrElseUpdate(c, FreshCap(origin)) // map free references to FreshCap + case _ => super.mapCapability(c, deep) + end subst + + subst(tp) + end resultToFresh + + /** Replace all occurrences of `cap` (or fresh) in parts of this type by an existentially bound + * variable bound by `mt`. + * Stop at function or method types since these have been mapped before. + */ + def toResult(tp: Type, mt: MethodicType, fail: Message => Unit)(using Context): Type = + + abstract class CapMap extends BiTypeMap: + override def mapOver(t: Type): Type = t match + case t @ FunctionOrMethod(args, res) if variance > 0 && !t.isAliasFun => + t // `t` should be mapped in this case by a different call to `mapCap`. 
+ case t: (LazyRef | TypeVar) => + mapConserveSuper(t) + case _ => + super.mapOver(t) + + object toVar extends CapMap: + + def apply(t: Type) = t match + case defn.FunctionNOf(args, res, contextual) if t.typeSymbol.name.isImpureFunction => + if variance > 0 then + super.mapOver: + defn.FunctionNOf(args, res, contextual) + .capturing(ResultCap(mt).singletonCaptureSet) + else mapOver(t) + case _ => + mapOver(t) + + override def mapCapability(c: Capability, deep: Boolean) = c match + case c: (FreshCap | GlobalCap.type) => + if variance > 0 then + val res = ResultCap(mt) + c match + case c: FreshCap => res.setOrigin(c) + case _ => + res + else + if variance == 0 then + fail(em"""$tp captures the root capability `cap` in invariant position. + |This capability cannot be converted to an existential in the result type of a function.""") + // we accept variance < 0, and leave the cap as it is + c + case _ => + super.mapCapability(c, deep) + + //.showing(i"mapcap $t = $result") + override def toString = "toVar" + + object inverse extends BiTypeMap: + def apply(t: Type) = mapOver(t) + + override def mapCapability(c: Capability, deep: Boolean) = c match + case c @ ResultCap(`mt`) => + // do a reverse getOrElseUpdate on `seen` to produce the + // `Fresh` assosicated with `t` + val primary = c.primaryResultCap + primary.origin match + case GlobalCap => + val fresh = FreshCap(Origin.Unknown) + primary.setOrigin(fresh) + fresh + case origin: FreshCap => + origin + case _ => + super.mapCapability(c, deep) + + def inverse = toVar.this + override def toString = "toVar.inverse" + end inverse + end toVar + + toVar(tp) + end toResult + + /** Map global roots in function results to result roots. Also, + * map roots in the types of parameterless def methods. + */ + def toResultInResults(sym: Symbol, fail: Message => Unit, keepAliases: Boolean = false)(tp: Type)(using Context): Type = + val m = new TypeMap with FollowAliasesMap: + def apply(t: Type): Type = t match + case AnnotatedType(parent @ defn.RefinedFunctionOf(mt), ann) if ann.symbol == defn.InferredDepFunAnnot => + val mt1 = mapOver(mt).asInstanceOf[MethodType] + if mt1 ne mt then mt1.toFunctionType(alwaysDependent = true) + else parent + case defn.RefinedFunctionOf(mt) => + val mt1 = apply(mt) + if mt1 ne mt then mt1.toFunctionType(alwaysDependent = true) + else t + case t: MethodType if variance > 0 && t.marksExistentialScope => + val t1 = mapOver(t).asInstanceOf[MethodType] + t1.derivedLambdaType(resType = toResult(t1.resType, t1, fail)) + case CapturingType(parent, refs) => + t.derivedCapturingType(this(parent), refs) + case t: (LazyRef | TypeVar) => + mapConserveSuper(t) + case _ => + try + if keepAliases then mapOver(t) + else mapFollowingAliases(t) + catch case ex: AssertionError => + println(i"error while mapping $t") + throw ex + m(tp) match + case tp1: ExprType if sym.is(Method, butNot = Accessor) => + tp1.derivedExprType(toResult(tp1.resType, tp1, fail)) + case tp1 => tp1 + end toResultInResults + +end Capabilities \ No newline at end of file diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureAnnotation.scala b/compiler/src/dotty/tools/dotc/cc/CaptureAnnotation.scala index f0018cc93d7e..2af01594192f 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureAnnotation.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureAnnotation.scala @@ -10,6 +10,7 @@ import Decorators.* import config.Printers.capt import printing.Printer import printing.Texts.Text +import cc.Capabilities.{Capability, RootCapability} /** An annotation representing a capture set 
and whether it is boxed. * It simulates a normal @retains annotation except that it is more efficient, @@ -39,10 +40,11 @@ case class CaptureAnnotation(refs: CaptureSet, boxed: Boolean)(cls: Symbol) exte /** Reconstitute annotation tree from capture set */ override def tree(using Context) = val elems = refs.elems.toList.map { - case cr: TermRef => ref(cr) - case cr: TermParamRef => untpd.Ident(cr.paramName).withType(cr) - case cr: ThisType => This(cr.cls) - // TODO: Will crash if the type is an annotated type, for example `cap?` + case c: TermRef => ref(c) + case c: TermParamRef => untpd.Ident(c.paramName).withType(c) + case c: ThisType => This(c.cls) + case c: RootCapability => ref(defn.captureRoot) + // TODO: Will crash if the type is an annotated type, for example `cap.rd` } val arg = repeated(elems, TypeTree(defn.AnyType)) New(symbol.typeRef, arg :: Nil) @@ -62,10 +64,12 @@ case class CaptureAnnotation(refs: CaptureSet, boxed: Boolean)(cls: Symbol) exte override def mapWith(tm: TypeMap)(using Context) = val elems = refs.elems.toList - val elems1 = elems.mapConserve(tm) + val elems1 = elems.mapConserve(tm.mapCapability(_)) if elems1 eq elems then this - else if elems1.forall(_.isTrackableRef) - then derivedAnnotation(CaptureSet(elems1.asInstanceOf[List[CaptureRef]]*), boxed) + else if elems1.forall: + case elem1: Capability => elem1.isWellformed + case _ => false + then derivedAnnotation(CaptureSet(elems1.asInstanceOf[List[Capability]]*), boxed) else EmptyAnnotation override def refersToParamOf(tl: TermLambda)(using Context): Boolean = diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index 92cd40a65d5a..3dd847f19b56 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -4,51 +4,20 @@ package cc import core.* import Types.*, Symbols.*, Contexts.*, Annotations.*, Flags.* -import Names.TermName +import Names.{Name, TermName} import ast.{tpd, untpd} import Decorators.*, NameOps.* -import config.SourceVersion import config.Printers.capt import util.Property.Key import tpd.* +import Annotations.Annotation +import CaptureSet.VarState +import Capabilities.* import StdNames.nme -import config.Feature -import collection.mutable -import CCState.* -import reporting.Message +/** Attachment key for capturing type trees */ private val Captures: Key[CaptureSet] = Key() -object ccConfig: - - /** If true, allow mapping capture set variables under captureChecking with maps that are neither - * bijective nor idempotent. We currently do now know how to do this correctly in all - * cases, though. - */ - inline val allowUnsoundMaps = false - - /** If enabled, use a special path in recheckClosure for closures - * that are eta expansions. This can improve some error messages. - */ - inline val handleEtaExpansionsSpecially = true - - /** Don't require @use for reach capabilities that are accessed - * only in a nested closure. This is unsound without additional - * mitigation measures, as shown by unsound-reach-5.scala. - */ - inline val deferredReaches = false - - /** If true, use "sealed" as encapsulation mechanism, meaning that we - * check that type variable instantiations don't have `cap` in any of - * their capture sets. This is an alternative of the original restriction - * that `cap` can't be boxed or unboxed. It is dropped in 3.5 but used - * again in 3.6. 
- */ - def useSealed(using Context) = - Feature.sourceVersion.stable != SourceVersion.`3.5` - -end ccConfig - /** Are we at checkCaptures phase? */ def isCaptureChecking(using Context): Boolean = ctx.phaseId == Phases.checkCapturesPhase.id @@ -69,77 +38,30 @@ def depFun(args: List[Type], resultType: Type, isContextual: Boolean, paramNames else make(args, resultType) mt.toFunctionType(alwaysDependent = true) -/** An exception thrown if a @retains argument is not syntactically a CaptureRef */ +/** An exception thrown if a @retains argument is not syntactically a Capability */ class IllegalCaptureRef(tpe: Type)(using Context) extends Exception(tpe.show) -/** Capture checking state, which is known to other capture checking components */ -class CCState: - - /** The last pair of capture reference and capture set where - * the reference could not be added to the set due to a level conflict. - */ - var levelError: Option[CaptureSet.CompareResult.LevelError] = None - - /** Warnings relating to upper approximations of capture sets with - * existentially bound variables. - */ - val approxWarnings: mutable.ListBuffer[Message] = mutable.ListBuffer() - - private var curLevel: Level = outermostLevel - private val symLevel: mutable.Map[Symbol, Int] = mutable.Map() - -object CCState: - - opaque type Level = Int - - val undefinedLevel: Level = -1 - - val outermostLevel: Level = 0 - - /** The level of the current environment. Levels start at 0 and increase for - * each nested function or class. -1 means the level is undefined. - */ - def currentLevel(using Context): Level = ccState.curLevel - - inline def inNestedLevel[T](inline op: T)(using Context): T = - val ccs = ccState - val saved = ccs.curLevel - ccs.curLevel = ccs.curLevel.nextInner - try op finally ccs.curLevel = saved - - inline def inNestedLevelUnless[T](inline p: Boolean)(inline op: T)(using Context): T = - val ccs = ccState - val saved = ccs.curLevel - if !p then ccs.curLevel = ccs.curLevel.nextInner - try op finally ccs.curLevel = saved - - extension (x: Level) - def isDefined: Boolean = x >= 0 - def <= (y: Level) = (x: Int) <= y - def nextInner: Level = if isDefined then x + 1 else x - - extension (sym: Symbol)(using Context) - def ccLevel: Level = ccState.symLevel.getOrElse(sym, -1) - def recordLevel() = ccState.symLevel(sym) = currentLevel -end CCState - /** The currently valid CCState */ -def ccState(using Context) = +def ccState(using Context): CCState = Phases.checkCapturesPhase.asInstanceOf[CheckCaptures].ccState1 extension (tree: Tree) - /** Map tree with CaptureRef type to its type, + /** Map tree with a Capability type to the corresponding capability, * map CapSet^{refs} to the `refs` references, * throw IllegalCaptureRef otherwise */ - def toCaptureRefs(using Context): List[CaptureRef] = tree match + def toCapabilities(using Context): List[Capability] = tree match case ReachCapabilityApply(arg) => - arg.toCaptureRefs.map(_.reach) + arg.toCapabilities.map(_.reach) + case ReadOnlyCapabilityApply(arg) => + arg.toCapabilities.map(_.readOnly) case CapsOfApply(arg) => - arg.toCaptureRefs + arg.toCapabilities case _ => tree.tpe.dealiasKeepAnnots match - case ref: CaptureRef if ref.isTrackableRef => + case ref: TermRef if ref.isCapRef => + GlobalCap :: Nil + case ref: Capability if ref.isTrackableRef => ref :: Nil case AnnotatedType(parent, ann) if ann.symbol.isRetains && parent.derivesFrom(defn.Caps_CapSet) => @@ -154,7 +76,7 @@ extension (tree: Tree) tree.getAttachment(Captures) match case Some(refs) => refs case None => - val refs = 
CaptureSet(tree.retainedElems.flatMap(_.toCaptureRefs)*) + val refs = CaptureSet(tree.retainedElems.flatMap(_.toCapabilities)*) //.showing(i"toCaptureSet $tree --> $result", capt) tree.putAttachment(Captures, refs) refs @@ -165,12 +87,12 @@ extension (tree: Tree) elems case _ => if tree.symbol.maybeOwner == defn.RetainsCapAnnot - then ref(defn.captureRoot.termRef) :: Nil + then ref(defn.captureRoot) :: Nil else Nil extension (tp: Type) - /** Is this type a CaptureRef that can be tracked? + /** Is this type a Capability that can be tracked? * This is true for * - all ThisTypes and all TermParamRef, * - stable TermRefs with NoPrefix or ThisTypes as prefixes, @@ -179,35 +101,32 @@ extension (tp: Type) * - annotated types that represent reach or maybe capabilities */ final def isTrackableRef(using Context): Boolean = tp match - case _: (ThisType | TermParamRef) => - true + case _: (ThisType | TermParamRef) => true case tp: TermRef => - ((tp.prefix eq NoPrefix) - || tp.symbol.isField && !tp.symbol.isStatic && tp.prefix.isTrackableRef - || tp.isRootCapability - ) && !tp.symbol.isOneOf(UnstableValueFlags) + !tp.underlying.exists // might happen during construction of lambdas with annotations on parameters + || + ((tp.prefix eq NoPrefix) + || tp.symbol.isField && !tp.symbol.isStatic && tp.prefix.isTrackableRef + ) && !tp.symbol.isOneOf(UnstableValueFlags) case tp: TypeRef => tp.symbol.isType && tp.derivesFrom(defn.Caps_CapSet) case tp: TypeParamRef => - tp.derivesFrom(defn.Caps_CapSet) - case AnnotatedType(parent, annot) => - (annot.symbol == defn.ReachCapabilityAnnot - || annot.symbol == defn.MaybeCapabilityAnnot - ) && parent.isTrackableRef + !tp.underlying.exists // might happen during construction of lambdas + || tp.derivesFrom(defn.Caps_CapSet) case _ => false /** The capture set of a type. This is: - * - For trackable capture references: The singleton capture set consisting of + * - For object capabilities: The singleton capture set consisting of * just the reference, provided the underlying capture set of their info is not empty. - * - For other capture references: The capture set of their info + * - For other capabilities: The capture set of their info * - For all other types: The result of CaptureSet.ofType */ final def captureSet(using Context): CaptureSet = tp match - case tp: CaptureRef if tp.isTrackableRef => + case tp: CoreCapability if tp.isTrackableRef => val cs = tp.captureSetOfInfo if cs.isAlwaysEmpty then cs else tp.singletonCaptureSet - case tp: SingletonCaptureRef => tp.captureSetOfInfo + case tp: ObjectCapability => tp.captureSetOfInfo case _ => CaptureSet.ofType(tp, followResult = false) /** The deep capture set of a type. 
This is by default the union of all @@ -220,18 +139,14 @@ extension (tp: Type) val dcs = CaptureSet.ofTypeDeeply(tp.widen.stripCapturing, includeTypevars) if dcs.isAlwaysEmpty then tp.captureSet else tp match - case tp @ ReachCapability(_) => - tp.singletonCaptureSet - case tp: SingletonCaptureRef if tp.isTrackableRef => - tp.reach.singletonCaptureSet - case _ => - tp.captureSet ++ dcs + case tp: ObjectCapability if tp.isTrackableRef => tp.reach.singletonCaptureSet + case _ => tp.captureSet ++ dcs def deepCaptureSet(using Context): CaptureSet = deepCaptureSet(includeTypevars = false) /** A type capturing `ref` */ - def capturing(ref: CaptureRef)(using Context): Type = + def capturing(ref: Capability)(using Context): Type = if tp.captureSet.accountsFor(ref) then tp else CapturingType(tp, ref.singletonCaptureSet) @@ -239,7 +154,7 @@ extension (tp: Type) * the two capture sets are combined. */ def capturing(cs: CaptureSet)(using Context): Type = - if (cs.isAlwaysEmpty || cs.isConst && cs.subCaptures(tp.captureSet, frozen = true).isOK) + if (cs.isAlwaysEmpty || cs.isConst && cs.subCaptures(tp.captureSet, VarState.Separate)) && !cs.keepAlways then tp else tp match @@ -259,8 +174,7 @@ extension (tp: Type) def boxed(using Context): Type = tp.dealias match case tp @ CapturingType(parent, refs) if !tp.isBoxed && !refs.isAlwaysEmpty => tp.annot match - case ann: CaptureAnnotation => - assert(!parent.derivesFrom(defn.Caps_CapSet)) + case ann: CaptureAnnotation if !parent.derivesFrom(defn.Caps_CapSet) => AnnotatedType(parent, ann.boxedAnnot) case ann => tp case tp: RealTypeBounds => @@ -268,29 +182,6 @@ extension (tp: Type) case _ => tp - /** The first element of this path type */ - final def pathRoot(using Context): Type = tp.dealias match - case tp1: NamedType if tp1.symbol.owner.isClass => tp1.prefix.pathRoot - case tp1 => tp1 - - /** If this part starts with `C.this`, the class `C`. - * Otherwise, if it starts with a reference `r`, `r`'s owner. - * Otherwise NoSymbol. - */ - final def pathOwner(using Context): Symbol = pathRoot match - case tp1: NamedType => tp1.symbol.owner - case tp1: ThisType => tp1.cls - case _ => NoSymbol - - final def isParamPath(using Context): Boolean = tp.dealias match - case tp1: NamedType => - tp1.prefix match - case _: ThisType | NoPrefix => - tp1.symbol.is(Param) || tp1.symbol.is(ParamAccessor) - case prefix => prefix.isParamPath - case _: ParamRef => true - case _ => false - /** If this is a unboxed capturing type with nonempty capture set, its boxed version. * Or, if type is a TypeBounds of capturing types, the version where the bounds are boxed. * The identity for all other types. @@ -306,18 +197,28 @@ extension (tp: Type) /** The capture set consisting of all top-level captures of `tp` that appear under a box. * Unlike for `boxed` this also considers parents of capture types, unions and * intersections, and type proxies other than abstract types. + * Furthermore, if the original type is a capability `x`, it replaces boxed universal sets + * on the fly with x*. 
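+ * For instance (illustration): if the original type is an object capability `x` whose info contains a boxed universal set, the result contains the reach capability `x*` (or `x*.rd` if that boxed set is read-only) instead of the universal set itself.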
*/ def boxedCaptureSet(using Context): CaptureSet = - def getBoxed(tp: Type): CaptureSet = tp match + def getBoxed(tp: Type, pre: Type): CaptureSet = tp match case tp @ CapturingType(parent, refs) => - val pcs = getBoxed(parent) - if tp.isBoxed then refs ++ pcs else pcs + val pcs = getBoxed(parent, pre) + if !tp.isBoxed then + pcs + else pre match + case pre: ObjectCapability if refs.containsTerminalCapability => + val reachRef = if refs.isReadOnly then pre.reach.readOnly else pre.reach + pcs ++ reachRef.singletonCaptureSet + case _ => + pcs ++ refs + case ref: Capability if ref.isTracked && !pre.exists => getBoxed(ref, ref) case tp: TypeRef if tp.symbol.isAbstractOrParamType => CaptureSet.empty - case tp: TypeProxy => getBoxed(tp.superType) - case tp: AndType => getBoxed(tp.tp1) ** getBoxed(tp.tp2) - case tp: OrType => getBoxed(tp.tp1) ++ getBoxed(tp.tp2) + case tp: TypeProxy => getBoxed(tp.superType, pre) + case tp: AndType => getBoxed(tp.tp1, pre) ** getBoxed(tp.tp2, pre) + case tp: OrType => getBoxed(tp.tp1, pre) ++ getBoxed(tp.tp2, pre) case _ => CaptureSet.empty - getBoxed(tp) + getBoxed(tp, NoType) /** Is the boxedCaptureSet of this type nonempty? */ def isBoxedCapturing(using Context): Boolean = @@ -345,7 +246,8 @@ extension (tp: Type) def forceBoxStatus(boxed: Boolean)(using Context): Type = tp.widenDealias match case tp @ CapturingType(parent, refs) if tp.isBoxed != boxed => val refs1 = tp match - case ref: CaptureRef if ref.isTracked || ref.isReach => ref.singletonCaptureSet + case ref: Capability if ref.isTracked || ref.isReach || ref.isReadOnly => + ref.singletonCaptureSet case _ => refs CapturingType(parent, refs1, boxed) case _ => @@ -379,23 +281,59 @@ extension (tp: Type) case _ => false - /** Tests whether the type derives from `caps.Capability`, which means - * references of this type are maximal capabilities. + /** Is this a type extending `Mutable` that has update methods? */ + def isMutableType(using Context): Boolean = + tp.derivesFrom(defn.Caps_Mutable) + && tp.membersBasedOnFlags(Mutable | Method, EmptyFlags) + .exists(_.hasAltWith(_.symbol.isUpdateMethod)) + + /** Is this a reference to caps.cap? Note this is _not_ the GlobalCap capability. */ + def isCapRef(using Context): Boolean = tp match + case tp: TermRef => tp.name == nme.CAPTURE_ROOT && tp.symbol == defn.captureRoot + case _ => false + + /** Knowing that `tp` is a function type, is it an alias to a function other + * than `=>`? */ - def derivesFromCapability(using Context): Boolean = tp.dealias match + def isAliasFun(using Context): Boolean = tp match + case AppliedType(tycon, _) => !defn.isFunctionSymbol(tycon.typeSymbol) + case _ => false + + /** Tests whether all CapturingType parts of the type that are traversed for + * dcs computation satisfy at least one of two conditions: + * 1. They decorate classes that extend the given capability class `cls`, or + * 2. Their capture set is constant and consists only of capabilities + * the derive from `cls` in the sense of `derivesFromCapTrait`. + */ + def derivesFromCapTraitDeeply(cls: ClassSymbol)(using Context): Boolean = + val accumulate = new DeepTypeAccumulator[Boolean]: + def capturingCase(acc: Boolean, parent: Type, refs: CaptureSet) = + this(acc, parent) + && (parent.derivesFromCapTrait(cls) + || refs.isConst && refs.elems.forall(_.derivesFromCapTrait(cls))) + def abstractTypeCase(acc: Boolean, t: TypeRef, upperBound: Type) = + this(acc, upperBound) + accumulate(true, tp) + + /** Tests whether the type derives from capability class `cls`. 
*/ + def derivesFromCapTrait(cls: ClassSymbol)(using Context): Boolean = tp.dealiasKeepAnnots match case tp: (TypeRef | AppliedType) => val sym = tp.typeSymbol - if sym.isClass then sym.derivesFrom(defn.Caps_Capability) - else tp.superType.derivesFromCapability + if sym.isClass then sym.derivesFrom(cls) + else tp.superType.derivesFromCapTrait(cls) case tp: (TypeProxy & ValueType) => - tp.superType.derivesFromCapability + tp.superType.derivesFromCapTrait(cls) case tp: AndType => - tp.tp1.derivesFromCapability || tp.tp2.derivesFromCapability + tp.tp1.derivesFromCapTrait(cls) || tp.tp2.derivesFromCapTrait(cls) case tp: OrType => - tp.tp1.derivesFromCapability && tp.tp2.derivesFromCapability + tp.tp1.derivesFromCapTrait(cls) && tp.tp2.derivesFromCapTrait(cls) case _ => false + def derivesFromCapability(using Context): Boolean = derivesFromCapTrait(defn.Caps_Capability) + def derivesFromMutable(using Context): Boolean = derivesFromCapTrait(defn.Caps_Mutable) + def derivesFromSharedCapability(using Context): Boolean = derivesFromCapTrait(defn.Caps_SharedCapability) + /** Drop @retains annotations everywhere */ def dropAllRetains(using Context): Type = // TODO we should drop retains from inferred types before unpickling val tm = new TypeMap: @@ -406,112 +344,61 @@ extension (tp: Type) mapOver(t) tm(tp) - /** If `x` is a capture ref, its reach capability `x*`, represented internally - * as `x @reachCapability`. `x*` stands for all capabilities reachable through `x`". - * We have `{x} <: {x*} <: dcs(x)}` where the deep capture set `dcs(x)` of `x` - * is the union of all capture sets that appear in covariant position in the - * type of `x`. If `x` and `y` are different variables then `{x*}` and `{y*}` - * are unrelated. + /** If `x` is a capability, replace all no-flip covariant occurrences of `cap` + * in type `tp` with `x*`. */ - def reach(using Context): CaptureRef = tp match - case tp: CaptureRef if tp.isTrackableRef => - if tp.isReach then tp else ReachCapability(tp) - - /** If `x` is a capture ref, its maybe capability `x?`, represented internally - * as `x @maybeCapability`. `x?` stands for a capability `x` that might or might - * not be part of a capture set. We have `{} <: {x?} <: {x}`. Maybe capabilities - * cannot be propagated between sets. If `a <: b` and `a` acquires `x?` then - * `x` is propagated to `b` as a conservative approximation. - * - * Maybe capabilities should only arise for capture sets that appear in invariant - * position in their surrounding type. They are similar to TypeBunds types, but - * restricted to capture sets. For instance, - * - * Array[C^{x?}] - * - * should be morally equivalent to - * - * Array[_ >: C^{} <: C^{x}] - * - * but it has fewer issues with type inference. - */ - def maybe(using Context): CaptureRef = tp match - case tp: CaptureRef if tp.isTrackableRef => - if tp.isMaybe then tp else MaybeCapability(tp) - - /** If `ref` is a trackable capture ref, and `tp` has only covariant occurrences of a - * universal capture set, replace all these occurrences by `{ref*}`. This implements - * the new aspect of the (Var) rule, which can now be stated as follows: - * - * x: T in E - * ----------- - * E |- x: T' - * - * where T' is T with (1) the toplevel capture set replaced by `{x}` and - * (2) all covariant occurrences of cap replaced by `x*`, provided there - * are no occurrences in `T` at other variances. (1) is standard, whereas - * (2) is new. - * - * For (2), multiple-flipped covariant occurrences of cap won't be replaced. 
- * In other words, - * - * - For xs: List[File^] ==> List[File^{xs*}], the cap is replaced; - * - while f: [R] -> (op: File^ => R) -> R remains unchanged. - * - * Without this restriction, the signature of functions like withFile: - * - * (path: String) -> [R] -> (op: File^ => R) -> R - * - * could be refined to - * - * (path: String) -> [R] -> (op: File^{withFile*} => R) -> R - * - * which is clearly unsound. - * - * Why is this sound? Covariant occurrences of cap must represent capabilities - * that are reachable from `x`, so they are included in the meaning of `{x*}`. - * At the same time, encapsulation is still maintained since no covariant - * occurrences of cap are allowed in instance types of type variables. - */ - def withReachCaptures(ref: Type)(using Context): Type = - object narrowCaps extends TypeMap: - var change = false - def apply(t: Type) = - if variance <= 0 then t - else t.dealiasKeepAnnots match - case t @ CapturingType(p, cs) if cs.isUniversal => - change = true - t.derivedCapturingType(apply(p), ref.reach.singletonCaptureSet) + def withReachCaptures(ref: Type)(using Context): Type = ref match + case ref: ObjectCapability if ref.isTrackableRef => + object narrowCaps extends TypeMap: + var change = false + def apply(t: Type) = + if variance <= 0 then t + else t.dealias match + case t @ CapturingType(p, cs) if cs.containsCapOrFresh => + change = true + val reachRef = if cs.isReadOnly then ref.reach.readOnly else ref.reach + t.derivedCapturingType(apply(p), reachRef.singletonCaptureSet) + case t @ AnnotatedType(parent, ann) => + // Don't map annotations, which includes capture sets + t.derivedAnnotatedType(this(parent), ann) + case t @ FunctionOrMethod(args, res) => + t.derivedFunctionOrMethod(args, apply(res)) + case _ => + mapOver(t) + end narrowCaps + val tp1 = narrowCaps(tp) + if narrowCaps.change then + capt.println(i"narrow $tp of $ref to $tp1") + tp1 + else + tp + case _ => + tp + end withReachCaptures + + /** Does this type contain no-flip covariant occurrences of `cap`? 
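+ * For instance (illustration): `List[File^]` does, whereas `[R] -> (op: File^ => R) -> R` does not, since the traversal stops at the contravariant parameter position.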
*/ + def containsCap(using Context): Boolean = + val acc = new TypeAccumulator[Boolean]: + def apply(x: Boolean, t: Type) = + x + || variance > 0 && t.dealiasKeepAnnots.match + case t @ CapturingType(p, cs) if cs.containsCap => + true case t @ AnnotatedType(parent, ann) => - // Don't map annotations, which includes capture sets - t.derivedAnnotatedType(this(parent), ann) - case t @ FunctionOrMethod(args, res @ Existential(_, _)) - if args.forall(_.isAlwaysPure) => - // Also map existentials in results to reach capabilities if all - // preceding arguments are known to be always pure - apply(t.derivedFunctionOrMethod(args, Existential.toCap(res))) - case Existential(_, _) => - t + // Don't traverse annotations, which includes capture sets + this(x, parent) case _ => - mapOver(t) - end narrowCaps - - ref match - case ref: CaptureRef if ref.isTrackableRef => - val tp1 = narrowCaps(tp) - if narrowCaps.change then - capt.println(i"narrow $tp of $ref to $tp1") - tp1 - else - tp - case _ => - tp + foldOver(x, t) + acc(false, tp) - def level(using Context): Level = - tp match - case tp: TermRef => tp.symbol.ccLevel - case tp: ThisType => tp.cls.ccLevel.nextInner - case _ => undefinedLevel + def refinedOverride(name: Name, rinfo: Type)(using Context): Type = + RefinedType(tp, name, + AnnotatedType(rinfo, Annotation(defn.RefineOverrideAnnot, util.Spans.NoSpan))) + +extension (tp: MethodType) + /** A method marks an existential scope unless it is the prefix of a curried method */ + def marksExistentialScope(using Context): Boolean = + !tp.resType.isInstanceOf[MethodOrPoly] extension (cls: ClassSymbol) @@ -615,6 +502,16 @@ extension (sym: Symbol) case c: TypeRef => c.symbol == sym case _ => false + def isUpdateMethod(using Context): Boolean = + sym.isAllOf(Mutable | Method, butNot = Accessor) + + def isReadOnlyMethod(using Context): Boolean = + sym.is(Method, butNot = Mutable | Accessor) && sym.owner.derivesFrom(defn.Caps_Mutable) + + def isInReadOnlyMethod(using Context): Boolean = + if sym.is(Method) && sym.owner.isClass then isReadOnlyMethod + else sym.owner.isInReadOnlyMethod + extension (tp: AnnotatedType) /** Is this a boxed capturing type? */ def isBoxed(using Context): Boolean = tp.annot match @@ -629,19 +526,6 @@ class CleanupRetains(using Context) extends TypeMap: RetainingType(tp, Nil, byName = annot.symbol == defn.RetainsByNameAnnot) case _ => mapOver(tp) -/** A typemap that follows aliases and keeps their transformed results if - * there is a change. - */ -trait FollowAliasesMap(using Context) extends TypeMap: - var follow = true // Used for debugging so that we can compare results with and w/o following. - def mapFollowingAliases(t: Type): Type = - val t1 = t.dealiasKeepAnnots - if follow && (t1 ne t) then - val t2 = apply(t1) - if t2 ne t1 then t2 - else t - else mapOver(t) - /** An extractor for `caps.reachCapability(ref)`, which is used to express a reach * capability as a tree in a @retains annotation. */ @@ -650,6 +534,14 @@ object ReachCapabilityApply: case Apply(reach, arg :: Nil) if reach.symbol == defn.Caps_reachCapability => Some(arg) case _ => None +/** An extractor for `caps.readOnlyCapability(ref)`, which is used to express a read-only + * capability as a tree in a @retains annotation. 
+ */ +object ReadOnlyCapabilityApply: + def unapply(tree: Apply)(using Context): Option[Tree] = tree match + case Apply(ro, arg :: Nil) if ro.symbol == defn.Caps_readOnlyCapability => Some(arg) + case _ => None + /** An extractor for `caps.capsOf[X]`, which is used to express a generic capture set * as a tree in a @retains annotation. */ @@ -658,50 +550,6 @@ object CapsOfApply: case TypeApply(capsOf, arg :: Nil) if capsOf.symbol == defn.Caps_capsOf => Some(arg) case _ => None -class AnnotatedCapability(annot: Context ?=> ClassSymbol): - def apply(tp: Type)(using Context) = - AnnotatedType(tp, Annotation(annot, util.Spans.NoSpan)) - def unapply(tree: AnnotatedType)(using Context): Option[CaptureRef] = tree match - case AnnotatedType(parent: CaptureRef, ann) if ann.symbol == annot => Some(parent) - case _ => None - -/** An extractor for `ref @annotation.internal.reachCapability`, which is used to express - * the reach capability `ref*` as a type. - */ -object ReachCapability extends AnnotatedCapability(defn.ReachCapabilityAnnot) - -/** An extractor for `ref @maybeCapability`, which is used to express - * the maybe capability `ref?` as a type. - */ -object MaybeCapability extends AnnotatedCapability(defn.MaybeCapabilityAnnot) - -/** Offers utility method to be used for type maps that follow aliases */ -trait ConservativeFollowAliasMap(using Context) extends TypeMap: - - /** If `mapped` is a type alias, apply the map to the alias, while keeping - * annotations. If the result is different, return it, otherwise return `mapped`. - * Furthermore, if `original` is a LazyRef or TypeVar and the mapped result is - * the same as the underlying type, keep `original`. This avoids spurious differences - * which would lead to spurious dealiasing in the result - */ - protected def applyToAlias(original: Type, mapped: Type) = - val mapped1 = mapped match - case t: (TypeRef | AppliedType) => - val t1 = t.dealiasKeepAnnots - if t1 eq t then t - else - // If we see a type alias, map the alias type and keep it if it's different - val t2 = apply(t1) - if t2 ne t1 then t2 else t - case _ => - mapped - original match - case original: (LazyRef | TypeVar) if mapped1 eq original.underlying => - original - case _ => - mapped1 -end ConservativeFollowAliasMap - /** An extractor for all kinds of function types as well as method and poly types. * It includes aliases of function types such as `=>`. TODO: Can we do without? * @return 1st half: The argument types or empty if this is a type function @@ -749,9 +597,41 @@ object ContainsImpl: /** An extractor for a contains parameter */ object ContainsParam: - def unapply(sym: Symbol)(using Context): Option[(TypeRef, CaptureRef)] = + def unapply(sym: Symbol)(using Context): Option[(TypeRef, Capability)] = sym.info.dealias match - case AppliedType(tycon, (cs: TypeRef) :: (ref: CaptureRef) :: Nil) + case AppliedType(tycon, (cs: TypeRef) :: (ref: Capability) :: Nil) if tycon.typeSymbol == defn.Caps_ContainsTrait && cs.typeSymbol.isAbstractOrParamType => Some((cs, ref)) case _ => None + +/** A class encapsulating the assumulator logic needed for `CaptureSet.ofTypeDeeply` + * and `derivesFromCapTraitDeeply`. + * NOTE: The traversal logic needs to be in sync with narrowCaps in CaptureOps, which + * replaces caps with reach capabilties. There are two exceptions, however. + * - First, invariant arguments. These have to be included to be conservative + * in dcs but must be excluded in narrowCaps. + * - Second, unconstrained type variables are handled specially in `ofTypeDeeply`. 
+ */ +abstract class DeepTypeAccumulator[T](using Context) extends TypeAccumulator[T]: + val seen = util.HashSet[Symbol]() + + protected def capturingCase(acc: T, parent: Type, refs: CaptureSet): T + + protected def abstractTypeCase(acc: T, t: TypeRef, upperBound: Type): T + + def apply(acc: T, t: Type) = + if variance < 0 then acc + else t.dealias match + case t @ CapturingType(parent, cs) => + capturingCase(acc, parent, cs) + case t: TypeRef if t.symbol.isAbstractOrParamType && !seen.contains(t.symbol) => + seen += t.symbol + abstractTypeCase(acc, t, t.info.bounds.hi) + case AnnotatedType(parent, _) => + this(acc, parent) + case t @ FunctionOrMethod(args, res) => + this(acc, res) + case _ => + foldOver(acc, t) +end DeepTypeAccumulator + diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureRef.scala b/compiler/src/dotty/tools/dotc/cc/CaptureRef.scala deleted file mode 100644 index 2caba4cf7d89..000000000000 --- a/compiler/src/dotty/tools/dotc/cc/CaptureRef.scala +++ /dev/null @@ -1,189 +0,0 @@ -package dotty.tools -package dotc -package cc - -import core.* -import Types.*, Symbols.*, Contexts.*, Decorators.* -import util.{SimpleIdentitySet, Property} -import typer.ErrorReporting.Addenda -import TypeComparer.subsumesExistentially -import util.common.alwaysTrue -import scala.collection.mutable -import CCState.* -import Periods.NoRunId -import compiletime.uninitialized -import StdNames.nme - -/** A trait for references in CaptureSets. These can be NamedTypes, ThisTypes or ParamRefs, - * as well as two kinds of AnnotatedTypes representing reach and maybe capabilities. - */ -trait CaptureRef extends TypeProxy, ValueType: - private var myCaptureSet: CaptureSet | Null = uninitialized - private var myCaptureSetRunId: Int = NoRunId - private var mySingletonCaptureSet: CaptureSet.Const | Null = null - - /** Is the reference tracked? This is true if it can be tracked and the capture - * set of the underlying type is not always empty. - */ - final def isTracked(using Context): Boolean = - this.isTrackableRef && (isMaxCapability || !captureSetOfInfo.isAlwaysEmpty) - - /** Is this a reach reference of the form `x*`? */ - final def isReach(using Context): Boolean = this match - case AnnotatedType(_, annot) => annot.symbol == defn.ReachCapabilityAnnot - case _ => false - - /** Is this a maybe reference of the form `x?`? */ - final def isMaybe(using Context): Boolean = this match - case AnnotatedType(_, annot) => annot.symbol == defn.MaybeCapabilityAnnot - case _ => false - - final def stripReach(using Context): CaptureRef = - if isReach then - val AnnotatedType(parent: CaptureRef, _) = this: @unchecked - parent - else this - - final def stripMaybe(using Context): CaptureRef = - if isMaybe then - val AnnotatedType(parent: CaptureRef, _) = this: @unchecked - parent - else this - - /** Is this reference the generic root capability `cap` ? */ - final def isRootCapability(using Context): Boolean = this match - case tp: TermRef => tp.name == nme.CAPTURE_ROOT && tp.symbol == defn.captureRoot - case _ => false - - /** Is this reference capability that does not derive from another capability ? */ - final def isMaxCapability(using Context): Boolean = this match - case tp: TermRef => tp.isRootCapability || tp.info.derivesFrom(defn.Caps_Exists) - case tp: TermParamRef => tp.underlying.derivesFrom(defn.Caps_Exists) - case _ => false - - // With the support of pathes, we don't need to normalize the `TermRef`s anymore. 
- // /** Normalize reference so that it can be compared with `eq` for equality */ - // final def normalizedRef(using Context): CaptureRef = this match - // case tp @ AnnotatedType(parent: CaptureRef, annot) if tp.isTrackableRef => - // tp.derivedAnnotatedType(parent.normalizedRef, annot) - // case tp: TermRef if tp.isTrackableRef => - // tp.symbol.termRef - // case _ => this - - /** The capture set consisting of exactly this reference */ - final def singletonCaptureSet(using Context): CaptureSet.Const = - if mySingletonCaptureSet == null then - mySingletonCaptureSet = CaptureSet(this) - mySingletonCaptureSet.uncheckedNN - - /** The capture set of the type underlying this reference */ - final def captureSetOfInfo(using Context): CaptureSet = - if ctx.runId == myCaptureSetRunId then myCaptureSet.nn - else if myCaptureSet.asInstanceOf[AnyRef] eq CaptureSet.Pending then CaptureSet.empty - else - myCaptureSet = CaptureSet.Pending - val computed = CaptureSet.ofInfo(this) - if !isCaptureChecking || ctx.mode.is(Mode.IgnoreCaptures) || underlying.isProvisional then - myCaptureSet = null - else - myCaptureSet = computed - myCaptureSetRunId = ctx.runId - computed - - final def invalidateCaches() = - myCaptureSetRunId = NoRunId - - /** x subsumes x - * x =:= y ==> x subsumes y - * x subsumes y ==> x subsumes y.f - * x subsumes y ==> x* subsumes y, x subsumes y? - * x subsumes y ==> x* subsumes y*, x? subsumes y? - * x: x1.type /\ x1 subsumes y ==> x subsumes y - * X = CapSet^cx, exists rx in cx, rx subsumes y ==> X subsumes y - * Y = CapSet^cy, forall ry in cy, x subsumes ry ==> x subsumes Y - * X: CapSet^c1...CapSet^c2, (CapSet^c1) subsumes y ==> X subsumes y - * Y: CapSet^c1...CapSet^c2, x subsumes (CapSet^c2) ==> x subsumes Y - * Contains[X, y] ==> X subsumes y - * - * TODO: Document cases with more comments. - */ - final def subsumes(y: CaptureRef)(using Context): Boolean = - def subsumingRefs(x: Type, y: Type): Boolean = x match - case x: CaptureRef => y match - case y: CaptureRef => x.subsumes(y) - case _ => false - case _ => false - - def viaInfo(info: Type)(test: Type => Boolean): Boolean = info.match - case info: SingletonCaptureRef => test(info) - case info: AndType => viaInfo(info.tp1)(test) || viaInfo(info.tp2)(test) - case info: OrType => viaInfo(info.tp1)(test) && viaInfo(info.tp2)(test) - case info @ CapturingType(_,_) if this.derivesFrom(defn.Caps_CapSet) => - /* - If `this` is a capture set variable `C^`, then it is possible that it can be - reached from term variables in a reachability chain through the context. - For instance, in `def test[C^](src: Foo^{C^}) = { val x: Foo^{src} = src; val y: Foo^{x} = x; y }` - we expect that `C^` subsumes `x` and `y` in the body of the method - (cf. test case cc-poly-varargs.scala for a more involved example). - */ - test(info) - case _ => false - - (this eq y) - || this.isRootCapability - || y.match - case y: TermRef if !y.isRootCapability => - y.prefix.match - case ypre: CaptureRef => - this.subsumes(ypre) - || this.match - case x @ TermRef(xpre: CaptureRef, _) if x.symbol == y.symbol => - // To show `{x.f} <:< {y.f}`, it is important to prove `x` and `y` - // are equvalent, which means `x =:= y` in terms of subtyping, - // not just `{x} =:= {y}` in terms of subcapturing. - // It is possible to construct two singleton types `x` and `y`, - // which subsume each other, but are not equal references. - // See `tests/neg-custom-args/captures/path-prefix.scala` for example. 
- withMode(Mode.IgnoreCaptures) {TypeComparer.isSameRef(xpre, ypre)} - case _ => - false - case _ => false - || viaInfo(y.info)(subsumingRefs(this, _)) - case MaybeCapability(y1) => this.stripMaybe.subsumes(y1) - case y: TypeRef if y.derivesFrom(defn.Caps_CapSet) => - // The upper and lower bounds don't have to be in the form of `CapSet^{...}`. - // They can be other capture set variables, which are bounded by `CapSet`, - // like `def test[X^, Y^, Z >: X <: Y]`. - y.info match - case TypeBounds(_, hi: CaptureRef) => this.subsumes(hi) - case _ => y.captureSetOfInfo.elems.forall(this.subsumes) - case CapturingType(parent, refs) if parent.derivesFrom(defn.Caps_CapSet) || this.derivesFrom(defn.Caps_CapSet) => - /* The second condition in the guard is for `this` being a `CapSet^{a,b...}` and etablishing a - potential reachability chain through `y`'s capture to a binding with - `this`'s capture set (cf. `CapturingType` case in `def viaInfo` above for more context). - */ - refs.elems.forall(this.subsumes) - case _ => false - || this.match - case ReachCapability(x1) => x1.subsumes(y.stripReach) - case x: TermRef => viaInfo(x.info)(subsumingRefs(_, y)) - case x: TermParamRef => subsumesExistentially(x, y) - case x: TypeRef if assumedContainsOf(x).contains(y) => true - case x: TypeRef if x.derivesFrom(defn.Caps_CapSet) => - x.info match - case TypeBounds(lo: CaptureRef, _) => - lo.subsumes(y) - case _ => - x.captureSetOfInfo.elems.exists(_.subsumes(y)) - case CapturingType(parent, refs) if parent.derivesFrom(defn.Caps_CapSet) => - refs.elems.exists(_.subsumes(y)) - case _ => false - end subsumes - - def assumedContainsOf(x: TypeRef)(using Context): SimpleIdentitySet[CaptureRef] = - CaptureSet.assumedContains.getOrElse(x, SimpleIdentitySet.empty) - -end CaptureRef - -trait SingletonCaptureRef extends SingletonType, CaptureRef - diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureRunInfo.scala b/compiler/src/dotty/tools/dotc/cc/CaptureRunInfo.scala new file mode 100644 index 000000000000..06107992b592 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/cc/CaptureRunInfo.scala @@ -0,0 +1,25 @@ +package dotty.tools.dotc +package cc + +import core.Contexts.{Context, ctx} +import config.Printers.capt + +trait CaptureRunInfo: + self: Run => + private var maxSize = 0 + private var maxPath: List[CaptureSet.DerivedVar] = Nil + + def recordPath(size: Int, path: => List[CaptureSet.DerivedVar]): Unit = + if size > maxSize then + maxSize = size + maxPath = path + + def printMaxPath()(using Context): Unit = + if maxSize > 0 then + println(s"max derived capture set path length: $maxSize") + println(s"max derived capture set path: ${maxPath.map(_.summarize).reverse}") + + protected def reset(): Unit = + maxSize = 0 + maxPath = Nil +end CaptureRunInfo diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala index 39c41c369864..22236a3853ef 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala @@ -14,10 +14,13 @@ import printing.{Showable, Printer} import printing.Texts.* import util.{SimpleIdentitySet, Property} import typer.ErrorReporting.Addenda -import TypeComparer.subsumesExistentially import util.common.alwaysTrue import scala.collection.{mutable, immutable} +import TypeComparer.ErrorNote import CCState.* +import TypeOps.AvoidMap +import compiletime.uninitialized +import Capabilities.* /** A class for capture sets. Capture sets can be constants or variables. 
* Capture sets support inclusion constraints <:< where <:< is subcapturing. @@ -30,14 +33,14 @@ import CCState.* * That is, constraints can be of the forms * * cs1 <:< cs2 - * cs1 = ∪ {f(x) | x ∈ cs2} where f is a function from capture references to capture sets. - * cs1 = ∪ {x | x ∈ cs2, p(x)} where p is a predicate on capture references + * cs1 = ∪ {f(x) | x ∈ cs2} where f is a function from capabilities to capture sets. + * cs1 = ∪ {x | x ∈ cs2, p(x)} where p is a predicate on capabilities * cs1 = cs2 ∩ cs2 * * We call the resulting constraint system "monadic set constraints". * To support capture propagation across maps, mappings are supported only * if the mapped function is either a bijection or if it is idempotent - * on capture references (c.f. doc comment on `map` below). + * on capabilities (c.f. doc comment on `map` below). */ sealed abstract class CaptureSet extends Showable: import CaptureSet.* @@ -50,12 +53,15 @@ sealed abstract class CaptureSet extends Showable: /** Is this capture set constant (i.e. not an unsolved capture variable)? * Solved capture variables count as constant. */ - def isConst: Boolean + def isConst(using Context): Boolean /** Is this capture set always empty? For unsolved capture veriables, returns * always false. */ - def isAlwaysEmpty: Boolean + def isAlwaysEmpty(using Context): Boolean + + /** Is this set provisionally solved, so that another cc run might unfreeze it? */ + def isProvisionallySolved(using Context): Boolean /** An optional level limit, or undefinedLevel if none exists. All elements of the set * must be at levels equal or smaller than the level of the set, if it is defined. @@ -69,27 +75,64 @@ sealed abstract class CaptureSet extends Showable: /** Is this capture set definitely non-empty? */ final def isNotEmpty: Boolean = !elems.isEmpty + /** If this is a Var, its `id`, otherwise -1 */ + def maybeId: Int = -1 + /** Convert to Const. @pre: isConst */ - def asConst: Const = this match + def asConst(using Context): Const = this match case c: Const => c case v: Var => assert(v.isConst) Const(v.elems) /** Cast to variable. @pre: !isConst */ - def asVar: Var = + def asVar(using Context): Var = assert(!isConst) asInstanceOf[Var] + /** Convert to Const with current elements unconditionally */ + def toConst: Const = this match + case c: Const => c + case v: Var => Const(v.elems) + /** Does this capture set contain the root reference `cap` as element? */ final def isUniversal(using Context) = - elems.exists(_.isRootCapability) + elems.contains(GlobalCap) + + /** Does this capture set contain a root reference `cap` or `cap.rd` as element? */ + final def containsTerminalCapability(using Context) = + elems.exists(_.isTerminalCapability) + + /** Does this capture set contain a ResultCap element? */ + final def containsResultCapability(using Context) = + elems.exists(_.core.isInstanceOf[ResultCap]) + + /** Does this capture set contain a GlobalCap or FreshCap, and at the same time + * does not contain a ResultCap? 
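The constraint forms listed above boil down to propagation: once a set is constrained to be included in another, every element that later enters the first set has to be forwarded to the second. A tiny standalone model of that propagation is sketched below; `SetVar` and its methods are invented for illustration and are not the compiler's `CaptureSet` API.

```scala
// Minimal model of "cs1 <:< cs2": cs2 registers itself as a dependent of cs1,
// and every element that enters cs1 is propagated to cs2 (and transitively on).
final class SetVar(val name: String):
  private var elems = Set.empty[String]
  private var deps  = Set.empty[SetVar]

  def currentElems: Set[String] = elems

  /** Record `this <:< that`: existing and future elements flow into `that`. */
  def subsetOf(that: SetVar): Unit =
    deps += that
    elems.foreach(that.include)

  /** Add an element and push it to all dependent sets. */
  def include(x: String): Unit =
    if !elems(x) then
      elems += x
      deps.foreach(_.include(x))

@main def demoPropagation(): Unit =
  val c1 = SetVar("c1"); val c2 = SetVar("c2")
  c1.subsetOf(c2)          // c1 <:< c2
  c1.include("io")
  println(c2.currentElems) // Set(io): the constraint keeps c2 a superset of c1
```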
+ */ + final def containsCapOrFresh(using Context) = + !containsResultCapability + && elems.exists: elem => + elem.core match + case GlobalCap => true + case _: FreshCap => true + case _ => false + + final def containsCap(using Context) = + elems.exists(_.core eq GlobalCap) - final def isUnboxable(using Context) = - elems.exists(elem => elem.isRootCapability || Existential.isExistentialVar(elem)) + final def isReadOnly(using Context): Boolean = + elems.forall(_.isReadOnly) + + final def isExclusive(using Context): Boolean = + elems.exists(_.isExclusive) final def keepAlways: Boolean = this.isInstanceOf[EmptyWithProvenance] + def failWith(fail: TypeComparer.ErrorNote)(using Context): false = + TypeComparer.addErrorNote(fail) + false + /** Try to include an element in this capture set. * @param elem The element to be added * @param origin The set that originated the request, or `empty` if the request came from outside. @@ -109,14 +152,17 @@ sealed abstract class CaptureSet extends Showable: * element is not the root capability, try instead to include its underlying * capture set. */ - protected def tryInclude(elem: CaptureRef, origin: CaptureSet)(using Context, VarState): CompareResult = - if accountsFor(elem) then CompareResult.OK - else addNewElem(elem) + protected def tryInclude(elem: Capability, origin: CaptureSet)(using Context, VarState): Boolean = reporting.trace(i"try include $elem in $this # ${maybeId}"): + accountsFor(elem) || addNewElem(elem) /** Try to include all element in `refs` to this capture set. */ - protected final def tryInclude(newElems: Refs, origin: CaptureSet)(using Context, VarState): CompareResult = - (CompareResult.OK /: newElems): (r, elem) => - r.andAlso(tryInclude(elem, origin)) + protected final def tryInclude(newElems: Refs, origin: CaptureSet)(using Context, VarState): Boolean = + TypeComparer.inNestedLevel: + // Run in nested level so that a error notes for a failure here can be + // cancelled in case the whole comparison succeeds. + // We do this here because all nested tryInclude and subCaptures calls go + // through this method. + newElems.forall(tryInclude(_, origin)) /** Add an element to this capture set, assuming it is not already accounted for, * and omitting any mapping or filtering. @@ -125,48 +171,59 @@ sealed abstract class CaptureSet extends Showable: * element is not the root capability, try instead to include its underlying * capture set. */ - protected final def addNewElem(elem: CaptureRef)(using Context, VarState): CompareResult = - if elem.isMaxCapability || summon[VarState] == FrozenState then - addThisElem(elem) - else - addThisElem(elem).orElse: - val underlying = elem.captureSetOfInfo - tryInclude(underlying.elems, this).andAlso: - underlying.addDependent(this) - CompareResult.OK - - /** Add new elements one by one using `addNewElem`, abort on first failure */ - protected final def addNewElems(newElems: Refs)(using Context, VarState): CompareResult = - (CompareResult.OK /: newElems): (r, elem) => - r.andAlso(addNewElem(elem)) + protected final def addNewElem(elem: Capability)(using ctx: Context, vs: VarState): Boolean = + addThisElem(elem) + || !elem.isTerminalCapability + && vs.isOpen + && { + val underlying = elem.captureSetOfInfo + val res = tryInclude(underlying.elems, this) + if res then underlying.addDependent(this) + res + } /** Add a specific element, assuming it is not already accounted for, * and omitting any mapping or filtering, without possibility to backtrack * to the underlying capture set. 
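`addNewElem` above first tries to add the capability itself and, only if that fails and the variable state is open, falls back to including the elements of the capability's underlying capture set. A schematic standalone version of that two-step strategy follows; `Cap`, `Target` and `allowed` are made-up names, and rollback of partial additions is omitted here.

```scala
// Schematic addNewElem fallback: try the element itself first; if the target
// rejects it, try the elements of its underlying capture set instead.
final case class Cap(name: String, underlying: Set[String])

final class Target(allowed: String => Boolean):
  private var elems = Set.empty[String]
  def contents: Set[String] = elems

  private def addDirect(x: String): Boolean =
    allowed(x) && { elems += x; true }

  def tryInclude(c: Cap): Boolean =
    addDirect(c.name)
    || c.underlying.forall(addDirect)   // fall back to the underlying elements

@main def demoFallback(): Unit =
  val t = Target(allowed = x => x != "cap")          // refuses the root capability itself
  println(t.tryInclude(Cap("cap", Set("io", "fs")))) // true, via the underlying {io, fs}
  println(t.contents)                                // Set(io, fs)
```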
*/ - protected def addThisElem(elem: CaptureRef)(using Context, VarState): CompareResult + protected def addThisElem(elem: Capability)(using Context, VarState): Boolean + + protected def addIfHiddenOrFail(elem: Capability)(using ctx: Context, vs: VarState): Boolean = + elems.exists(_.maxSubsumes(elem, canAddHidden = true)) + || failWith(IncludeFailure(this, elem)) /** If this is a variable, add `cs` as a dependent set */ - protected def addDependent(cs: CaptureSet)(using Context, VarState): CompareResult + protected def addDependent(cs: CaptureSet)(using Context, VarState): Boolean /** If `cs` is a variable, add this capture set as one of its dependent sets */ protected def addAsDependentTo(cs: CaptureSet)(using Context): this.type = - cs.addDependent(this)(using ctx, UnrecordedState) + cs.addDependent(this)(using ctx, VarState.Unrecorded) this /** {x} <:< this where <:< is subcapturing, but treating all variables * as frozen. */ - def accountsFor(x: CaptureRef)(using Context): Boolean = - def debugInfo(using Context) = i"$this accountsFor $x, which has capture set ${x.captureSetOfInfo}" + def accountsFor(x: Capability)(using ctx: Context)(using vs: VarState = VarState.Separate): Boolean = + + def debugInfo(using Context) = + val suffix = if ctx.settings.YccVerbose.value then i" with ${x.captureSetOfInfo}" else "" + i"$this accountsFor $x$suffix" + def test(using Context) = reporting.trace(debugInfo): - elems.exists(_.subsumes(x)) - || !x.isMaxCapability - && !x.derivesFrom(defn.Caps_CapSet) - && x.captureSetOfInfo.subCaptures(this, frozen = true).isOK + TypeComparer.noNotes: // Any failures in accountsFor should not lead to error notes + elems.exists(_.subsumes(x)) + || // Even though subsumes already follows captureSetOfInfo, this is not enough. + // For instance x: C^{y, z}. Then neither y nor z subsumes x but {y, z} accounts for x. + !x.isTerminalCapability + && !x.coreType.derivesFrom(defn.Caps_CapSet) + && !(vs.isSeparating && x.captureSetOfInfo.containsTerminalCapability) + // in VarState.Separate, don't try to widen to cap since that might succeed with {cap} <: {cap} + && x.captureSetOfInfo.subCaptures(this, VarState.Separate) + comparer match case comparer: ExplainingTypeComparer => comparer.traceIndented(debugInfo)(test) case _ => test + end accountsFor /** A more optimistic version of accountsFor, which does not take variable supersets * of the `x` reference into account. A set might account for `x` if it accounts @@ -175,15 +232,16 @@ sealed abstract class CaptureSet extends Showable: * a set `cs` might account for `x` only if it subsumes `x` or it contains the * root capability `cap`. */ - def mightAccountFor(x: CaptureRef)(using Context): Boolean = - reporting.trace(i"$this mightAccountFor $x, ${x.captureSetOfInfo}?", show = true) { - elems.exists(_.subsumes(x)) - || !x.isMaxCapability + def mightAccountFor(x: Capability)(using Context): Boolean = + reporting.trace(i"$this mightAccountFor $x, ${x.captureSetOfInfo}?", show = true): + CCState.withCollapsedFresh: // OK here since we opportunistically choose an alternative which gets checked later + TypeComparer.noNotes: + elems.exists(_.subsumes(x)(using ctx)(using VarState.ClosedUnrecorded)) + || !x.isTerminalCapability && { val elems = x.captureSetOfInfo.elems !elems.isEmpty && elems.forall(mightAccountFor) } - } /** A more optimistic version of subCaptures used to choose one of two typing rules * for selections and applications. 
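`accountsFor` above succeeds either because some element subsumes `x` directly, or because every element of `x`'s underlying capture set is itself covered, which is the `x: C^{y, z}` situation mentioned in the comment. A toy standalone rendering of that check follows, with a deliberately simplified subsumption test and invented names.

```scala
// Toy accountsFor: a set accounts for x if one of its elements subsumes x
// directly, or if every element of x's underlying capture set is itself
// accounted for (the C^{y, z} example from the comment above).
final case class Ref(name: String, underlying: Set[String] = Set.empty)

def subsumes(a: String, b: String): Boolean =
  a == b || a == "cap"            // the root capability subsumes everything (simplified)

def accountsFor(set: Set[String], x: Ref): Boolean =
  set.exists(subsumes(_, x.name))
  || x.underlying.nonEmpty && x.underlying.forall(u => accountsFor(set, Ref(u)))

@main def demoAccountsFor(): Unit =
  // Neither y nor z subsumes x, but {y, z} accounts for x: C^{y, z}.
  println(accountsFor(Set("y", "z"), Ref("x", underlying = Set("y", "z")))) // true
  println(accountsFor(Set("y"), Ref("x", underlying = Set("y", "z"))))      // false
```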
`cs1 mightSubcapture cs2` if `cs2` might account for @@ -194,52 +252,48 @@ sealed abstract class CaptureSet extends Showable: elems.forall(that.mightAccountFor) && !that.elems.forall(this.mightAccountFor) - /** The subcapturing test. - * @param frozen if true, no new variables or dependent sets are allowed to - * be added when making this test. An attempt to add either - * will result in failure. - */ - final def subCaptures(that: CaptureSet, frozen: Boolean)(using Context): CompareResult = - subCaptures(that)(using ctx, if frozen then FrozenState else VarState()) + /** The subcapturing test, taking an explicit VarState. */ + final def subCaptures(that: CaptureSet, vs: VarState)(using Context): Boolean = + subCaptures(that)(using ctx, vs) /** The subcapturing test, using a given VarState */ - private def subCaptures(that: CaptureSet)(using Context, VarState): CompareResult = - val result = that.tryInclude(elems, this) - if result.isOK then + final def subCaptures(that: CaptureSet)(using ctx: Context, vs: VarState = VarState()): Boolean = + if that.tryInclude(elems, this) then addDependent(that) else - ccState.levelError = ccState.levelError.orElse(result.levelError) varState.rollBack() - result - //.showing(i"subcaptures $this <:< $that = ${result.show}", capt) + false /** Two capture sets are considered =:= equal if they mutually subcapture each other * in a frozen state. */ def =:= (that: CaptureSet)(using Context): Boolean = - this.subCaptures(that, frozen = true).isOK - && that.subCaptures(this, frozen = true).isOK + this.subCaptures(that, VarState.Separate) + && that.subCaptures(this, VarState.Separate) /** The smallest capture set (via <:<) that is a superset of both * `this` and `that` */ def ++ (that: CaptureSet)(using Context): CaptureSet = - if this.subCaptures(that, frozen = true).isOK then + if this.subCaptures(that, VarState.HardSeparate) then if that.isAlwaysEmpty && this.keepAlways then this else that - else if that.subCaptures(this, frozen = true).isOK then this + else if that.subCaptures(this, VarState.HardSeparate) then this else if this.isConst && that.isConst then Const(this.elems ++ that.elems) else Union(this, that) + def ++ (that: CaptureSet.Const)(using Context): CaptureSet.Const = + Const(this.elems ++ that.elems) + /** The smallest superset (via <:<) of this capture set that also contains `ref`. */ - def + (ref: CaptureRef)(using Context): CaptureSet = + def + (ref: Capability)(using Context): CaptureSet = this ++ ref.singletonCaptureSet /** The largest capture set (via <:<) that is a subset of both `this` and `that` */ def **(that: CaptureSet)(using Context): CaptureSet = - if this.subCaptures(that, frozen = true).isOK then this - else if that.subCaptures(this, frozen = true).isOK then that + if this.subCaptures(that, VarState.Closed()) then this + else if that.subCaptures(this, VarState.Closed()) then that else if this.isConst && that.isConst then Const(elemIntersection(this, that)) else Intersection(this, that) @@ -254,55 +308,63 @@ sealed abstract class CaptureSet extends Showable: if that.isAlwaysEmpty then this else Diff(asVar, that) /** The largest subset (via <:<) of this capture set that does not account for `ref` */ - def - (ref: CaptureRef)(using Context): CaptureSet = + def - (ref: Capability)(using Context): CaptureSet = this -- ref.singletonCaptureSet /** The largest subset (via <:<) of this capture set that only contains elements * for which `p` is true. 
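The `++` operator above avoids allocating a new set whenever one operand already subcaptures the other, and only otherwise falls back to a constant or a union node. A small standalone sketch of that shortcut, with a stand-in subcapture test over explicit element sets (the `CSet` ADT is invented):

```scala
// Sketch of the ++ shortcut: if one operand already subcaptures the other,
// reuse that operand; only otherwise allocate a union node.
enum CSet:
  case Const(elems: Set[String])
  case Union(left: CSet, right: CSet)

  def elemsApprox: Set[String] = this match
    case Const(es)   => es
    case Union(l, r) => l.elemsApprox ++ r.elemsApprox

  def subCaptures(that: CSet): Boolean =
    this.elemsApprox.subsetOf(that.elemsApprox)   // stand-in for the real test

  def ++(that: CSet): CSet =
    if this.subCaptures(that) then that           // reuse the superset
    else if that.subCaptures(this) then this
    else Union(this, that)

@main def demoUnion(): Unit =
  val a = CSet.Const(Set("io"))
  val b = CSet.Const(Set("io", "fs"))
  println(a ++ b)   // Const(Set(io, fs)): b is reused, no new set is built
```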
*/ - def filter(p: Context ?=> CaptureRef => Boolean)(using Context): CaptureSet = + def filter(p: Context ?=> Capability => Boolean)(using Context): CaptureSet = if this.isConst then val elems1 = elems.filter(p) if elems1 == elems then this else Const(elems.filter(p)) - else Filtered(asVar, p) + else + this match + case self: Filtered => Filtered(self.source, ref => self.p(ref) && p(ref)) + case _ => Filtered(asVar, p) /** Capture set obtained by applying `tm` to all elements of the current capture set - * and joining the results. If the current capture set is a variable, the same - * transformation is applied to all future additions of new elements. - * - * Note: We have a problem how we handle the situation where we have a mapped set - * - * cs2 = tm(cs1) - * - * and then the propagation solver adds a new element `x` to `cs2`. What do we - * know in this case about `cs1`? We can answer this question in a sound way only - * if `tm` is a bijection on capture references or it is idempotent on capture references. - * (see definition in IdempotentCapRefMap). - * If `tm` is a bijection we know that `tm^-1(x)` must be in `cs1`. If `tm` is idempotent - * one possible solution is that `x` is in `cs1`, which is what we assume in this case. - * That strategy is sound but not complete. - * - * If `tm` is some other map, we don't know how to handle this case. For now, - * we simply refuse to handle other maps. If they do need to be handled, - * `OtherMapped` provides some approximation to a solution, but it is neither - * sound nor complete. + * and joining the results. If the current capture set is a variable we handle this as + * follows: + * - If the map is a BiTypeMap, the same transformation is applied to all + * future additions of new elements. We try to fuse with previous maps to + * avoid long paths of BiTypeMapped sets. + * - If the map is some other map that maps the current set of elements + * to itself, return the current var. We implicitly assume that the map + * will also map any elements added in the future to themselves. This assumption + * can be tested to hold by setting the ccConfig.checkSkippedMaps setting to true. + * - If the map is some other map that does not map all elements to themselves, + * freeze the current set (i.e. make it provisionally solved) and return + * the mapped elements as a constant set. 
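For a bijective map, the current elements are mapped eagerly and consecutive bijections are fused so that derivation chains stay short. The sketch below models that with an invertible renaming over strings; `Bijection`, `fuse` and the element names are invented and only illustrate the fusing idea, not the compiler's `BiTypeMap`.

```scala
// Toy bijective renaming over capture elements: elements are mapped eagerly,
// later additions can be pushed through the same map, and two bijections
// compose ("fuse") into a single one.
final case class Bijection(forward: Map[String, String]):
  def apply(x: String): String = forward.getOrElse(x, x)
  def inverse: Bijection = Bijection(forward.map(_.swap))
  def fuse(next: Bijection): Bijection =
    val keys = forward.keySet ++ next.forward.keySet
    Bijection(keys.map(k => k -> next(this(k))).toMap)

@main def demoBiMap(): Unit =
  val renameA = Bijection(Map("x" -> "x1", "x1" -> "x"))   // swap x and x1
  val renameB = Bijection(Map("x1" -> "x2", "x2" -> "x1")) // swap x1 and x2
  val fused   = renameA.fuse(renameB)
  println(Set("x", "io").map(fused(_)))  // Set(x2, io)
  println(fused.inverse(fused("x")))     // x: the inverse undoes the fused map
```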
*/ - def map(tm: TypeMap)(using Context): CaptureSet = tm match - case tm: BiTypeMap => - val mappedElems = elems.map(tm.forward) - if isConst then - if mappedElems == elems then this - else Const(mappedElems) - else BiMapped(asVar, tm, mappedElems) - case tm: IdentityCaptRefMap => - this - case _ => - val mapped = mapRefs(elems, tm, tm.variance) - if isConst then - if mapped.isConst && mapped.elems == elems && !mapped.keepAlways then this - else mapped - else Mapped(asVar, tm, tm.variance, mapped) + def map(tm: TypeMap)(using Context): CaptureSet = + tm match + case tm: BiTypeMap => + val mappedElems = elems.map(tm.mapCapability(_)) + if isConst then + if mappedElems == elems then this + else Const(mappedElems) + else if ccState.mapFutureElems then + def unfused = BiMapped(asVar, tm, mappedElems) + this match + case self: BiMapped => self.bimap.fuse(tm) match + case Some(fused: BiTypeMap) => BiMapped(self.source, fused, mappedElems) + case _ => unfused + case _ => unfused + else this + case tm: IdentityCaptRefMap => + this + case tm: AvoidMap if this.isInstanceOf[HiddenSet] => + this + case _ => + val mapped = mapRefs(elems, tm, tm.variance) + if mapped.elems == elems then + if ccConfig.checkSkippedMaps && !isConst then asVar.skippedMaps += tm + this + else + if !isConst then asVar.markSolved(provisional = true) + mapped /** A mapping resulting from substituting parameters of a BindingType to a list of types */ def substParams(tl: BindingType, to: List[Type])(using Context) = @@ -310,15 +372,34 @@ sealed abstract class CaptureSet extends Showable: def maybe(using Context): CaptureSet = map(MaybeMap()) - /** Invoke handler if this set has (or later aquires) the root capability `cap` */ - def disallowRootCapability(handler: () => Context ?=> Unit)(using Context): this.type = - if isUnboxable then handler() + def readOnly(using Context): CaptureSet = map(ReadOnlyMap()) + + /** A bad root `elem` is inadmissible as a member of this set. What is a bad roots depends + * on the value of `rootLimit`. + * If the limit is null, all capture roots are good. + * If the limit is NoSymbol, all Fresh roots are good, but cap and Result roots are bad. + * If the limit is some other symbol, cap and Result roots are bad, as well as + * all Fresh roots that are contained (via ccOwner) in `rootLimit`. + */ + protected def isBadRoot(rootLimit: Symbol | Null, elem: Capability)(using Context): Boolean = + if rootLimit == null then false + else elem.core match + case GlobalCap | _: ResultCap => true + case elem: FreshCap => elem.ccOwner.isContainedIn(rootLimit) + case _ => false + + /** Invoke `handler` if this set has (or later aquires) a root capability. + * Excluded are Fresh instances unless their ccOwner is contained in `upto`. + * If `upto` is NoSymbol, all Fresh instances are admitted. + */ + def disallowRootCapability(upto: Symbol)(handler: () => Context ?=> Unit)(using Context): this.type = + if elems.exists(isBadRoot(upto, _)) then handler() this /** Invoke handler on the elements to ensure wellformedness of the capture set. * The handler might add additional elements to the capture set. */ - def ensureWellformed(handler: CaptureRef => Context ?=> Unit)(using Context): this.type = + def ensureWellformed(handler: Capability => Context ?=> Unit)(using Context): this.type = elems.foreach(handler(_)) this @@ -336,7 +417,7 @@ sealed abstract class CaptureSet extends Showable: * to this set. This might result in the set being solved to be constant * itself. 
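`isBadRoot` above makes a three-way decision: with no limit every root is admissible, and with a limit the global and result roots are rejected while a fresh root is rejected only when its owner lies inside the limiting scope. A standalone toy model of that decision follows; the `Root` ADT and the ownership map are invented.

```scala
// Toy classification of "bad roots" relative to an optional limit:
// without a limit every root is admissible; with a limit, Global and Result
// roots are rejected, and a Fresh root is rejected only when its owner sits
// inside the limiting scope.
enum Root:
  case Global
  case Result
  case Fresh(owner: String)

/** `parentOf` encodes an ownership chain; a scope contains an owner if the
 *  owner reaches the scope by following parents (or is the scope itself). */
def containedIn(owner: String, scope: String, parentOf: Map[String, String]): Boolean =
  owner == scope || parentOf.get(owner).exists(containedIn(_, scope, parentOf))

def isBadRoot(limit: Option[String], parentOf: Map[String, String], r: Root): Boolean =
  limit match
    case None => false
    case Some(scope) =>
      r match
        case Root.Global | Root.Result => true
        case Root.Fresh(owner)         => containedIn(owner, scope, parentOf)

@main def demoBadRoot(): Unit =
  val parents = Map("anon" -> "f", "f" -> "C")
  println(isBadRoot(None, parents, Root.Global))             // false
  println(isBadRoot(Some("f"), parents, Root.Fresh("anon"))) // true: owned inside f
  println(isBadRoot(Some("f"), parents, Root.Fresh("C")))    // false: C encloses f
```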
*/ - protected def propagateSolved()(using Context): Unit = () + protected def propagateSolved(provisional: Boolean)(using Context): Unit = () /** This capture set with a description that tells where it comes from */ def withDescription(description: String): CaptureSet @@ -354,44 +435,68 @@ sealed abstract class CaptureSet extends Showable: override def toText(printer: Printer): Text = printer.toTextCaptureSet(this) ~~ description + /** Apply function `f` to the elements. Typically used for printing. + * Overridden in HiddenSet so that we don't run into infinite recursions + */ + def processElems[T](f: Refs => T): T = f(elems) + object CaptureSet: - type Refs = SimpleIdentitySet[CaptureRef] + type Refs = SimpleIdentitySet[Capability] type Vars = SimpleIdentitySet[Var] type Deps = SimpleIdentitySet[CaptureSet] - @sharable private var varId = 0 - /** If set to `true`, capture stack traces that tell us where sets are created */ private final val debugSets = false - private val emptySet = SimpleIdentitySet.empty + val emptyRefs: Refs = SimpleIdentitySet.empty /** The empty capture set `{}` */ - val empty: CaptureSet.Const = Const(emptySet) + val empty: CaptureSet.Const = Const(emptyRefs) /** The universal capture set `{cap}` */ - def universal(using Context): CaptureSet = - defn.captureRoot.termRef.singletonCaptureSet + def universal(using Context): Const = + Const(SimpleIdentitySet(GlobalCap)) + + /** The same as {cap.rd} but generated implicitly for + * references of Capability subtypes + */ + val csImpliedByCapability = Const(SimpleIdentitySet(GlobalCap.readOnly)) + + def fresh(origin: Origin)(using Context): Const = + FreshCap(origin).singletonCaptureSet + + /** The shared capture set `{cap.rd}` */ + def shared(using Context): Const = + GlobalCap.readOnly.singletonCaptureSet /** Used as a recursion brake */ @sharable private[dotc] val Pending = Const(SimpleIdentitySet.empty) - def apply(elems: CaptureRef*)(using Context): CaptureSet.Const = + def apply(elems: Capability*)(using Context): Const = if elems.isEmpty then empty - else Const(SimpleIdentitySet(elems.map(_.ensuring(_.isTrackableRef))*)) + else + for elem <- elems do + assert(elem.isWellformed, i"not a trackable ref: $elem") + Const(SimpleIdentitySet(elems*)) - def apply(elems: Refs)(using Context): CaptureSet.Const = + def apply(elems: Refs)(using Context): Const = if elems.isEmpty then empty else Const(elems) /** The subclass of constant capture sets with given elements `elems` */ class Const private[CaptureSet] (val elems: Refs, val description: String = "") extends CaptureSet: - def isConst = true - def isAlwaysEmpty = elems.isEmpty - - def addThisElem(elem: CaptureRef)(using Context, VarState): CompareResult = - CompareResult.Fail(this :: Nil) + def isConst(using Context) = true + def isAlwaysEmpty(using Context) = elems.isEmpty + def isProvisionallySolved(using Context) = false + + def addThisElem(elem: Capability)(using Context, VarState): Boolean = + addIfHiddenOrFail(elem) + || { + if this.isProvisionallySolved then + capt.println(i"Cannot add $elem to provisionally solved $this") + false + } - def addDependent(cs: CaptureSet)(using Context, VarState) = CompareResult.OK + def addDependent(cs: CaptureSet)(using Context, VarState) = true def upperApprox(origin: CaptureSet)(using Context): CaptureSet = this @@ -404,7 +509,7 @@ object CaptureSet: override def toString = elems.toString end Const - case class EmptyWithProvenance(ref: CaptureRef, mapped: Type) extends Const(SimpleIdentitySet.empty): + case class 
EmptyWithProvenance(ref: Capability, mapped: CaptureSet) extends Const(SimpleIdentitySet.empty): override def optionalInfo(using Context): String = if ctx.settings.YccDebug.value then i" under-approximating the result of mapping $ref to $mapped" @@ -416,47 +521,64 @@ object CaptureSet: * nulls, this provides more lenient checking against compilation units that * were not yet compiled with capture checking on. */ - object Fluid extends Const(emptySet): - override def isAlwaysEmpty = false - override def addThisElem(elem: CaptureRef)(using Context, VarState) = CompareResult.OK - override def accountsFor(x: CaptureRef)(using Context): Boolean = true - override def mightAccountFor(x: CaptureRef)(using Context): Boolean = true + object Fluid extends Const(emptyRefs): + override def isAlwaysEmpty(using Context) = false + override def addThisElem(elem: Capability)(using Context, VarState) = true + override def accountsFor(x: Capability)(using Context)(using VarState): Boolean = true + override def mightAccountFor(x: Capability)(using Context): Boolean = true override def toString = "" end Fluid /** The subclass of captureset variables with given initial elements */ - class Var(override val owner: Symbol = NoSymbol, initialElems: Refs = emptySet, val level: Level = undefinedLevel, underBox: Boolean = false)(using @constructorOnly ictx: Context) extends CaptureSet: + class Var(initialOwner: Symbol = NoSymbol, initialElems: Refs = emptyRefs, val level: Level = undefinedLevel, underBox: Boolean = false)(using @constructorOnly ictx: Context) extends CaptureSet: + + override def owner = initialOwner /** A unique identification number for diagnostics */ val id = - varId += 1 - varId + val ccs = ccState + ccs.varId += 1 + ccs.varId - //assert(id != 40) + override def maybeId = id - /** A variable is solved if it is aproximated to a from-then-on constant set. */ - private var isSolved: Boolean = false + //assert(id != 8, this) + + /** A variable is solved if it is aproximated to a from-then-on constant set. + * Interpretation: + * 0 not solved + * Int.MaxValue definitively solved + * n > 0 provisionally solved in iteration n + */ + private var solved: Int = 0 /** The elements currently known to be in the set */ - var elems: Refs = initialElems + protected var myElems: Refs = initialElems + + def elems: Refs = myElems + def elems_=(refs: Refs): Unit = myElems = refs /** The sets currently known to be dependent sets (i.e. new additions to this set * are propagated to these dependent sets.) */ - var deps: Deps = emptySet + var deps: Deps = SimpleIdentitySet.empty - def isConst = isSolved - def isAlwaysEmpty = isSolved && elems.isEmpty + def isConst(using Context) = solved >= ccState.iterationId + def isAlwaysEmpty(using Context) = isConst && elems.isEmpty + def isProvisionallySolved(using Context): Boolean = solved > 0 && solved != Int.MaxValue def isMaybeSet = false // overridden in BiMapped /** A handler to be invoked if the root reference `cap` is added to this set */ var rootAddedHandler: () => Context ?=> Unit = () => () - private[CaptureSet] var noUniversal = false + /** The limit deciding which capture roots are bad (i.e. cannot be contained in this set). + * @see isBadRoot for details. 
+ */ + private[CaptureSet] var rootLimit: Symbol | Null = null /** A handler to be invoked when new elems are added to this set */ - var newElemAddedHandler: CaptureRef => Context ?=> Unit = _ => () + var newElemAddedHandler: Capability => Context ?=> Unit = _ => () var description: String = "" @@ -484,68 +606,93 @@ object CaptureSet: def resetDeps()(using state: VarState): Unit = deps = state.deps(this) - final def addThisElem(elem: CaptureRef)(using Context, VarState): CompareResult = - if isConst // Fail if variable is solved, - || !recordElemsState() // or given VarState is frozen, - || Existential.isBadExistential(elem) // or `elem` is an out-of-scope existential, - then - CompareResult.Fail(this :: Nil) + /** Check that all maps recorded in skippedMaps map `elem` to itself + * or something subsumed by it. + */ + private def checkSkippedMaps(elem: Capability)(using Context): Unit = + for tm <- skippedMaps do + for elem1 <- mappedSet(elem, tm, variance = 1).elems do + assert(elem.subsumes(elem1), + i"Skipped map ${tm.getClass} maps newly added $elem to $elem1 in $this") + + final def addThisElem(elem: Capability)(using Context, VarState): Boolean = + if isConst || !recordElemsState() then // Fail if variable is solved or given VarState is frozen + addIfHiddenOrFail(elem) else if !levelOK(elem) then - CompareResult.LevelError(this, elem) // or `elem` is not visible at the level of the set. + failWith(IncludeFailure(this, elem, levelError = true)) // or `elem` is not visible at the level of the set. else - //if id == 34 then assert(!elem.isUniversalRootCapability) - assert(elem.isTrackableRef, elem) + // id == 108 then assert(false, i"trying to add $elem to $this") + assert(elem.isWellformed, elem) + assert(!this.isInstanceOf[HiddenSet] || summon[VarState].isSeparating, summon[VarState]) elems += elem - if elem.isRootCapability then + if isBadRoot(rootLimit, elem) then rootAddedHandler() newElemAddedHandler(elem) val normElem = if isMaybeSet then elem else elem.stripMaybe // assert(id != 5 || elems.size != 3, this) - val res = (CompareResult.OK /: deps): (r, dep) => - r.andAlso(dep.tryInclude(normElem, this)) - res.orElse: + val res = deps.forall: dep => + reporting.trace(i"forward $normElem from $this # $id to $dep # ${dep.maybeId} of class ${dep.getClass.toString}"): + dep.tryInclude(normElem, this) + if ccConfig.checkSkippedMaps && res then checkSkippedMaps(elem) + if !res then elems -= elem - res.addToTrace(this) - - private def levelOK(elem: CaptureRef)(using Context): Boolean = - if elem.isRootCapability then - !noUniversal - else if Existential.isExistentialVar(elem) then - !noUniversal - && !TypeComparer.isOpenedExistential(elem) - // Opened existentials on the left cannot be added to nested capture sets on the right - // of a comparison. Test case is open-existential.scala. 
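`addThisElem` above adds the element optimistically, forwards it to all dependent sets, and removes it again if any of them refuses it. A minimal standalone version of that add-propagate-undo step is sketched below; the `Node` class and its acceptance test are invented.

```scala
// Sketch of optimistic addition with undo: add the element, forward it to every
// dependent set, and take it out again if any of them rejects it.
final class Node(val name: String, accepts: String => Boolean):
  private var elems = Set.empty[String]
  var deps: List[Node] = Nil

  def contents: Set[String] = elems

  def tryInclude(x: String): Boolean =
    elems(x) || {
      if !accepts(x) then false
      else
        elems += x
        val ok = deps.forall(_.tryInclude(x))
        if !ok then elems -= x            // roll the local addition back
        ok
    }

@main def demoUndo(): Unit =
  val sink = Node("sink", accepts = _ != "cap")
  val src  = Node("src", accepts = _ => true)
  src.deps = List(sink)
  println(src.tryInclude("io"))   // true: accepted everywhere
  println(src.tryInclude("cap"))  // false: sink refuses, so src removes it again
  println(src.contents)           // Set(io)
```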
- else elem match - case elem: TermRef if level.isDefined => - elem.prefix match - case prefix: CaptureRef => - levelOK(prefix) - case _ => - elem.symbol.ccLevel <= level - case elem: ThisType if level.isDefined => - elem.cls.ccLevel.nextInner <= level - case ReachCapability(elem1) => - levelOK(elem1) - case MaybeCapability(elem1) => - levelOK(elem1) - case _ => - true - - def addDependent(cs: CaptureSet)(using Context, VarState): CompareResult = - if (cs eq this) || cs.isUniversal || isConst then - CompareResult.OK - else if recordDepsState() then - deps += cs - CompareResult.OK - else - CompareResult.Fail(this :: Nil) - - override def disallowRootCapability(handler: () => Context ?=> Unit)(using Context): this.type = - noUniversal = true + TypeComparer.updateErrorNotes: + case note: IncludeFailure => note.addToTrace(this) + res + + private def isPartOf(binder: Type)(using Context): Boolean = + val find = new TypeAccumulator[Boolean]: + def apply(b: Boolean, t: Type) = + b || t.match + case CapturingType(p, refs) => (refs eq Var.this) || this(b, p) + case _ => foldOver(b, t) + find(false, binder) + + private def levelOK(elem: Capability)(using Context): Boolean = elem match + case _: FreshCap => + !level.isDefined + || ccState.symLevel(elem.ccOwner) <= level + || { + capt.println(i"LEVEL ERROR $elem cannot be included in $this of $owner") + false + } + case elem @ ResultCap(binder) => + rootLimit == null && (this.isInstanceOf[BiMapped] || isPartOf(binder.resType)) + case GlobalCap => + rootLimit == null + case elem: TermRef if level.isDefined => + elem.prefix match + case prefix: Capability => + levelOK(prefix) + case _ => + ccState.symLevel(elem.symbol) <= level + case elem: ThisType if level.isDefined => + ccState.symLevel(elem.cls).nextInner <= level + case elem: ParamRef if !this.isInstanceOf[BiMapped] => + isPartOf(elem.binder.resType) + || { + capt.println( + i"""LEVEL ERROR $elem for $this + |elem binder = ${elem.binder}""") + false + } + case elem: DerivedCapability => + levelOK(elem.underlying) + case _ => + true + + def addDependent(cs: CaptureSet)(using Context, VarState): Boolean = + (cs eq this) + || cs.isUniversal + || isConst + || recordDepsState() && { deps += cs; true } + + override def disallowRootCapability(upto: Symbol)(handler: () => Context ?=> Unit)(using Context): this.type = + rootLimit = upto rootAddedHandler = handler - super.disallowRootCapability(handler) + super.disallowRootCapability(upto)(handler) - override def ensureWellformed(handler: CaptureRef => (Context) ?=> Unit)(using Context): this.type = + override def ensureWellformed(handler: Capability => (Context) ?=> Unit)(using Context): this.type = newElemAddedHandler = handler super.ensureWellformed(handler) @@ -558,41 +705,47 @@ object CaptureSet: final def upperApprox(origin: CaptureSet)(using Context): CaptureSet = if isConst then this - else if elems.exists(_.isRootCapability) || computingApprox then + else if isUniversal || computingApprox then universal + else if containsCap && isReadOnly then + shared else computingApprox = true try val approx = computeApprox(origin).ensuring(_.isConst) - if approx.elems.exists(Existential.isExistentialVar(_)) then + if approx.elems.exists(_.isInstanceOf[ResultCap]) then ccState.approxWarnings += em"""Capture set variable $this gets upper-approximated - |to existential variable from $approx, using {cap} instead.""" + |to existential variable from $approx, using {cap} instead.""" universal else approx finally computingApprox = false /** The intersection of all upper 
approximations of dependent sets */ protected def computeApprox(origin: CaptureSet)(using Context): CaptureSet = - (universal /: deps) { (acc, sup) => acc ** sup.upperApprox(this) } + ((universal: CaptureSet) /: deps) { (acc, sup) => acc ** sup.upperApprox(this) } /** Widen the variable's elements to its upper approximation and * mark it as constant from now on. This is used for contra-variant type variables * in the results of defs and vals. */ def solve()(using Context): Unit = - if !isConst then + CCState.withCapAsRoot: // // OK here since we infer parameter types that get checked later val approx = upperApprox(empty) + .map(CapToFresh(Origin.Unknown).inverse) // Fresh --> cap .showing(i"solve $this = $result", capt) //println(i"solving var $this $approx ${approx.isConst} deps = ${deps.toList}") val newElems = approx.elems -- elems - if tryInclude(newElems, empty)(using ctx, VarState()).isOK then - markSolved() + given VarState() + if tryInclude(newElems, empty) then + markSolved(provisional = false) /** Mark set as solved and propagate this info to all dependent sets */ - def markSolved()(using Context): Unit = - isSolved = true - deps.foreach(_.propagateSolved()) + def markSolved(provisional: Boolean)(using Context): Unit = + solved = if provisional then ccState.iterationId else Int.MaxValue + deps.foreach(_.propagateSolved(provisional)) + + var skippedMaps: Set[TypeMap] = Set.empty def withDescription(description: String): this.type = this.description = this.description.join(" and ", description) @@ -622,11 +775,13 @@ object CaptureSet: * is not derived from some other variable. */ protected def ids(using Context): String = + def descr = getClass.getSimpleName.nn.take(1) val trail = this.match - case dv: DerivedVar => dv.source.ids - case _ => "" - val descr = getClass.getSimpleName.nn.take(1) - s"$id$descr$trail" + case dv: DerivedVar => + def summary = if ctx.settings.YccVerbose.value then dv.summarize else descr + s"$summary${dv.source.ids}" + case _ => descr + s"$id$trail" override def toString = s"Var$id$elems" end Var @@ -635,140 +790,67 @@ object CaptureSet: * Test case: Without that tweak, logger.scala would not compile. */ class RefiningVar(owner: Symbol)(using Context) extends Var(owner): - override def disallowRootCapability(handler: () => Context ?=> Unit)(using Context) = this + override def disallowRootCapability(upto: Symbol)(handler: () => Context ?=> Unit)(using Context) = this /** A variable that is derived from some other variable via a map or filter. */ abstract class DerivedVar(owner: Symbol, initialElems: Refs)(using @constructorOnly ctx: Context) extends Var(owner, initialElems): // For debugging: A trace where a set was created. Note that logically it would make more - // sense to place this variable in Mapped, but that runs afoul of the initializatuon checker. - val stack = if debugSets && this.isInstanceOf[Mapped] then (new Throwable).getStackTrace().nn.take(20) else null + // sense to place this variable in Mapped, but that runs afoul of the initialization checker. 
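`solve()` above widens a still-open variable to an upper approximation derived from its dependent sets and then marks it solved, after which further additions are refused. A toy standalone version of that step, where the approximation is simply the intersection of the supersets' current elements (all names invented):

```scala
// Toy solve(): widen an unsolved variable to the intersection of its supersets'
// elements, then freeze it so that later additions are refused.
final class ToyVar(initial: Set[String]):
  private var elems  = initial
  private var solved = false
  var supersets: List[ToyVar] = Nil

  def contents: Set[String] = elems

  def include(x: String): Boolean =
    if solved then elems(x)             // a solved set accepts only what it already has
    else { elems += x; true }

  def solve(): Unit =
    if !solved then
      val approx =
        if supersets.isEmpty then elems
        else supersets.map(_.contents).reduce(_ intersect _)
      approx.foreach(include)
      solved = true

@main def demoSolve(): Unit =
  val sup1 = ToyVar(Set("io", "fs"))
  val sup2 = ToyVar(Set("io", "net"))
  val v    = ToyVar(Set.empty)
  v.supersets = List(sup1, sup2)
  v.solve()
  println(v.contents)        // Set(io): the intersection of the upper bounds
  println(v.include("fs"))   // false: the variable is frozen after solving
```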
+ // val stack = if debugSets && this.isInstanceOf[Mapped] then (new Throwable).getStackTrace().take(20) else null /** The variable from which this variable is derived */ def source: Var addAsDependentTo(source) - override def propagateSolved()(using Context) = - if source.isConst && !isConst then markSolved() - end DerivedVar + override def propagateSolved(provisional: Boolean)(using Context) = + if source.isConst && !isConst then markSolved(provisional) - /** A variable that changes when `source` changes, where all additional new elements are mapped - * using ∪ { tm(x) | x <- source.elems }. - * @param source the original set that is mapped - * @param tm the type map, which is assumed to be idempotent on capture refs - * (except if ccUnsoundMaps is enabled) - * @param variance the assumed variance with which types with capturesets of size >= 2 are approximated - * (i.e. co: full capture set, contra: empty set, nonvariant is not allowed.) - * @param initial The initial mappings of source's elements at the point the Mapped set is created. - */ - class Mapped private[CaptureSet] - (val source: Var, tm: TypeMap, variance: Int, initial: CaptureSet)(using @constructorOnly ctx: Context) - extends DerivedVar(source.owner, initial.elems): - addAsDependentTo(initial) // initial mappings could change by propagation - - private def mapIsIdempotent = tm.isInstanceOf[IdempotentCaptRefMap] - - assert(ccConfig.allowUnsoundMaps || mapIsIdempotent, tm.getClass) - - private def whereCreated(using Context): String = - if stack == null then "" - else i""" - |Stack trace of variable creation:" - |${stack.mkString("\n")}""" - - override def tryInclude(elem: CaptureRef, origin: CaptureSet)(using Context, VarState): CompareResult = - def propagate: CompareResult = - if (origin ne source) && (origin ne initial) && mapIsIdempotent then - // `tm` is idempotent, propagate back elems from image set. - // This is sound, since we know that for `r in newElems: tm(r) = r`, hence - // `r` is _one_ possible solution in `source` that would make an `r` appear in this set. - // It's not necessarily the only possible solution, so the scheme is incomplete. - source.tryInclude(elem, this) - else if ccConfig.allowUnsoundMaps && !mapIsIdempotent - && variance <= 0 && !origin.isConst && (origin ne initial) && (origin ne source) - then - // The map is neither a BiTypeMap nor an idempotent type map. - // In that case there's no much we can do. - // The scheme then does not propagate added elements back to source and rejects adding - // elements from variable sources in contra- and non-variant positions. In essence, - // we approximate types resulting from such maps by returning a possible super type - // from the actual type. But this is neither sound nor complete. - report.warning(em"trying to add $elem from unrecognized source $origin of mapped set $this$whereCreated") - CompareResult.Fail(this :: Nil) - else - CompareResult.OK - def propagateIf(cond: Boolean): CompareResult = - if cond then propagate else CompareResult.OK - - val mapped = extrapolateCaptureRef(elem, tm, variance) - - def isFixpoint = - mapped.isConst && mapped.elems.size == 1 && mapped.elems.contains(elem) - - def failNoFixpoint = - val reason = - if variance <= 0 then i"the set's variance is $variance" - else i"$elem gets mapped to $mapped, which is not a supercapture." 
- report.warning(em"""trying to add $elem from unrecognized source $origin of mapped set $this$whereCreated - |The reference cannot be added since $reason""") - CompareResult.Fail(this :: Nil) - - if origin eq source then // elements have to be mapped - val added = mapped.elems.filter(!accountsFor(_)) - addNewElems(added) - .andAlso: - if mapped.isConst then CompareResult.OK - else if mapped.asVar.recordDepsState() then { addAsDependentTo(mapped); CompareResult.OK } - else CompareResult.Fail(this :: Nil) - .andAlso: - propagateIf(!added.isEmpty) - else if accountsFor(elem) then - CompareResult.OK - else if variance > 0 then - // we can soundly add nothing to source and `x` to this set - addNewElem(elem) - else if isFixpoint then - // We can soundly add `x` to both this set and source since `f(x) = x` - addNewElem(elem).andAlso(propagate) - else - // we are out of options; fail (which is always sound). - failNoFixpoint - end tryInclude + // ----------- Longest path recording ------------------------- - override def computeApprox(origin: CaptureSet)(using Context): CaptureSet = - if source eq origin then - // it's a mapping of origin, so not a superset of `origin`, - // therefore don't contribute to the intersection. - universal - else - source.upperApprox(this).map(tm) + /** Summarize for set displaying in a path */ + def summarize: String = getClass.toString - override def propagateSolved()(using Context) = - if initial.isConst then super.propagateSolved() + /** The length of the path of DerivedVars ending in this set */ + def pathLength: Int = source match + case source: DerivedVar => source.pathLength + 1 + case _ => 1 - override def toString = s"Mapped$id($source, elems = $elems)" - end Mapped + /** The path of DerivedVars ending in this set */ + def path: List[DerivedVar] = source match + case source: DerivedVar => this :: source.path + case _ => this :: Nil + + if ctx.settings.YccLog.value || util.Stats.enabled then + ctx.run.nn.recordPath(pathLength, path) + + end DerivedVar /** A mapping where the type map is required to be a bijection. * Parameters as in Mapped. */ final class BiMapped private[CaptureSet] - (val source: Var, bimap: BiTypeMap, initialElems: Refs)(using @constructorOnly ctx: Context) + (val source: Var, val bimap: BiTypeMap, initialElems: Refs)(using @constructorOnly ctx: Context) extends DerivedVar(source.owner, initialElems): - override def tryInclude(elem: CaptureRef, origin: CaptureSet)(using Context, VarState): CompareResult = + override def tryInclude(elem: Capability, origin: CaptureSet)(using Context, VarState): Boolean = if origin eq source then - val mappedElem = bimap.forward(elem) - if accountsFor(mappedElem) then CompareResult.OK - else addNewElem(mappedElem) + val mappedElem = bimap.mapCapability(elem) + accountsFor(mappedElem) || addNewElem(mappedElem) else if accountsFor(elem) then - CompareResult.OK + true else - source.tryInclude(bimap.backward(elem), this) - .showing(i"propagating new elem $elem backward from $this to $source = $result", captDebug) - .andAlso(addNewElem(elem)) + // Propagate backwards to source. The element will be added then by another + // forward propagation from source that hits the first branch `if origin eq source then`. 
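As the comment above says, an element arriving from outside a bi-mapped set is first propagated backwards through the inverse map to the source; the source's forward propagation then adds the mapped element here. A minimal standalone model of that round trip follows; the `Source`/`Image` classes and the string renamings are invented.

```scala
// Toy bi-mapped pair: additions arriving at the image are sent backwards
// through the inverse renaming to the source; the source's forward propagation
// then adds the mapped element to the image, keeping both sides consistent.
final class Source(forward: Map[String, String]):
  private var elems = Set.empty[String]
  var image: Option[Image] = None
  def contents: Set[String] = elems
  def include(x: String): Unit =
    if !elems(x) then
      elems += x
      image.foreach(_.includeFromSource(forward.getOrElse(x, x)))

final class Image(source: Source, backward: Map[String, String]):
  private var elems = Set.empty[String]
  def contents: Set[String] = elems
  def includeFromSource(x: String): Unit = elems += x
  def include(x: String): Unit =          // an addition from outside the pair
    if !elems(x) then source.include(backward.getOrElse(x, x))

@main def demoBiMapped(): Unit =
  val fwd = Map("x" -> "x1")
  val src = Source(fwd)
  val img = Image(src, fwd.map(_.swap))
  src.image = Some(img)
  img.include("x1")                       // goes backwards to src, then forwards again
  println((src.contents, img.contents))   // (Set(x),Set(x1))
```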
+ try + reporting.trace(i"prop backwards $elem from $this # $id to $source # ${source.id} via $summarize"): + source.tryInclude(bimap.inverse.mapCapability(elem), this) + .showing(i"propagating new elem $elem backward from $this/$id to $source = $result", captDebug) + catch case ex: AssertionError => + println(i"fail while prop backwards tryInclude $elem of ${elem.getClass} from $this # $id / ${this.summarize} to $source # ${source.id}") + throw ex /** For a BiTypeMap, supertypes of the mapped type also constrain * the source via the inverse type mapping and vice versa. That is, if @@ -784,24 +866,24 @@ object CaptureSet: override def isMaybeSet: Boolean = bimap.isInstanceOf[MaybeMap] override def toString = s"BiMapped$id($source, elems = $elems)" + override def summarize = bimap.getClass.toString end BiMapped /** A variable with elements given at any time as { x <- source.elems | p(x) } */ class Filtered private[CaptureSet] - (val source: Var, p: Context ?=> CaptureRef => Boolean)(using @constructorOnly ctx: Context) + (val source: Var, val p: Context ?=> Capability => Boolean)(using @constructorOnly ctx: Context) extends DerivedVar(source.owner, source.elems.filter(p)): - override def tryInclude(elem: CaptureRef, origin: CaptureSet)(using Context, VarState): CompareResult = + override def tryInclude(elem: Capability, origin: CaptureSet)(using Context, VarState): Boolean = if accountsFor(elem) then - CompareResult.OK + true else if origin eq source then - if p(elem) then addNewElem(elem) - else CompareResult.OK + !p(elem) || addNewElem(elem) else // Filtered elements have to be back-propagated to source. // Elements that don't satisfy `p` are not allowed. if p(elem) then source.tryInclude(elem, this) - else CompareResult.Fail(this :: Nil) + else failWith(IncludeFailure(this, elem)) override def computeApprox(origin: CaptureSet)(using Context): CaptureSet = if source eq origin then @@ -823,21 +905,21 @@ object CaptureSet: addAsDependentTo(cs1) addAsDependentTo(cs2) - override def tryInclude(elem: CaptureRef, origin: CaptureSet)(using Context, VarState): CompareResult = - if accountsFor(elem) then CompareResult.OK + override def tryInclude(elem: Capability, origin: CaptureSet)(using Context, VarState): Boolean = + if accountsFor(elem) then true else val res = super.tryInclude(elem, origin) // If this is the union of a constant and a variable, // propagate `elem` to the variable part to avoid slack // between the operands and the union. 
- if res.isOK && (origin ne cs1) && (origin ne cs2) then + if res && (origin ne cs1) && (origin ne cs2) then if cs1.isConst then cs2.tryInclude(elem, origin) else if cs2.isConst then cs1.tryInclude(elem, origin) else res else res - override def propagateSolved()(using Context) = - if cs1.isConst && cs2.isConst && !isConst then markSolved() + override def propagateSolved(provisional: Boolean)(using Context) = + if cs1.isConst && cs2.isConst && !isConst then markSolved(provisional) end Union class Intersection(cs1: CaptureSet, cs2: CaptureSet)(using Context) @@ -847,13 +929,12 @@ object CaptureSet: deps += cs1 deps += cs2 - override def tryInclude(elem: CaptureRef, origin: CaptureSet)(using Context, VarState): CompareResult = + override def tryInclude(elem: Capability, origin: CaptureSet)(using Context, VarState): Boolean = val present = if origin eq cs1 then cs2.accountsFor(elem) else if origin eq cs2 then cs1.accountsFor(elem) else true - if present && !accountsFor(elem) then addNewElem(elem) - else CompareResult.OK + !present || accountsFor(elem) || addNewElem(elem) override def computeApprox(origin: CaptureSet)(using Context): CaptureSet = if (origin eq cs1) || (origin eq cs2) then @@ -863,39 +944,128 @@ object CaptureSet: else CaptureSet(elemIntersection(cs1.upperApprox(this), cs2.upperApprox(this))) - override def propagateSolved()(using Context) = - if cs1.isConst && cs2.isConst && !isConst then markSolved() + override def propagateSolved(provisional: Boolean)(using Context) = + if cs1.isConst && cs2.isConst && !isConst then markSolved(provisional) end Intersection def elemIntersection(cs1: CaptureSet, cs2: CaptureSet)(using Context): Refs = - cs1.elems.filter(cs2.mightAccountFor) ++ cs2.elems.filter(cs1.mightAccountFor) + cs1.elems.filter(cs2.accountsFor) ++ cs2.elems.filter(cs1.accountsFor) + + /** A capture set variable used to record the references hidden by a Fresh instance, + * The elems and deps members are repurposed as follows: + * elems: Set of hidden references + * deps : Set of hidden sets for which the Fresh instance owning this set + * is a hidden element. + * Hidden sets may become aliases of other hidden sets, which means that + * reads and writes of elems go to the alias. + * If H is an alias of R.hidden for some Fresh instance R then: + * H.elems == {R} + * H.deps = {R.hidden} + * This encoding was chosen because it relies only on the elems and deps fields + * which are already subject through snapshotting and rollbacks in VarState. + * It's advantageous if we don't need to deal with other pieces of state there. 
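The aliasing encoding described above can be pictured as a union-find-like indirection: a hidden set either owns its elements or redirects every read and write to the set it is an alias of. A small standalone model of that alias-following (invented names, none of the capture-checking specifics):

```scala
// Toy model of hidden-set aliasing: a set either owns its elements or is an
// alias of another set; reads and writes follow the alias chain.
final class Hidden(val name: String):
  private var ownElems = Set.empty[String]
  private var alias: Option[Hidden] = None

  private def repr: Hidden = alias match
    case Some(h) => h.repr
    case None    => this

  def elems: Set[String] = repr.ownElems
  def add(x: String): Unit = repr.ownElems += x

  /** Make this set an alias of `that`; existing elements move over. */
  def aliasTo(that: Hidden): Unit =
    if repr ne that.repr then
      that.repr.ownElems ++= repr.ownElems
      repr.alias = Some(that.repr)

@main def demoAlias(): Unit =
  val h1 = Hidden("h1"); val h2 = Hidden("h2")
  h1.add("io")
  h1.aliasTo(h2)
  h1.add("fs")                     // lands in h2 via the alias
  println(h2.elems)                // Set(io, fs)
  println(h1.elems == h2.elems)    // true: both read through the alias
```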
+ */ + class HiddenSet(initialOwner: Symbol, val owningCap: FreshCap)(using @constructorOnly ictx: Context) + extends Var(initialOwner): + var givenOwner: Symbol = initialOwner + + override def owner = givenOwner + + //assert(id != 3) + + description = i"of elements subsumed by a fresh cap in $initialOwner" + + private def aliasRef: FreshCap | Null = + if myElems.size == 1 then + myElems.nth(0) match + case alias: FreshCap if deps.contains(alias.hiddenSet) => alias + case _ => null + else null + + private def aliasSet: HiddenSet = + if myElems.size == 1 then + myElems.nth(0) match + case alias: FreshCap if deps.contains(alias.hiddenSet) => alias.hiddenSet + case _ => this + else this + + def superCaps: List[FreshCap] = + deps.toList.map(_.asInstanceOf[HiddenSet].owningCap) + + override def elems: Refs = + val al = aliasSet + if al eq this then super.elems else al.elems + + override def elems_=(refs: Refs) = + val al = aliasSet + if al eq this then super.elems_=(refs) else al.elems_=(refs) + + /** Add element to hidden set. Also add it to all supersets (as indicated by + * deps of this set). Follow aliases on both hidden set and added element + * before adding. If the added element is also a Fresh instance with + * hidden set H which is a superset of this set, then make this set an + * alias of H. + */ + def add(elem: Capability)(using ctx: Context, vs: VarState): Unit = + val alias = aliasSet + if alias ne this then alias.add(elem) + else + def addToElems() = + elems += elem + deps.foreach: dep => + assert(dep != this) + vs.addHidden(dep.asInstanceOf[HiddenSet], elem) + elem match + case elem: FreshCap => + if this ne elem.hiddenSet then + val alias = elem.hiddenSet.aliasRef + if alias != null then + add(alias) + else if deps.contains(elem.hiddenSet) then // make this an alias of elem + capt.println(i"Alias $this to ${elem.hiddenSet}") + elems = SimpleIdentitySet(elem) + deps = SimpleIdentitySet(elem.hiddenSet) + else + addToElems() + elem.hiddenSet.deps += this + case _ => + addToElems() + + /** Apply function `f` to `elems` while setting `elems` to empty for the + * duration. This is used to escape infinite recursions if two Freshs + * refer to each other in their hidden sets. + */ + override def processElems[T](f: Refs => T): T = + val savedElems = elems + elems = emptyRefs + try f(savedElems) + finally elems = savedElems + end HiddenSet /** Extrapolate tm(r) according to `variance`. Let r1 be the result of tm(r). 
- * - If r1 is a tracked CaptureRef, return {r1} + * - If r1 is a tracked capability, return {r1} * - If r1 has an empty capture set, return {} * - Otherwise, * - if the variance is covariant, return r1's capture set * - if the variance is contravariant, return {} * - Otherwise assertion failure */ - def extrapolateCaptureRef(r: CaptureRef, tm: TypeMap, variance: Int)(using Context): CaptureSet = - val r1 = tm(r) - val upper = r1.captureSet - def isExact = - upper.isAlwaysEmpty - || upper.isConst && upper.elems.size == 1 && upper.elems.contains(r1) - || r.derivesFrom(defn.Caps_CapSet) - if variance > 0 || isExact then upper - else if variance < 0 then CaptureSet.EmptyWithProvenance(r, r1) - else upper.maybe + final def mappedSet(r: Capability, tm: TypeMap, variance: Int)(using Context): CaptureSet = + tm.mapCapability(r) match + case c: CoreCapability => c.captureSet + case c: Capability => c.singletonCaptureSet + case (cs: CaptureSet, exact) => + if cs.isAlwaysEmpty || exact || variance > 0 then cs + else if variance < 0 then CaptureSet.EmptyWithProvenance(r, cs) + else cs.maybe /** Apply `f` to each element in `xs`, and join result sets with `++` */ - def mapRefs(xs: Refs, f: CaptureRef => CaptureSet)(using Context): CaptureSet = + def mapRefs(xs: Refs, f: Capability => CaptureSet)(using Context): CaptureSet = ((empty: CaptureSet) /: xs)((cs, x) => cs ++ f(x)) /** Apply extrapolated `tm` to each element in `xs`, and join result sets with `++` */ def mapRefs(xs: Refs, tm: TypeMap, variance: Int)(using Context): CaptureSet = - mapRefs(xs, extrapolateCaptureRef(_, tm, variance)) + mapRefs(xs, mappedSet(_, tm, variance)) /** Return true iff * - arg1 is a TypeBounds >: CL T <: CH T of two capturing types with equal parents. @@ -905,63 +1075,39 @@ object CaptureSet: */ def subCapturesRange(arg1: TypeBounds, arg2: Type)(using Context): Boolean = arg1 match case TypeBounds(CapturingType(lo, loRefs), CapturingType(hi, hiRefs)) if lo =:= hi => - given VarState = VarState() + given VarState() val cs2 = arg2.captureSet - hiRefs.subCaptures(cs2).isOK && cs2.subCaptures(loRefs).isOK + hiRefs.subCaptures(cs2) && cs2.subCaptures(loRefs) case _ => false - /** A TypeMap with the property that every capture reference in the image - * of the map is mapped to itself. I.e. for all capture references r1, r2, - * if M(r1) == r2 then M(r2) == r2. + /** A TypeMap that is the identity on capabilities */ + trait IdentityCaptRefMap extends TypeMap + + /** A value of this class is produced and added as a note to ccState + * when a subsumes check decides that an existential variable `ex` cannot be + * instantiated to the other capability `other`. 
*/ - trait IdempotentCaptRefMap extends TypeMap + case class ExistentialSubsumesFailure(val ex: ResultCap, val other: Capability) extends ErrorNote - /** A TypeMap that is the identity on capture references */ - trait IdentityCaptRefMap extends TypeMap + case class IncludeFailure(cs: CaptureSet, elem: Capability, levelError: Boolean = false) extends ErrorNote, Showable: + private var myTrace: List[CaptureSet] = cs :: Nil - enum CompareResult extends Showable: - case OK - case Fail(trace: List[CaptureSet]) - case LevelError(cs: CaptureSet, elem: CaptureRef) + def trace: List[CaptureSet] = myTrace + def addToTrace(cs1: CaptureSet) = + val res = IncludeFailure(cs, elem, levelError) + res.myTrace = cs1 :: this.myTrace + res override def toText(printer: Printer): Text = inContext(printer.printerContext): - this match - case OK => Str("OK") - case Fail(trace) => - if ctx.settings.YccDebug.value then printer.toText(trace, ", ") - else blocking.show - case LevelError(cs: CaptureSet, elem: CaptureRef) => - Str(i"($elem at wrong level for $cs at level ${cs.level.toString})") - - /** The result is OK */ - def isOK: Boolean = this == OK - - /** If not isOK, the blocking capture set */ - def blocking: CaptureSet = (this: @unchecked) match - case Fail(cs) => cs.last - case LevelError(cs, _) => cs - - /** Optionally, this result if it is a level error */ - def levelError: Option[LevelError] = this match - case result: LevelError => Some(result) - case _ => None - - inline def andAlso(op: Context ?=> CompareResult)(using Context): CompareResult = - if isOK then op else this - - inline def orElse(op: Context ?=> CompareResult)(using Context): CompareResult = - if isOK then this - else - val alt = op - if alt.isOK then alt - else this - - inline def addToTrace(cs: CaptureSet): CompareResult = this match - case Fail(trace) => Fail(cs :: trace) - case _ => this - end CompareResult + if levelError then + i"($elem at wrong level for $cs at level ${cs.level.toString})" + else + if ctx.settings.YccDebug.value + then i"$elem cannot be included in $trace" + else i"$elem cannot be included in $cs" + end IncludeFailure /** A VarState serves as a snapshot mechanism that can undo * additions of elements or super sets if an operation fails @@ -974,6 +1120,14 @@ object CaptureSet: /** A map from captureset variables to their dependent sets at the time of the snapshot. */ private val depsMap: util.EqHashMap[Var, Deps] = new util.EqHashMap + /** A map from ResultCap values to other ResultCap values. If two result values + * `a` and `b` are unified, then `eqResultMap(a) = b` and `eqResultMap(b) = a`. + */ + private var eqResultMap: util.SimpleIdentityMap[ResultCap, ResultCap] = util.SimpleIdentityMap.empty + + /** A snapshot of the `eqResultMap` value at the start of a VarState transaction */ + private var eqResultSnapshot: util.SimpleIdentityMap[ResultCap, ResultCap] | Null = null + /** The recorded elements of `v` (it's required that a recording was made) */ def elems(v: Var): Refs = elemsMap(v) @@ -981,8 +1135,7 @@ object CaptureSet: def getElems(v: Var): Option[Refs] = elemsMap.get(v) /** Record elements, return whether this was allowed. - * By default, recording is allowed but the special state FrozenState - * overrides this. + * By default, recording is allowed in regular but not in frozen states. 
*/ def putElems(v: Var, elems: Refs): Boolean = { elemsMap(v) = elems; true } @@ -993,58 +1146,160 @@ object CaptureSet: def getDeps(v: Var): Option[Deps] = depsMap.get(v) /** Record dependent sets, return whether this was allowed. - * By default, recording is allowed but the special state FrozenState - * overrides this. + * By default, recording is allowed in regular but not in frozen states. */ def putDeps(v: Var, deps: Deps): Boolean = { depsMap(v) = deps; true } + /** Does this state allow additions of elements to capture set variables? */ + def isOpen = true + def isSeparating = false + + /** Add element to hidden set, recording it in elemsMap, + * return whether this was allowed. By default, recording is allowed + * but the special state VarState.Separate overrides this. + */ + def addHidden(hidden: HiddenSet, elem: Capability)(using Context): Boolean = + elemsMap.get(hidden) match + case None => + elemsMap(hidden) = hidden.elems + depsMap(hidden) = hidden.deps + case _ => + hidden.add(elem)(using ctx, this) + true + + /** If root1 and root2 belong to the same binder but have different originalBinders + * it means that one of the roots was mapped to the binder of the other by a + * substBinder when comparing two method types. In that case we can unify + * the two roots1, provided none of the two roots have already been unified + * themselves. So unification must be 1-1. + * + * Note, see (**) below: We also allow unifications of results that have different ExprType + * binders. This is necessary because ExprTypes don't get updated with SubstBindingMaps. + * It's sound since ExprTypes always appear alone and at the top-level, so there is + * no problem with confusing results at different levels. + * See pos-customargs/captures/overrides.scala for a test case. + */ + def unify(c1: ResultCap, c2: ResultCap)(using Context): Boolean = + ((c1.binder eq c2.binder) + || c1.binder.isInstanceOf[ExprType] && c2.binder.isInstanceOf[ExprType] // (**) + ) + && (c1.originalBinder ne c2.originalBinder) + && eqResultMap(c1) == null + && eqResultMap(c2) == null + && { + if eqResultSnapshot == null then eqResultSnapshot = eqResultMap + eqResultMap = eqResultMap.updated(c1, c2).updated(c2, c1) + true + } + /** Roll back global state to what was recorded in this VarState */ def rollBack(): Unit = elemsMap.keysIterator.foreach(_.resetElems()(using this)) depsMap.keysIterator.foreach(_.resetDeps()(using this)) - end VarState + if eqResultSnapshot != null then eqResultMap = eqResultSnapshot.nn - /** A special state that does not allow to record elements or dependent sets. - * In effect this means that no new elements or dependent sets can be added - * in this state (since the previous state cannot be recorded in a snapshot) - */ - @sharable - object FrozenState extends VarState: - override def putElems(v: Var, refs: Refs) = false - override def putDeps(v: Var, deps: Deps) = false - override def rollBack(): Unit = () - - @sharable - /** A special state that turns off recording of elements. Used only - * in `addSub` to prevent cycles in recordings. 
- */ - private object UnrecordedState extends VarState: - override def putElems(v: Var, refs: Refs) = true - override def putDeps(v: Var, deps: Deps) = true - override def rollBack(): Unit = () + private var seen: util.EqHashSet[Capability] = new util.EqHashSet + + /** Run test `pred` unless `ref` was seen in an enclosing `ifNotSeen` operation */ + def ifNotSeen(ref: Capability)(pred: => Boolean): Boolean = + if seen.add(ref) then + try pred finally seen -= ref + else false + + override def toString = "open varState" + + object VarState: + + /** A class for states that do not allow to record elements or dependent sets. + * In effect this means that no new elements or dependent sets can be added + * in these states (since the previous state cannot be recorded in a snapshot) + * On the other hand, these states do allow by default Fresh instances to + * subsume arbitrary types, which are then recorded in their hidden sets. + */ + class Closed extends VarState: + override def putElems(v: Var, refs: Refs) = false + override def putDeps(v: Var, deps: Deps) = false + override def isOpen = false + override def toString = "closed varState" + + /** A closed state that allows a Fresh instance to subsume a + * reference `r` only if `r` is already present in the hidden set of the instance. + * No new references can be added. + */ + class Separating extends Closed: + override def addHidden(hidden: HiddenSet, elem: Capability)(using Context): Boolean = false + override def toString = "separating varState" + override def isSeparating = true + + /** A closed state that allows a Fresh instance to subsume a + * reference `r` only if `r` is already present in the hidden set of the instance. + * No new references can be added. + */ + def Separate(using Context): Separating = ccState.Separate + + /** Like Separate but in addition we assume that `cap` never subsumes anything else. + * Used in `++` to not lose track of dependencies between function parameters. + */ + def HardSeparate(using Context): Separating = ccState.HardSeparate + + /** A special state that turns off recording of elements. Used only + * in `addSub` to prevent cycles in recordings. Instantiated in ccState.Unrecorded. + */ + class Unrecorded extends VarState: + override def putElems(v: Var, refs: Refs) = true + override def putDeps(v: Var, deps: Deps) = true + override def rollBack(): Unit = () + override def addHidden(hidden: HiddenSet, elem: Capability)(using Context): Boolean = true + override def toString = "unrecorded varState" + + def Unrecorded(using Context): Unrecorded = ccState.Unrecorded + + /** A closed state that turns off recording of hidden elements (but allows + * adding them). Used in `mightAccountFor`. Instantiated in ccState.ClosedUnrecorded. 
+ */ + class ClosedUnrecorded extends Closed: + override def addHidden(hidden: HiddenSet, elem: Capability)(using Context): Boolean = true + override def toString = "closed unrecorded varState" + + def ClosedUnrecorded(using Context): ClosedUnrecorded = ccState.ClosedUnrecorded + + end VarState /** The current VarState, as passed by the implicit context */ def varState(using state: VarState): VarState = state - /** Maps `x` to `x?` */ - private class MaybeMap(using Context) extends BiTypeMap: + /** A template for maps on capabilities where f(c) <: c and f(f(c)) = c */ + private abstract class NarrowingCapabilityMap(using Context) extends BiTypeMap: - def apply(t: Type) = t match - case t: CaptureRef if t.isTrackableRef => t.maybe - case _ => mapOver(t) + def apply(t: Type) = mapOver(t) - override def toString = "Maybe" + override def fuse(next: BiTypeMap)(using Context) = next match + case next: Inverse if next.inverse.getClass == getClass => assert(false); Some(IdentityTypeMap) + case next: NarrowingCapabilityMap if next.getClass == getClass => assert(false) + case _ => None - lazy val inverse = new BiTypeMap: + class Inverse extends BiTypeMap: + def apply(t: Type) = t // since f(c) <: c, this is the best inverse + override def mapCapability(c: Capability, deep: Boolean): Capability = c + def inverse = NarrowingCapabilityMap.this + override def toString = NarrowingCapabilityMap.this.toString ++ ".inverse" + override def fuse(next: BiTypeMap)(using Context) = next match + case next: NarrowingCapabilityMap if next.inverse.getClass == getClass => assert(false); Some(IdentityTypeMap) + case next: NarrowingCapabilityMap if next.getClass == getClass => assert(false) + case _ => None - def apply(t: Type) = t match - case t: CaptureRef if t.isMaybe => t.stripMaybe - case t => mapOver(t) + lazy val inverse = Inverse() + end NarrowingCapabilityMap - def inverse = MaybeMap.this + /** Maps `x` to `x?` */ + private class MaybeMap(using Context) extends NarrowingCapabilityMap: + override def mapCapability(c: Capability, deep: Boolean) = c.maybe + override def toString = "Maybe" - override def toString = "Maybe.inverse" - end MaybeMap + /** Maps `x` to `x.rd` */ + private class ReadOnlyMap(using Context) extends NarrowingCapabilityMap: + override def mapCapability(c: Capability, deep: Boolean) = c.readOnly + override def toString = "ReadOnly" /* Not needed: def ofClass(cinfo: ClassInfo, argTypes: List[Type])(using Context): CaptureSet = @@ -1068,16 +1323,31 @@ object CaptureSet: css.foldLeft(empty)(_ ++ _) */ - /** The capture set of the type underlying CaptureRef */ - def ofInfo(ref: CaptureRef)(using Context): CaptureSet = ref match - case ReachCapability(ref1) => - ref1.widen.deepCaptureSet(includeTypevars = true) - .showing(i"Deep capture set of $ref: ${ref1.widen} = ${result}", capt) - case _ => - if ref.isMaxCapability then ref.singletonCaptureSet - else ofType(ref.underlying, followResult = true) - - /** Capture set of a type */ + /** The capture set of the type underlying the capability `c` */ + def ofInfo(c: Capability)(using Context): CaptureSet = c match + case Reach(c1) => + c1.widen.deepCaptureSet(includeTypevars = true) + .showing(i"Deep capture set of $c: ${c1.widen} = ${result}", capt) + case ReadOnly(c1) => + c1.captureSetOfInfo.readOnly + case Maybe(c1) => + c1.captureSetOfInfo.maybe + case c: RootCapability => + c.singletonCaptureSet + case c: ParamRef if !c.underlying.exists => + // might happen during construction of lambdas, assume `{cap}` in this case so that + // `ref` will not 
seem subsumed by other capabilities in a `++`. + universal + case c: CoreCapability => + ofType(c.underlying, followResult = false) + + /** Capture set of a type + * @param followResult If true, also include capture sets of function results. + * This mode is currently not used. It could be interesting + * when we change the system so that the capture set of a function + * is the union of the capture sets of its span. + * In this case we should use `followResult = true` in the call in ofInfo above. + */ def ofType(tp: Type, followResult: Boolean)(using Context): CaptureSet = def recur(tp: Type): CaptureSet = trace(i"ofType $tp, ${tp.getClass} $followResult", show = true): tp.dealiasKeepAnnots match @@ -1090,16 +1360,13 @@ object CaptureSet: else empty case CapturingType(parent, refs) => recur(parent) ++ refs - case tp @ AnnotatedType(parent, ann) if ann.hasSymbol(defn.ReachCapabilityAnnot) => - parent match - case parent: SingletonCaptureRef if parent.isTrackableRef => - tp.singletonCaptureSet - case _ => - CaptureSet.ofTypeDeeply(parent.widen) case tpd @ defn.RefinedFunctionOf(rinfo: MethodType) if followResult => - ofType(tpd.parent, followResult = false) // pick up capture set from parent type - ++ (recur(rinfo.resType) // add capture set of result - -- CaptureSet(rinfo.paramRefs.filter(_.isTracked)*)) // but disregard bound parameters + ofType(tpd.parent, followResult = false) // pick up capture set from parent type + ++ recur(rinfo.resType) // add capture set of result + .filter: + case TermParamRef(binder, _) => binder ne rinfo + case ResultCap(binder) => binder ne rinfo + case _ => true case tpd @ AppliedType(tycon, args) => if followResult && defn.isNonRefinedFunction(tpd) then recur(args.last) @@ -1123,36 +1390,17 @@ object CaptureSet: /** The deep capture set of a type is the union of all covariant occurrences of * capture sets. Nested existential sets are approximated with `cap`. - * NOTE: The traversal logic needs to be in sync with narrowCaps in CaptureOps, which - * replaces caps with reach capabilties. The one exception to this is invariant - * arguments. This have to be included to be conservative in dcs but must be - * excluded in narrowCaps. 
*/ def ofTypeDeeply(tp: Type, includeTypevars: Boolean = false)(using Context): CaptureSet = - val collect = new TypeAccumulator[CaptureSet]: - val seen = util.HashSet[Symbol]() - def apply(cs: CaptureSet, t: Type) = - if variance < 0 then cs - else t.dealias match - case t @ CapturingType(p, cs1) => - this(cs, p) ++ cs1 - case t @ AnnotatedType(parent, ann) => - this(cs, parent) - case t: TypeRef if t.symbol.isAbstractOrParamType && !seen.contains(t.symbol) => - seen += t.symbol - val upper = t.info.bounds.hi - if includeTypevars && upper.isExactlyAny then CaptureSet.universal - else this(cs, upper) - case t @ FunctionOrMethod(args, res @ Existential(_, _)) - if args.forall(_.isAlwaysPure) => - this(cs, Existential.toCap(res)) - case t @ Existential(_, _) => - cs - case _ => - foldOver(cs, t) + val collect = new DeepTypeAccumulator[CaptureSet]: + def capturingCase(acc: CaptureSet, parent: Type, refs: CaptureSet) = + this(acc, parent) ++ refs + def abstractTypeCase(acc: CaptureSet, t: TypeRef, upperBound: Type) = + if includeTypevars && upperBound.isExactlyAny then fresh(Origin.DeepCS(t)) + else this(acc, upperBound) collect(CaptureSet.empty, tp) - type AssumedContains = immutable.Map[TypeRef, SimpleIdentitySet[CaptureRef]] + type AssumedContains = immutable.Map[TypeRef, SimpleIdentitySet[Capability]] val AssumedContains: Property.Key[AssumedContains] = Property.Key() def assumedContains(using Context): AssumedContains = @@ -1190,23 +1438,4 @@ object CaptureSet: println(i" ${cv.show.padTo(20, ' ')} :: ${cv.deps.toList}%, %") } else op - - def levelErrors: Addenda = new Addenda: - override def toAdd(using Context) = - for CompareResult.LevelError(cs, ref) <- ccState.levelError.toList yield - ccState.levelError = None - if ref.isRootCapability then - i""" - | - |Note that the universal capability `cap` - |cannot be included in capture set $cs""" - else - val levelStr = ref match - case ref: TermRef => i", defined in ${ref.symbol.maybeOwner}" - case _ => "" - i""" - | - |Note that reference ${ref}$levelStr - |cannot be included in outer capture set $cs""" - end CaptureSet diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index 830d9ad0a4d4..1bdd7ce92129 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -18,11 +18,13 @@ import util.{SimpleIdentitySet, EqHashMap, EqHashSet, SrcPos, Property} import transform.{Recheck, PreRecheck, CapturedVars} import Recheck.* import scala.collection.mutable -import CaptureSet.{withCaptureSetsExplained, IdempotentCaptRefMap, CompareResult} +import CaptureSet.{withCaptureSetsExplained, IncludeFailure, ExistentialSubsumesFailure} import CCState.* import StdNames.nme import NameKinds.{DefaultGetterName, WildcardParamName, UniqueNameKind} import reporting.{trace, Message, OverrideError} +import Annotations.Annotation +import Capabilities.* /** The capture checker */ object CheckCaptures: @@ -58,7 +60,7 @@ object CheckCaptures: def isOutermost = outer0 == null /** If an environment is open it tracks free references */ - def isOpen = !captured.isAlwaysEmpty && kind != EnvKind.Boxed + def isOpen(using Context) = !captured.isAlwaysEmpty && kind != EnvKind.Boxed def outersIterator: Iterator[Env] = new: private var cur = Env.this @@ -76,7 +78,7 @@ object CheckCaptures: * maps parameters in contravariant capture sets to the empty set. 
*/ final class SubstParamsMap(from: BindingType, to: List[Type])(using Context) - extends ApproximatingTypeMap, IdempotentCaptRefMap: + extends ApproximatingTypeMap: def apply(tp: Type): Type = tp match case tp: ParamRef => @@ -88,44 +90,9 @@ object CheckCaptures: tp case _ => mapOver(tp) + override def toString = "SubstParamsMap" end SubstParamsMap - /** Used for substituting parameters in a special case: when all actual arguments - * are mutually distinct capabilities. - */ - final class SubstParamsBiMap(from: LambdaType, to: List[Type])(using Context) - extends BiTypeMap: - thisMap => - - def apply(tp: Type): Type = tp match - case tp: ParamRef => - if tp.binder == from then to(tp.paramNum) else tp - case tp: NamedType => - if tp.prefix `eq` NoPrefix then tp - else tp.derivedSelect(apply(tp.prefix)) - case _: ThisType => - tp - case _ => - mapOver(tp) - - lazy val inverse = new BiTypeMap: - def apply(tp: Type): Type = tp match - case tp: NamedType => - var idx = 0 - var to1 = to - while idx < to.length && (tp ne to(idx)) do - idx += 1 - to1 = to1.tail - if idx < to.length then from.paramRefs(idx) - else if tp.prefix `eq` NoPrefix then tp - else tp.derivedSelect(apply(tp.prefix)) - case _: ThisType => - tp - case _ => - mapOver(tp) - def inverse = thisMap - end SubstParamsBiMap - /** A prototype that indicates selection with an immutable value */ class PathSelectionProto(val sym: Symbol, val pt: Type)(using Context) extends WildcardSelectionProto @@ -134,8 +101,8 @@ object CheckCaptures: */ def checkWellformed(parent: Tree, ann: Tree)(using Context): Unit = def check(elem: Tree, pos: SrcPos): Unit = elem.tpe match - case ref: CaptureRef => - if !ref.isTrackableRef then + case ref: Capability => + if !ref.isTrackableRef && !ref.isCapRef then report.error(em"$elem cannot be tracked since it is not a parameter or local value", pos) case tpe => report.error(em"$elem: $tpe is not a legal element of a capture set", pos) @@ -150,17 +117,22 @@ object CheckCaptures: |is must be a type parameter or abstract type with a caps.CapSet upper bound.""", elem.srcPos) case ReachCapabilityApply(arg) => check(arg, elem.srcPos) + case ReadOnlyCapabilityApply(arg) => check(arg, elem.srcPos) case _ => check(elem, elem.srcPos) /** Under the sealed policy, report an error if some part of `tp` contains the * root capability in its capture set or if it refers to a type parameter that * could possibly be instantiated with cap in a way that's visible at the type. */ - private def disallowRootCapabilitiesIn(tp: Type, carrier: Symbol, what: String, have: String, addendum: String, pos: SrcPos)(using Context) = + private def disallowRootCapabilitiesIn(tp: Type, upto: Symbol, what: String, have: String, addendum: String, pos: SrcPos)(using Context) = val check = new TypeTraverser: private val seen = new EqHashSet[TypeRef] + // We keep track of open existential scopes here so that we can set these scopes + // in ccState when printing a part of the offending type. + var openExistentialScopes: List[MethodType] = Nil + def traverse(t: Type) = t.dealiasKeepAnnots match case t: TypeRef => @@ -178,54 +150,40 @@ object CheckCaptures: () case CapturingType(parent, refs) => if variance >= 0 then - refs.disallowRootCapability: () => - def part = if t eq tp then "" else i"the part $t of " + val openScopes = openExistentialScopes + refs.disallowRootCapability(upto): () => + def part = + if t eq tp then "" + else + // Show in context of all enclosing traversed existential scopes. 
+ def showInOpenedFreshBinders(mts: List[MethodType]): String = mts match + case Nil => i"the part $t of " + case mt :: mts1 => + inNewExistentialScope(mt): + showInOpenedFreshBinders(mts1) + showInOpenedFreshBinders(openScopes.reverse) report.error( em"""$what cannot $have $tp since |${part}that type captures the root capability `cap`.$addendum""", pos) traverse(parent) + case defn.RefinedFunctionOf(mt) => + traverse(mt) + case t: MethodType if t.marksExistentialScope => + atVariance(-variance): + t.paramInfos.foreach(traverse) + val saved = openExistentialScopes + openExistentialScopes = t :: openExistentialScopes + try traverse(t.resType) + finally openExistentialScopes = saved case t => traverseChildren(t) - if ccConfig.useSealed then check.traverse(tp) + check.traverse(tp) end disallowRootCapabilitiesIn - /** If we are not under the sealed policy, and a tree is an application that unboxes - * its result or is a try, check that the tree's type does not have covariant universal - * capabilities. - */ - private def checkNotUniversalInUnboxedResult(tpe: Type, tree: Tree)(using Context): Unit = - def needsUniversalCheck = tree match - case _: RefTree | _: Apply | _: TypeApply => tree.symbol.unboxesResult - case _: Try => true - case _ => false - - object checkNotUniversal extends TypeTraverser: - def traverse(tp: Type) = - tp.dealias match - case wtp @ CapturingType(parent, refs) => - if variance > 0 then - refs.disallowRootCapability: () => - def part = if wtp eq tpe.widen then "" else i" in its part $wtp" - report.error( - em"""The expression's type ${tpe.widen} is not allowed to capture the root capability `cap`$part. - |This usually means that a capability persists longer than its allowed lifetime.""", - tree.srcPos) - if !wtp.isBoxed then traverse(parent) - case tp => - traverseChildren(tp) - - if !ccConfig.useSealed - && !tpe.hasAnnotation(defn.UncheckedCapturesAnnot) - && needsUniversalCheck - && tpe.widen.isValueType - then - checkNotUniversal.traverse(tpe.widen) - end checkNotUniversalInUnboxedResult - trait CheckerAPI: /** Complete symbol info of a val or a def */ - def completeDef(tree: ValOrDefDef, sym: Symbol)(using Context): Type + def completeDef(tree: ValOrDefDef, sym: Symbol, completer: LazyType)(using Context): Type extension [T <: Tree](tree: T) @@ -237,6 +195,21 @@ object CheckCaptures: /** Was a new type installed for this tree? */ def hasNuType: Boolean + + /** Is this tree passed to a parameter or assigned to a value with a type + * that contains cap in no-flip covariant position, which will necessitate + * a separation check? + */ + def needsSepCheck: Boolean + + /** If a tree is an argument for which needsSepCheck is true, + * the type of the formal parameter corresponding to the argument. + */ + def formalType: Type + + /** The "use set", i.e. the capture set marked as free at this node. */ + def markedFree: CaptureSet + end CheckerAPI class CheckCaptures extends Recheck, SymTransformer: @@ -261,6 +234,8 @@ class CheckCaptures extends Recheck, SymTransformer: class CaptureChecker(ictx: Context) extends Rechecker(ictx), CheckerAPI: + // println(i"checking ${ictx.source}"(using ictx)) + /** The current environment */ private val rootEnv: Env = inContext(ictx): Env(defn.RootClass, EnvKind.Regular, CaptureSet.empty, null) @@ -277,61 +252,148 @@ class CheckCaptures extends Recheck, SymTransformer: */ private val todoAtPostCheck = new mutable.ListBuffer[() => Unit] + /** Maps trees that need a separation check because they are arguments to + * polymorphic parameters. 
The trees are mapped to the formal parameter type. + */ + private val sepCheckFormals = util.EqHashMap[Tree, Type]() + + /** The references used at identifier or application trees */ + private val usedSet = util.EqHashMap[Tree, CaptureSet]() + + /** The set of symbols that were rechecked via a completer */ + private val completed = new mutable.HashSet[Symbol] + + var needAnotherRun = false + + def resetIteration()(using Context): Unit = + needAnotherRun = false + resetNuTypes() + todoAtPostCheck.clear() + completed.clear() + + extension [T <: Tree](tree: T) + def needsSepCheck: Boolean = sepCheckFormals.contains(tree) + def formalType: Type = sepCheckFormals.getOrElse(tree, NoType) + def markedFree = usedSet.getOrElse(tree, CaptureSet.empty) + /** Instantiate capture set variables appearing contra-variantly to their * upper approximation. */ - private def interpolator(startingVariance: Int = 1)(using Context) = new TypeTraverser: - variance = startingVariance - override def traverse(t: Type) = t match - case t @ CapturingType(parent, refs) => - refs match - case refs: CaptureSet.Var if variance < 0 => refs.solve() + private def interpolate(tp: Type, sym: Symbol, startingVariance: Int = 1)(using Context): Unit = + + object variances extends TypeTraverser: + variance = startingVariance + val varianceOfVar = EqHashMap[CaptureSet.Var, Int]() + override def traverse(t: Type) = t match + case t @ CapturingType(parent, refs) => + refs match + case refs: CaptureSet.Var if !refs.isConst => + varianceOfVar(refs) = varianceOfVar.get(refs) match + case Some(v0) => if v0 == 0 then 0 else (v0 + variance) / 2 + case None => variance + case _ => + traverse(parent) + case t @ defn.RefinedFunctionOf(rinfo) => + traverse(rinfo) + case _ => + traverseChildren(t) + + val interpolator = new TypeTraverser: + override def traverse(t: Type) = t match + case t @ CapturingType(parent, refs) => + refs match + case refs: CaptureSet.Var if !refs.isConst => + if variances.varianceOfVar(refs) < 0 then refs.solve() + else refs.markSolved(provisional = !sym.isMutableVar) + case _ => + traverse(parent) + case t @ defn.RefinedFunctionOf(rinfo) => + traverse(rinfo) + case _ => + traverseChildren(t) + + variances.traverse(tp) + interpolator.traverse(tp) + end interpolate + + /* Also set any previously unset owners of toplevel Fresh instances to improve + * error diagnostics in separation checking. + */ + private def anchorCaps(sym: Symbol)(using Context) = new TypeTraverser: + override def traverse(t: Type) = + if variance > 0 then + t match + case t @ CapturingType(parent, refs) => + for ref <- refs.elems do + ref match + case ref: FreshCap if !ref.hiddenSet.givenOwner.exists => + ref.hiddenSet.givenOwner = sym + case _ => + traverse(parent) + case t @ defn.RefinedFunctionOf(rinfo) => + traverse(rinfo) case _ => - traverse(parent) - case t @ defn.RefinedFunctionOf(rinfo) => - traverse(rinfo) - case _ => - traverseChildren(t) + traverseChildren(t) /** If `tpt` is an inferred type, interpolate capture set variables appearing contra- - * variantly in it. + * variantly in it. Also anchor Fresh instances with anchorCaps. 
*/ - private def interpolateVarsIn(tpt: Tree)(using Context): Unit = + private def interpolateIfInferred(tpt: Tree, sym: Symbol)(using Context): Unit = if tpt.isInstanceOf[InferredTypeTree] then - interpolator().traverse(tpt.nuType) - .showing(i"solved vars in ${tpt.nuType}", capt) - for msg <- ccState.approxWarnings do - report.warning(msg, tpt.srcPos) - ccState.approxWarnings.clear() + interpolate(tpt.nuType, sym) + .showing(i"solved vars for $sym in ${tpt.nuType}", capt) + anchorCaps(sym).traverse(tpt.nuType) + for msg <- ccState.approxWarnings do + report.warning(msg, tpt.srcPos) + ccState.approxWarnings.clear() /** Assert subcapturing `cs1 <: cs2` (available for debugging, otherwise unused) */ def assertSub(cs1: CaptureSet, cs2: CaptureSet)(using Context) = - assert(cs1.subCaptures(cs2, frozen = false).isOK, i"$cs1 is not a subset of $cs2") + assert(cs1.subCaptures(cs2), i"$cs1 is not a subset of $cs2") /** If `res` is not CompareResult.OK, report an error */ - def checkOK(res: CompareResult, prefix: => String, pos: SrcPos, provenance: => String = "")(using Context): Unit = - if !res.isOK then - def toAdd: String = CaptureSet.levelErrors.toAdd.mkString - def descr: String = - val d = res.blocking.description - if d.isEmpty then provenance else "" - report.error(em"$prefix included in the allowed capture set ${res.blocking}$descr$toAdd", pos) + def checkOK(res: TypeComparer.CompareResult, prefix: => String, added: Capability | CaptureSet, target: CaptureSet, pos: SrcPos, provenance: => String = "")(using Context): Unit = + res match + case TypeComparer.CompareResult.Fail(notes) => + val ((res: IncludeFailure) :: Nil, otherNotes) = + notes.partition(_.isInstanceOf[IncludeFailure]): @unchecked + def msg(provisional: Boolean) = + def toAdd: String = errorNotes(otherNotes).toAdd.mkString + def descr: String = + val d = res.cs.description + if d.isEmpty then provenance else "" + def kind = if provisional then "previously estimated\n" else "allowed " + em"$prefix included in the ${kind}capture set ${res.cs}$descr$toAdd" + target match + case target: CaptureSet.Var + if res.cs.isProvisionallySolved => + report.warning( + msg(provisional = true) + .prepend(i"Another capture checking run needs to be scheduled because\n"), + pos) + needAnotherRun = true + added match + case added: Capability => target.elems += added + case added: CaptureSet => target.elems ++= added.elems + case _ => + report.error(msg(provisional = false), pos) + case _ => /** Check subcapturing `{elem} <: cs`, report error on failure */ - def checkElem(elem: CaptureRef, cs: CaptureSet, pos: SrcPos, provenance: => String = "")(using Context) = + def checkElem(elem: Capability, cs: CaptureSet, pos: SrcPos, provenance: => String = "")(using Context) = checkOK( - elem.singletonCaptureSet.subCaptures(cs, frozen = false), + TypeComparer.compareResult(elem.singletonCaptureSet.subCaptures(cs)), i"$elem cannot be referenced here; it is not", - pos, provenance) + elem, cs, pos, provenance) /** Check subcapturing `cs1 <: cs2`, report error on failure */ def checkSubset(cs1: CaptureSet, cs2: CaptureSet, pos: SrcPos, provenance: => String = "", cs1description: String = "")(using Context) = checkOK( - cs1.subCaptures(cs2, frozen = false), - if cs1.elems.size == 1 then i"reference ${cs1.elems.toList.head}$cs1description is not" + TypeComparer.compareResult(cs1.subCaptures(cs2)), + if cs1.elems.size == 1 then i"reference ${cs1.elems.nth(0)}$cs1description is not" else i"references $cs1$cs1description are not all", - pos, provenance) + cs1, 
cs2, pos, provenance) /** If `sym` is a class or method nested inside a term, a capture set variable representing * the captured variables of the environment associated with `sym`. @@ -339,7 +401,7 @@ class CheckCaptures extends Recheck, SymTransformer: def capturedVars(sym: Symbol)(using Context): CaptureSet = myCapturedVars.getOrElseUpdate(sym, if sym.ownersIterator.exists(_.isTerm) - then CaptureSet.Var(sym.owner, level = sym.ccLevel) + then CaptureSet.Var(sym.owner, level = ccState.symLevel(sym)) else CaptureSet.empty) // ---- Record Uses with MarkFree ---------------------------------------------------- @@ -367,7 +429,7 @@ class CheckCaptures extends Recheck, SymTransformer: i"\nof the enclosing ${owner.showLocated}" /** Does the given environment belong to a method that is (a) nested in a term - * and (b) not the method of an anonympus function? + * and (b) not the method of an anonymous function? */ def isOfNestedMethod(env: Env | Null)(using Context) = env != null @@ -378,17 +440,17 @@ class CheckCaptures extends Recheck, SymTransformer: /** Include `sym` in the capture sets of all enclosing environments nested in the * the environment in which `sym` is defined. */ - def markFree(sym: Symbol, pos: SrcPos)(using Context): Unit = - markFree(sym, sym.termRef, pos) + def markFree(sym: Symbol, tree: Tree)(using Context): Unit = + markFree(sym, sym.termRef, tree) - def markFree(sym: Symbol, ref: TermRef, pos: SrcPos)(using Context): Unit = - if sym.exists && ref.isTracked then markFree(ref.captureSet, pos) + def markFree(sym: Symbol, ref: Capability, tree: Tree)(using Context): Unit = + if sym.exists && ref.isTracked then markFree(ref.singletonCaptureSet, tree) /** Make sure the (projected) `cs` is a subset of the capture sets of all enclosing * environments. At each stage, only include references from `cs` that are outside * the environment's owner */ - def markFree(cs: CaptureSet, pos: SrcPos)(using Context): Unit = + def markFree(cs: CaptureSet, tree: Tree)(using Context): Unit = // A captured reference with the symbol `sym` is visible from the environment // if `sym` is not defined inside the owner of the environment. inline def isVisibleFromEnv(sym: Symbol, env: Env) = @@ -399,10 +461,10 @@ class CheckCaptures extends Recheck, SymTransformer: !sym.isContainedIn(env.owner) } - /** If captureRef `c` refers to a parameter that is not @use declared, report an error. + /** If capability `c` refers to a parameter that is not @use declared, report an error. * Exception under deferredReaches: If use comes from a nested closure, accept it. */ - def checkUseDeclared(c: CaptureRef, env: Env, lastEnv: Env | Null) = + def checkUseDeclared(c: Capability, env: Env, lastEnv: Env | Null) = if lastEnv != null && env.nestedClosure.exists && env.nestedClosure == lastEnv.owner then assert(ccConfig.deferredReaches) // access is from a nested closure under deferredReaches, so it's OK else c.pathRoot match @@ -410,34 +472,35 @@ class CheckCaptures extends Recheck, SymTransformer: val what = if ref.isType then "Capture set parameter" else "Local reach capability" report.error( em"""$what $c leaks into capture scope of ${env.ownerString}. - |To allow this, the ${ref.symbol} should be declared with a @use annotation""", pos) + |To allow this, the ${ref.symbol} should be declared with a @use annotation""", tree.srcPos) case _ => /** Avoid locally defined capability by charging the underlying type * (which may not be cap). This scheme applies only under the deferredReaches setting. 
*/ - def avoidLocalCapability(c: CaptureRef, env: Env, lastEnv: Env | Null): Unit = + def avoidLocalCapability(c: Capability, env: Env, lastEnv: Env | Null): Unit = if c.isParamPath then c match - case ReachCapability(_) | _: TypeRef => + case Reach(_) | _: TypeRef => checkUseDeclared(c, env, lastEnv) case _ => else val underlying = c match - case ReachCapability(c1) => - CaptureSet.ofTypeDeeply(c1.widen) - case _ => - CaptureSet.ofType(c.widen, followResult = false) - capt.println(i"Widen reach $c to $underlying in ${env.owner}") - underlying.disallowRootCapability: () => - report.error(em"Local capability $c in ${env.ownerString} cannot have `cap` as underlying capture set", pos) + case Reach(c1) => CaptureSet.ofTypeDeeply(c1.widen) + case _ => c.core match + case c1: RootCapability => c1.singletonCaptureSet + case c1: CoreCapability => + CaptureSet.ofType(c1.widen, followResult = false) + capt.println(i"Widen reach $c to $underlying in ${env.owner}") + underlying.disallowRootCapability(NoSymbol): () => + report.error(em"Local capability $c in ${env.ownerString} cannot have `cap` as underlying capture set", tree.srcPos) recur(underlying, env, lastEnv) /** Avoid locally defined capability if it is a reach capability or capture set * parameter. This is the default. */ - def avoidLocalReachCapability(c: CaptureRef, env: Env): Unit = c match - case ReachCapability(c1) => + def avoidLocalReachCapability(c: Capability, env: Env): Unit = c match + case Reach(c1) => if c1.isParamPath then checkUseDeclared(c, env, null) else @@ -452,9 +515,15 @@ class CheckCaptures extends Recheck, SymTransformer: // The path-use.scala neg test contains an example. val underlying = CaptureSet.ofTypeDeeply(c1.widen) capt.println(i"Widen reach $c to $underlying in ${env.owner}") - underlying.disallowRootCapability: () => - report.error(em"Local reach capability $c leaks into capture scope of ${env.ownerString}", pos) - recur(underlying, env, null) + if ccConfig.useSepChecks then + recur(underlying.filter(!_.isTerminalCapability), env, null) + // we don't want to disallow underlying Fresh instances, since these are typically locally created + // fresh capabilities. We don't need to also follow the hidden set since separation + // checking makes sure that locally hidden references need to go to @consume parameters. + else + underlying.disallowRootCapability(ctx.owner): () => + report.error(em"Local reach capability $c leaks into capture scope of ${env.ownerString}", tree.srcPos) + recur(underlying, env, null) case c: TypeRef if c.isParamPath => checkUseDeclared(c, env, null) case _ => @@ -470,7 +539,7 @@ class CheckCaptures extends Recheck, SymTransformer: then avoidLocalCapability(c, env, lastEnv) else avoidLocalReachCapability(c, env) isVisible - checkSubset(included, env.captured, pos, provenance(env)) + checkSubset(included, env.captured, tree.srcPos, provenance(env)) capt.println(i"Include call or box capture $included from $cs in ${env.owner} --> ${env.captured}") if !isOfNestedMethod(env) then recur(included, nextEnvToCharge(env, !_.owner.isStaticOwner), env) @@ -478,13 +547,28 @@ class CheckCaptures extends Recheck, SymTransformer: // will be charged when that method is called. 
recur(cs, curEnv, null) + usedSet(tree) = tree.markedFree ++ cs end markFree /** Include references captured by the called method in the current environment stack */ - def includeCallCaptures(sym: Symbol, resType: Type, pos: SrcPos)(using Context): Unit = resType match + def includeCallCaptures(sym: Symbol, resType: Type, tree: Tree)(using Context): Unit = resType match case _: MethodOrPoly => // wait until method is fully applied case _ => - if sym.exists && curEnv.isOpen then markFree(capturedVars(sym), pos) + def isRetained(ref: Capability): Boolean = ref.pathRoot match + case root: ThisType => ctx.owner.isContainedIn(root.cls) + case _ => true + if sym.exists && curEnv.isOpen then + markFree(capturedVars(sym).filter(isRetained), tree) + + /** If `tp` (possibly after widening singletons) is an ExprType + * of a parameterless method, map Result instances in it to Fresh instances + */ + def mapResultRoots(tp: Type, sym: Symbol)(using Context): Type = + tp.widenSingleton match + case tp: ExprType if sym.is(Method) => + resultToFresh(tp, Origin.ResultInstance(tp, sym)) + case _ => + tp /** Under the sealed policy, disallow the root capability in type arguments. * Type arguments come either from a TypeApply node or from an AppliedType @@ -497,7 +581,7 @@ class CheckCaptures extends Recheck, SymTransformer: */ def disallowCapInTypeArgs(fn: Tree, sym: Symbol, args: List[Tree])(using Context): Unit = def isExempt = sym.isTypeTestOrCast || sym == defn.Compiletime_erasedValue - if ccConfig.useSealed && !isExempt then + if !isExempt then val paramNames = atPhase(thisPhase.prev): fn.tpe.widenDealias match case tl: TypeLambda => tl.paramNames @@ -508,44 +592,57 @@ class CheckCaptures extends Recheck, SymTransformer: for case (arg: TypeTree, pname) <- args.lazyZip(paramNames) do def where = if sym.exists then i" in an argument of $sym" else "" - val (addendum, pos) = + val (addendum, errTree) = if arg.isInferred - then ("\nThis is often caused by a local capability$where\nleaking as part of its result.", fn.srcPos) - else if arg.span.exists then ("", arg.srcPos) - else ("", fn.srcPos) + then (i"\nThis is often caused by a local capability$where\nleaking as part of its result.", fn) + else if arg.span.exists then ("", arg) + else ("", fn) disallowRootCapabilitiesIn(arg.nuType, NoSymbol, - i"Type variable $pname of $sym", "be instantiated to", addendum, pos) + i"Type variable $pname of $sym", "be instantiated to", addendum, errTree.srcPos) val param = fn.symbol.paramNamed(pname) - if param.isUseParam then markFree(arg.nuType.deepCaptureSet, pos) + if param.isUseParam then markFree(arg.nuType.deepCaptureSet, errTree) end disallowCapInTypeArgs + /** Rechecking idents involves: + * - adding call captures for idents referring to methods + * - marking as free the identifier with any selections or .rd + * modifiers implied by the expected type + */ override def recheckIdent(tree: Ident, pt: Type)(using Context): Type = val sym = tree.symbol if sym.is(Method) then // If ident refers to a parameterless method, charge its cv to the environment - includeCallCaptures(sym, sym.info, tree.srcPos) + includeCallCaptures(sym, sym.info, tree) else if !sym.isStatic then - // Otherwise charge its symbol, but add all selections implied by the e - // expected type `pt`. - // Example: If we have `x` and the expected type says we select that with `.a.b`, - // we charge `x.a.b` instead of `x`. 
- def addSelects(ref: TermRef, pt: Type): TermRef = pt match + // Otherwise charge its symbol, but add all selections and also any `.rd` + // modifier implied by the expected type `pt`. + // Example: If we have `x` and the expected type says we select that with `.a.b` + // where `b` is a read-only method, we charge `x.a.b.rd` instead of `x`. + def addSelects(ref: TermRef, pt: Type): Capability = pt match case pt: PathSelectionProto if ref.isTracked => - // if `ref` is not tracked then the selection could not give anything new - // class SerializationProxy in stdlib-cc/../LazyListIterable.scala has an example where this matters. - addSelects(ref.select(pt.sym).asInstanceOf[TermRef], pt.pt) + if pt.sym.isReadOnlyMethod then + ref.readOnly + else + // if `ref` is not tracked then the selection could not give anything new + // class SerializationProxy in stdlib-cc/../LazyListIterable.scala has an example where this matters. + addSelects(ref.select(pt.sym).asInstanceOf[TermRef], pt.pt) case _ => ref - val pathRef = addSelects(sym.termRef, pt) - markFree(sym, pathRef, tree.srcPos) - super.recheckIdent(tree, pt) + var pathRef: Capability = addSelects(sym.termRef, pt) + if pathRef.derivesFromMutable && pt.isValueType && !pt.isMutableType then + pathRef = pathRef.readOnly + markFree(sym, pathRef, tree) + mapResultRoots(super.recheckIdent(tree, pt), tree.symbol) /** The expected type for the qualifier of a selection. If the selection - * could be part of a capabaility path, we return a PathSelectionProto. + * could be part of a capability path or is a read-only method, we return + * a PathSelectionProto. */ override def selectionProto(tree: Select, pt: Type)(using Context): Type = val sym = tree.symbol - if !sym.isOneOf(UnstableValueFlags) && !sym.isStatic then PathSelectionProto(sym, pt) + if !sym.isOneOf(UnstableValueFlags) && !sym.isStatic + || sym.isReadOnlyMethod + then PathSelectionProto(sym, pt) else super.selectionProto(tree, pt) /** A specialized implementation of the selection rule. @@ -573,14 +670,23 @@ class CheckCaptures extends Recheck, SymTransformer: } case _ => denot - val selType = recheckSelection(tree, qualType, name, disambiguate) + // Don't allow update methods to be called unless the qualifier captures + // an exclusive reference. TODO This should probably be rolled into + // qualifier logic once we have it. + if tree.symbol.isUpdateMethod && !qualType.captureSet.isExclusive then + report.error( + em"""cannot call update ${tree.symbol} from $qualType, + |since its capture set ${qualType.captureSet} is read-only""", + tree.srcPos) + + val selType = mapResultRoots(recheckSelection(tree, qualType, name, disambiguate), tree.symbol) val selWiden = selType.widen // Don't apply the rule // - on the LHS of assignments, or // - if the qualifier or selection type is boxed, or - // - the selection is either a trackable capture ref or a pure type - if pt == LhsProto + // - the selection is either a trackable capture reference or a pure type + if noWiden(selType, pt) || qualType.isBoxedCapturing || selWiden.isBoxedCapturing || selType.isTrackableRef @@ -602,15 +708,17 @@ class CheckCaptures extends Recheck, SymTransformer: selType }//.showing(i"recheck sel $tree, $qualType = $result") - /** Hook for massaging a function before it is applied. 
Copies all @use and @consume + * annotations on method parameter symbols to the corresponding paramInfo types. */ override def prepareFunction(funtpe: MethodType, meth: Symbol)(using Context): MethodType = - val paramInfosWithUses = funtpe.paramInfos.zipWithConserve(funtpe.paramNames): (formal, pname) => - val param = meth.paramNamed(pname) - param.getAnnotation(defn.UseAnnot) match - case Some(ann) => AnnotatedType(formal, ann) - case _ => formal + val paramInfosWithUses = + funtpe.paramInfos.zipWithConserve(funtpe.paramNames): (formal, pname) => + val param = meth.paramNamed(pname) + def copyAnnot(tp: Type, cls: ClassSymbol) = param.getAnnotation(cls) match + case Some(ann) => AnnotatedType(tp, ann) + case _ => tp + copyAnnot(copyAnnot(formal, defn.UseAnnot), defn.ConsumeAnnot) funtpe.derivedLambdaType(paramInfos = paramInfosWithUses) /** Recheck applications, with special handling of unsafeAssumePure. @@ -620,28 +728,31 @@ class CheckCaptures extends Recheck, SymTransformer: val meth = tree.fun.symbol if meth == defn.Caps_unsafeAssumePure then val arg :: Nil = tree.args: @unchecked - val argType0 = recheck(arg, pt.capturing(CaptureSet.universal)) + val argType0 = recheck(arg, pt.stripCapturing.capturing(FreshCap(Origin.UnsafeAssumePure))) val argType = if argType0.captureSet.isAlwaysEmpty then argType0 else argType0.widen.stripCapturing - capt.println(i"rechecking $arg with $pt: $argType") + capt.println(i"rechecking unsafeAssumePure of $arg with $pt: $argType") super.recheckFinish(argType, tree, pt) else val res = super.recheckApply(tree, pt) - includeCallCaptures(meth, res, tree.srcPos) + includeCallCaptures(meth, res, tree) res - /** Recheck argument, and, if formal parameter carries a `@use`, + /** Recheck argument against a "freshened" version of `formal` where toplevel `cap` + * occurrences are replaced by `Fresh` instances. Also, if formal parameter carries a `@use`, * charge the deep capture set of the actual argument to the environment. 
*/ - protected override def recheckArg(arg: Tree, formal: Type)(using Context): Type = - val argType = recheck(arg, formal) - formal match - case AnnotatedType(formal1, ann) if ann.symbol == defn.UseAnnot => - // The UseAnnot is added to `formal` by `prepareFunction` - capt.println(i"charging deep capture set of $arg: ${argType} = ${argType.deepCaptureSet}") - markFree(argType.deepCaptureSet, arg.srcPos) - case _ => + protected override def recheckArg(arg: Tree, formal: Type, pref: ParamRef, app: Apply)(using Context): Type = + val freshenedFormal = capToFresh(formal, Origin.Formal(pref, app)) + val argType = recheck(arg, freshenedFormal) + .showing(i"recheck arg $arg vs $freshenedFormal = $result", capt) + if formal.hasAnnotation(defn.UseAnnot) || formal.hasAnnotation(defn.ConsumeAnnot) then + // The @use and/or @consume annotation is added to `formal` by `prepareFunction` + capt.println(i"charging deep capture set of $arg: ${argType} = ${argType.deepCaptureSet}") + markFree(argType.deepCaptureSet, arg) + if formal.containsCap then + sepCheckFormals(arg) = freshenedFormal argType /** Map existential captures in result to `cap` and implement the following @@ -667,13 +778,13 @@ class CheckCaptures extends Recheck, SymTransformer: */ protected override def recheckApplication(tree: Apply, qualType: Type, funType: MethodType, argTypes: List[Type])(using Context): Type = - val appType = Existential.toCap(super.recheckApplication(tree, qualType, funType, argTypes)) + val appType = resultToFresh( + super.recheckApplication(tree, qualType, funType, argTypes), + Origin.ResultInstance(funType, tree.symbol)) val qualCaptures = qualType.captureSet val argCaptures = for (argType, formal) <- argTypes.lazyZip(funType.paramInfos) yield - formal match - case AnnotatedType(_, ann) if ann.symbol == defn.UseAnnot => argType.deepCaptureSet - case _ => argType.captureSet + if formal.hasAnnotation(defn.UseAnnot) then argType.deepCaptureSet else argType.captureSet appType match case appType @ CapturingType(appType1, refs) if qualType.exists @@ -694,19 +805,11 @@ class CheckCaptures extends Recheck, SymTransformer: * This means * - Instantiate result type with actual arguments * - if `sym` is a constructor, refine its type with `refineInstanceType` - * If all argument types are mutually different trackable capture references, use a BiTypeMap, - * since that is more precise. Otherwise use a normal idempotent map, which might lose information - * in the case where the result type contains captureset variables that are further - * constrained afterwards. */ override def instantiate(mt: MethodType, argTypes: List[Type], sym: Symbol)(using Context): Type = val ownType = - if !mt.isResultDependent then - mt.resType - else if argTypes.forall(_.isTrackableRef) && isDistinct(argTypes) then - SubstParamsBiMap(mt, argTypes)(mt.resType) - else - SubstParamsMap(mt, argTypes)(mt.resType) + if !mt.isResultDependent then mt.resType + else SubstParamsMap(mt, argTypes)(mt.resType) if sym.isConstructor then refineConstructorInstance(ownType, mt, argTypes, sym) else ownType @@ -722,20 +825,29 @@ class CheckCaptures extends Recheck, SymTransformer: /** First half of result pair: * Refine the type of a constructor call `new C(t_1, ..., t_n)` - * to C{val x_1: T_1, ..., x_m: T_m} where x_1, ..., x_m are the tracked - * parameters of C and T_1, ..., T_m are the types of the corresponding arguments. 
+ * to C{val x_1: @refineOverride T_1, ..., x_m: @refineOverride T_m} + * where x_1, ..., x_m are the tracked parameters of C and + * T_1, ..., T_m are the types of the corresponding arguments. The @refineOverride + * annotations avoid problematic intersections of capture sets when those + * parameters are selected. * * Second half: union of initial capture set and all capture sets of arguments - * to tracked parameters. + * to tracked parameters. The initial capture set `initCs` is augmented with + * - FreshCap(...) if `core` extends Mutable + * - FreshCap(...).rd if `core` extends Capability */ def addParamArgRefinements(core: Type, initCs: CaptureSet): (Type, CaptureSet) = var refined: Type = core var allCaptures: CaptureSet = - if core.derivesFromCapability then defn.universalCSImpliedByCapability else initCs + if core.derivesFromMutable then + initCs ++ FreshCap(Origin.NewMutable(core)).singletonCaptureSet + else if core.derivesFromCapability then + initCs ++ FreshCap(Origin.NewCapability(core)).readOnly.singletonCaptureSet + else initCs for (getterName, argType) <- mt.paramNames.lazyZip(argTypes) do val getter = cls.info.member(getterName).suchThat(_.isRefiningParamAccessor).symbol if !getter.is(Private) && getter.hasTrackedParts then - refined = RefinedType(refined, getterName, argType.unboxed) // Yichen you might want to check this + refined = refined.refinedOverride(getterName, argType.unboxed) // Yichen you might want to check this allCaptures ++= argType.captureSet (refined, allCaptures) @@ -769,9 +881,11 @@ class CheckCaptures extends Recheck, SymTransformer: val meth = tree.fun match case fun @ Select(qual, nme.apply) => qual.symbol.orElse(fun.symbol) case fun => fun.symbol + def methDescr = if meth.exists then i"$meth's type " else "" disallowCapInTypeArgs(tree.fun, meth, tree.args) - val res = Existential.toCap(super.recheckTypeApply(tree, pt)) - includeCallCaptures(tree.symbol, res, tree.srcPos) + val funType = super.recheckTypeApply(tree, pt) + val res = resultToFresh(funType, Origin.ResultInstance(funType, meth)) + includeCallCaptures(tree.symbol, res, tree) checkContains(tree) res end recheckTypeApply @@ -785,15 +899,12 @@ class CheckCaptures extends Recheck, SymTransformer: val ref = refArg.nuType capt.println(i"check contains $cs , $ref") ref match - case ref: CaptureRef if ref.isTracked => + case ref: Capability if ref.isTracked => checkElem(ref, cs, tree.srcPos) case _ => report.error(em"$refArg is not a tracked capability", refArg.srcPos) case _ => - override def recheckBlock(tree: Block, pt: Type)(using Context): Type = - inNestedLevel(super.recheckBlock(tree, pt)) - /** Recheck Closure node: add the captured vars of the anonymoys function * to the result type. See also `recheckClosureBlock` which rechecks the * block containing the anonymous function and the Closure node. @@ -808,28 +919,58 @@ class CheckCaptures extends Recheck, SymTransformer: * { def $anonfun(...) 
= ...; closure($anonfun, ...)} */ override def recheckClosureBlock(mdef: DefDef, expr: Closure, pt: Type)(using Context): Type = + + def matchParams(paramss: List[ParamClause], pt: Type): Unit = + //println(i"match $mdef against $pt") + paramss match + case params :: paramss1 => pt match + case defn.PolyFunctionOf(poly: PolyType) => + assert(params.hasSameLengthAs(poly.paramInfos)) + matchParams(paramss1, poly.instantiate(params.map(_.symbol.typeRef))) + case FunctionOrMethod(argTypes, resType) => + assert(params.hasSameLengthAs(argTypes), i"$mdef vs $pt, ${params}") + for (argType, param) <- argTypes.lazyZip(params) do + val paramTpt = param.asInstanceOf[ValDef].tpt + val paramType = freshToCap(paramTpt.nuType) + checkConformsExpr(argType, paramType, param) + .showing(i"compared expected closure formal $argType against $param with ${paramTpt.nuType}", capt) + if ccConfig.preTypeClosureResults && !(isEtaExpansion(mdef) && ccConfig.handleEtaExpansionsSpecially) then + // Check whether the closure's result conforms to the expected type + // This constrains parameter types of the closure which can give better + // error messages. + // But if the closure is an eta expanded method reference it's better to not constrain + // its internals early since that would give error messages in generated code + // which are less intelligible. An example is the line `a = x` in + // neg-custom-args/captures/vars.scala. That's why this code is conditioned. + // to apply only to closures that are not eta expansions. + assert(paramss1.isEmpty) + val respt0 = pt match + case defn.RefinedFunctionOf(rinfo) => + val paramTypes = params.map(_.asInstanceOf[ValDef].tpt.nuType) + rinfo.instantiate(paramTypes) + case _ => + resType + val respt = resultToFresh(respt0, Origin.LambdaExpected(respt0)) + val res = resultToFresh(mdef.tpt.nuType, Origin.LambdaActual(mdef.tpt.nuType)) + // We need to open existentials here in order not to get vars mixed up in them + // We do the proper check with existentials when we are finished with the closure block. + capt.println(i"pre-check closure $expr of type $res against $respt") + checkConformsExpr(res, respt, expr) + case _ => + case Nil => + openClosures = (mdef.symbol, pt) :: openClosures + // openClosures is needed for errors but currently makes no difference + // TODO follow up on this try - // Constrain closure's parameters and result from the expected type before - // rechecking the body. - val res = recheckClosure(expr, pt, forceDependent = true) - if !(isEtaExpansion(mdef) && ccConfig.handleEtaExpansionsSpecially) then - // Check whether the closure's results conforms to the expected type - // This constrains parameter types of the closure which can give better - // error messages. - // But if the closure is an eta expanded method reference it's better to not constrain - // its internals early since that would give error messages in generated code - // which are less intelligible. An example is the line `a = x` in - // neg-custom-args/captures/vars.scala. That's why this code is conditioned. - // to apply only to closures that are not eta expansions. - val res1 = Existential.toCapDeeply(res) - val pt1 = Existential.toCapDeeply(pt) - // We need to open existentials here in order not to get vars mixed up in them - // We do the proper check with existentials when we are finished with the closure block. 
- capt.println(i"pre-check closure $expr of type $res1 against $pt1") - checkConformsExpr(res1, pt1, expr) - recheckDef(mdef, mdef.symbol) - res + matchParams(mdef.paramss, pt) + capt.println(i"recheck closure block $mdef: ${mdef.symbol.infoOrCompleter}") + if !mdef.symbol.isCompleted then + mdef.symbol.ensureCompleted() // this will recheck def + else + recheckDef(mdef, mdef.symbol) + + recheckClosure(expr, pt, forceDependent = true) finally openClosures = openClosures.tail end recheckClosureBlock @@ -851,7 +992,7 @@ class CheckCaptures extends Recheck, SymTransformer: if sym.is(Module) then sym.info // Modules are checked by checking the module class else if sym.is(Mutable) && !sym.hasAnnotation(defn.UncheckedCapturesAnnot) then - val (carrier, addendum) = capturedBy.get(sym) match + val addendum = capturedBy.get(sym) match case Some(encl) => val enclStr = if encl.isAnonymousFunction then @@ -860,11 +1001,11 @@ class CheckCaptures extends Recheck, SymTransformer: case _ => "" s"an anonymous function$location" else encl.show - (NoSymbol, i"\n\nNote that $sym does not count as local since it is captured by $enclStr") + i"\n\nNote that $sym does not count as local since it is captured by $enclStr" case _ => - (sym, "") + "" disallowRootCapabilitiesIn( - tree.tpt.nuType, carrier, i"Mutable $sym", "have type", addendum, sym.srcPos) + tree.tpt.nuType, NoSymbol, i"Mutable $sym", "have type", addendum, sym.srcPos) checkInferredResult(super.recheckValDef(tree, sym), tree) finally if !sym.is(Param) then @@ -872,7 +1013,7 @@ class CheckCaptures extends Recheck, SymTransformer: // for more info from the context, so we cannot interpolate. Note that we cannot // expect to have all necessary info available at the point where the anonymous // function is compiled since we do not propagate expected types into blocks. - interpolateVarsIn(tree.tpt) + interpolateIfInferred(tree.tpt, sym) /** Recheck method definitions: * - check body in a nested environment that tracks uses, in a nested level, @@ -900,7 +1041,7 @@ class CheckCaptures extends Recheck, SymTransformer: val saved = curEnv val localSet = capturedVars(sym) - if !localSet.isAlwaysEmpty then + if localSet ne CaptureSet.empty then curEnv = Env(sym, EnvKind.Regular, localSet, curEnv, nestedClosure(tree.rhs)) // ctx with AssumedContains entries for each Contains parameter @@ -912,13 +1053,13 @@ class CheckCaptures extends Recheck, SymTransformer: if ac.isEmpty then ctx else ctx.withProperty(CaptureSet.AssumedContains, Some(ac)) - inNestedLevel: // TODO: nestedLevel needed here? + ccState.inNestedLevel: // TODO: nestedLevel needed here? try checkInferredResult(super.recheckDefDef(tree, sym)(using bodyCtx), tree) finally if !sym.isAnonymousFunction then // Anonymous functions propagate their type to the enclosing environment // so it is not in general sound to interpolate their types. - interpolateVarsIn(tree.tpt) + interpolateIfInferred(tree.tpt, sym) curEnv = saved end recheckDefDef @@ -955,16 +1096,16 @@ class CheckCaptures extends Recheck, SymTransformer: tree.tpt match case tpt: InferredTypeTree if !canUseInferred => val expected = tpt.tpe.dropAllRetains - todoAtPostCheck += (() => checkConformsExpr(tp, expected, tree.rhs, addenda(expected))) - // The check that inferred <: expected is done after recheck so that it - // does not interfere with normal rechecking by constraining capture set variables. 
+ todoAtPostCheck += { () => + withCapAsRoot: + checkConformsExpr(tp, expected, tree.rhs, addenda(expected)) + // The check that inferred <: expected is done after recheck so that it + // does not interfere with normal rechecking by constraining capture set variables. + } case _ => tp end checkInferredResult - /** The set of symbols that were rechecked via a completer */ - private val completed = new mutable.HashSet[Symbol] - /** The normal rechecking if `sym` was already completed before */ override def skipRecheck(sym: Symbol)(using Context): Boolean = completed.contains(sym) @@ -973,7 +1114,7 @@ class CheckCaptures extends Recheck, SymTransformer: * these checks can appear out of order, we need to first create the correct * environment for checking the definition. */ - def completeDef(tree: ValOrDefDef, sym: Symbol)(using Context): Type = + def completeDef(tree: ValOrDefDef, sym: Symbol, completer: LazyType)(using Context): Type = val saved = curEnv try // Setup environment to reflect the new owner. @@ -983,7 +1124,7 @@ class CheckCaptures extends Recheck, SymTransformer: .toMap def restoreEnvFor(sym: Symbol): Env = val localSet = capturedVars(sym) - if localSet.isAlwaysEmpty then rootEnv + if localSet eq CaptureSet.empty then rootEnv else envForOwner.get(sym) match case Some(e) => e case None => Env(sym, EnvKind.Regular, localSet, restoreEnvFor(sym.owner)) @@ -994,6 +1135,10 @@ class CheckCaptures extends Recheck, SymTransformer: finally curEnv = saved + override def recheckTypeDef(tree: TypeDef, sym: Symbol)(using Context): Type = + try super.recheckTypeDef(tree, sym) + finally completed += sym + /** Recheck classDef by enforcing the following class-specific capture set relations: * 1. The capture set of a class includes the capture sets of its parents. * 2. The capture set of the self type of a class includes the capture set of the class. @@ -1010,15 +1155,16 @@ class CheckCaptures extends Recheck, SymTransformer: checkSubset(capturedVars(parent.tpe.classSymbol), localSet, parent.srcPos, i"\nof the references allowed to be captured by $cls") val saved = curEnv - if !localSet.isAlwaysEmpty then + if localSet ne CaptureSet.empty then curEnv = Env(cls, EnvKind.Regular, localSet, curEnv) try val thisSet = cls.classInfo.selfType.captureSet.withDescription(i"of the self type of $cls") checkSubset(localSet, thisSet, tree.srcPos) // (2) for param <- cls.paramGetters do if !param.hasAnnotation(defn.ConstructorOnlyAnnot) - && !param.hasAnnotation(defn.UntrackedCapturesAnnot) then - checkSubset(param.termRef.captureSet, thisSet, param.srcPos) // (3) + && !param.hasAnnotation(defn.UntrackedCapturesAnnot) then + withCapAsRoot: // OK? 
We need this here since self types use `cap` instead of `fresh` + checkSubset(param.termRef.captureSet, thisSet, param.srcPos) // (3) for pureBase <- cls.pureBaseClass do // (4) def selfTypeTree = impl.body .collect: @@ -1034,9 +1180,10 @@ class CheckCaptures extends Recheck, SymTransformer: case AppliedType(fn, args) => disallowCapInTypeArgs(tpt, fn.typeSymbol, args.map(TypeTree(_))) case _ => - inNestedLevelUnless(cls.is(Module)): + ccState.inNestedLevelUnless(cls.is(Module)): super.recheckClassDef(tree, impl, cls) finally + completed += cls curEnv = saved /** If type is of the form `T @requiresCapability(x)`, @@ -1048,7 +1195,7 @@ class CheckCaptures extends Recheck, SymTransformer: case AnnotatedType(_, annot) if annot.symbol == defn.RequiresCapabilityAnnot => annot.tree match case Apply(_, cap :: Nil) => - markFree(cap.symbol, tree.srcPos) + markFree(cap.symbol, tree) case _ => case _ => super.recheckTyped(tree) @@ -1057,8 +1204,13 @@ class CheckCaptures extends Recheck, SymTransformer: * result type of a try */ override def recheckTry(tree: Try, pt: Type)(using Context): Type = - val tp = super.recheckTry(tree, pt) - if ccConfig.useSealed && Feature.enabled(Feature.saferExceptions) then + val tryOwner = Setup.firstCanThrowEvidence(tree.expr) match + case Some(vd) => vd.symbol.owner + case None => ctx.owner + val bodyType = inContext(ctx.withOwner(tryOwner)): + recheck(tree.expr, pt) + val tp = recheckTryRest(bodyType, tree.cases, tree.finalizer, pt) + if Feature.enabled(Feature.saferExceptions) then disallowRootCapabilitiesIn(tp, ctx.owner, "The result of `try`", "have type", "\nThis is often caused by a locally generated exception capability leaking as part of its result.", @@ -1092,7 +1244,7 @@ class CheckCaptures extends Recheck, SymTransformer: val saved = curEnv tree match case _: RefTree | closureDef(_) if pt.isBoxedCapturing => - curEnv = Env(curEnv.owner, EnvKind.Boxed, CaptureSet.Var(curEnv.owner, level = currentLevel), curEnv) + curEnv = Env(curEnv.owner, EnvKind.Boxed, CaptureSet.Var(curEnv.owner, level = ccState.currentLevel), curEnv) case _ => val res = try @@ -1103,14 +1255,9 @@ class CheckCaptures extends Recheck, SymTransformer: super.recheck(tree, pt) finally curEnv = saved if tree.isTerm && !pt.isBoxedCapturing && pt != LhsProto then - markFree(res.boxedCaptureSet, tree.srcPos) + markFree(res.boxedCaptureSet, tree) res - - /** Under the old unsealed policy: check that cap is ot unboxed */ - override def recheckFinish(tpe: Type, tree: Tree, pt: Type)(using Context): Type = - checkNotUniversalInUnboxedResult(tpe, tree) - super.recheckFinish(tpe, tree, pt) - end recheckFinish + end recheck // ------------------ Adaptation ------------------------------------- // @@ -1127,18 +1274,38 @@ class CheckCaptures extends Recheck, SymTransformer: type BoxErrors = mutable.ListBuffer[Message] | Null - private def boxErrorAddenda(boxErrors: BoxErrors) = - if boxErrors == null then NothingToAdd + private def errorNotes(notes: List[TypeComparer.ErrorNote])(using Context): Addenda = + val printableNotes = notes.filter: + case IncludeFailure(_, _, true) => true + case _: ExistentialSubsumesFailure => true + case _ => false + if printableNotes.isEmpty then NothingToAdd else new Addenda: - override def toAdd(using Context): List[String] = - boxErrors.toList.map: msg => - i""" - | - |Note that ${msg.toString}""" + override def toAdd(using Context) = printableNotes.map: note => + val msg = note match + case IncludeFailure(cs, ref, _) => + if ref.core.isCapOrFresh then + i"""the universal 
capability $ref + |cannot be included in capture set $cs""" + else + val levelStr = ref match + case ref: TermRef => i", defined in ${ref.symbol.maybeOwner}" + case _ => "" + i"""reference ${ref}$levelStr + |cannot be included in outer capture set $cs""" + case ExistentialSubsumesFailure(ex, other) => + def since = + if other.isTerminalCapability then "" + else " since that capability is not a SharedCapability" + i"""the existential capture root in ${ex.originalBinder.resType} + |cannot subsume the capability $other$since""" + i""" + |Note that ${msg.toString}""" + /** Addendas for error messages that show where we have under-approximated by - * mapping a a capture ref in contravariant position to the empty set because - * the original result type of the map was not itself a capture ref. + * mapping a a capability in contravariant position to the empty set because + * the original result type of the map was not itself a capability. */ private def addApproxAddenda(using Context) = new TypeAccumulator[Addenda]: @@ -1168,26 +1335,26 @@ class CheckCaptures extends Recheck, SymTransformer: */ override def checkConformsExpr(actual: Type, expected: Type, tree: Tree, addenda: Addenda)(using Context): Type = var expected1 = alignDependentFunction(expected, actual.stripCapturing) - val boxErrors = new mutable.ListBuffer[Message] - val actualBoxed = adapt(actual, expected1, tree.srcPos, boxErrors) + val actualBoxed = adapt(actual, expected1, tree) //println(i"check conforms $actualBoxed <<< $expected1") if actualBoxed eq actual then // Only `addOuterRefs` when there is no box adaptation expected1 = addOuterRefs(expected1, actual, tree.srcPos) - if isCompatible(actualBoxed, expected1) then - if debugSuccesses then tree match - case Ident(_) => - println(i"SUCCESS $tree:\n${TypeComparer.explained(_.isSubType(actual, expected))}") - case _ => - actualBoxed - else - capt.println(i"conforms failed for ${tree}: $actual vs $expected") - err.typeMismatch(tree.withType(actualBoxed), expected1, - addApproxAddenda( - addenda ++ CaptureSet.levelErrors ++ boxErrorAddenda(boxErrors), - expected1)) - actual + TypeComparer.compareResult(isCompatible(actualBoxed, expected1)) match + case TypeComparer.CompareResult.Fail(notes) => + capt.println(i"conforms failed for ${tree}: $actual vs $expected") + err.typeMismatch(tree.withType(actualBoxed), expected1, + addApproxAddenda( + addenda ++ errorNotes(notes), + expected1)) + actual + case /*OK*/ _ => + if debugSuccesses then tree match + case Ident(_) => + println(i"SUCCESS $tree for $actual <:< $expected:\n${TypeComparer.explained(_.isSubType(actualBoxed, expected1))}") + case _ => + actualBoxed end checkConformsExpr /** Turn `expected` into a dependent function when `actual` is dependent. 
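 *  For example (illustrative types, not from the sources): if `actual` is the
 *  dependent function type `(x: A) -> B^{x}` and `expected` is the plain function
 *  type `A -> B^`, the expected type is re-formed as a dependent function over the
 *  same parameter names, so that both result types can be compared under a common
 *  binder.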
*/ @@ -1203,6 +1370,14 @@ class CheckCaptures extends Recheck, SymTransformer: case defn.RefinedFunctionOf(rinfo: MethodType) => depFun(args, resultType, isContextual, rinfo.paramNames) case _ => expected + case expected @ defn.RefinedFunctionOf(einfo: MethodType) + if einfo.allParamNamesSynthetic => + actual match + case defn.RefinedFunctionOf(ainfo: MethodType) + if !ainfo.allParamNamesSynthetic && ainfo.paramNames.hasSameLengthAs(einfo.paramNames) => + einfo.derivedLambdaType(paramNames = ainfo.paramNames) + .toFunctionType(alwaysDependent = true) + case _ => expected case _ => expected recur(expected) @@ -1232,11 +1407,11 @@ class CheckCaptures extends Recheck, SymTransformer: else if !owner.exists then false else isPure(owner.info) && isPureContext(owner.owner, limit) - // Augment expeced capture set `erefs` by all references in actual capture + // Augment expected capture set `erefs` by all references in actual capture // set `arefs` that are outside some `C.this.type` reference in `erefs` for an enclosing // class `C`. If an added reference is not a ThisType itself, add it to the capture set // (i.e. use set) of the `C`. This makes sure that any outer reference implicitly subsumed - // by `C.this` becomes a capture reference of every instance of `C`. + // by `C.this` becomes a capability of every instance of `C`. def augment(erefs: CaptureSet, arefs: CaptureSet): CaptureSet = (erefs /: erefs.elems): (erefs, eref) => eref match @@ -1288,7 +1463,7 @@ class CheckCaptures extends Recheck, SymTransformer: * * @param alwaysConst always make capture set variables constant after adaptation */ - def adaptBoxed(actual: Type, expected: Type, pos: SrcPos, covariant: Boolean, alwaysConst: Boolean, boxErrors: BoxErrors)(using Context): Type = + def adaptBoxed(actual: Type, expected: Type, tree: Tree, covariant: Boolean, alwaysConst: Boolean)(using Context): Type = def recur(actual: Type, expected: Type, covariant: Boolean): Type = @@ -1300,7 +1475,7 @@ class CheckCaptures extends Recheck, SymTransformer: val saved = curEnv curEnv = Env( curEnv.owner, EnvKind.NestedInOwner, - CaptureSet.Var(curEnv.owner, level = currentLevel), + CaptureSet.Var(curEnv.owner, level = ccState.currentLevel), if boxed then null else curEnv) try val (eargs, eres) = expected.dealias.stripCapturing match @@ -1320,21 +1495,11 @@ class CheckCaptures extends Recheck, SymTransformer: def adaptStr = i"adapting $actual ${if covariant then "~~>" else "<~~"} $expected" - // Get existentials and wildcards out of the way - actual match - case actual @ Existential(_, actualUnpacked) => - return Existential.derivedExistentialType(actual): - recur(actualUnpacked, expected, covariant) - case _ => + // Get wildcards out of the way expected match - case expected @ Existential(_, expectedUnpacked) => - return recur(actual, expectedUnpacked, covariant) - case _: WildcardType => - return actual + case _: WildcardType => return actual case _ => - trace(adaptStr, capt, show = true) { - // Decompose the actual type into the inner shape type, the capture set and the box status val actualShape = if actual.isFromJavaObject then actual else actual.stripCapturing val actualIsBoxed = actual.isBoxedCapturing @@ -1352,10 +1517,10 @@ class CheckCaptures extends Recheck, SymTransformer: val cs = actual.captureSet if covariant then cs ++ leaked else - if !leaked.subCaptures(cs, frozen = false).isOK then + if !leaked.subCaptures(cs) then report.error( em"""$expected cannot be box-converted to ${actual.capturing(leaked)} - |since the additional capture set 
$leaked resulted from box conversion is not allowed in $actual""", pos) + |since the additional capture set $leaked resulting from box conversion is not allowed in $actual""", tree.srcPos) cs def adaptedType(resultBoxed: Boolean) = @@ -1365,34 +1530,12 @@ class CheckCaptures extends Recheck, SymTransformer: .capturing(if alwaysConst then CaptureSet(captures.elems) else captures) .forceBoxStatus(resultBoxed) - if needsAdaptation then - val criticalSet = // the set with which we box or unbox + if needsAdaptation && !insertBox then // we are unboxing + val criticalSet = // the set with which we unbox if covariant then captures // covariant: we box with captures of actual type plus captures leaked by inner adapation else expected.captureSet // contravarant: we unbox with captures of epected type - def msg = em"""$actual cannot be box-converted to $expected - |since at least one of their capture sets contains the root capability `cap`""" - def allowUniversalInBoxed = - ccConfig.useSealed - || expected.hasAnnotation(defn.UncheckedCapturesAnnot) - || actual.widen.hasAnnotation(defn.UncheckedCapturesAnnot) - if !allowUniversalInBoxed then - if criticalSet.isUnboxable && expected.isValueType then - // We can't box/unbox the universal capability. Leave `actual` as it is - // so we get an error in checkConforms. Add the error message generated - // from boxing as an addendum. This tends to give better error - // messages than disallowing the root capability in `criticalSet`. - if boxErrors != null then boxErrors += msg - if ctx.settings.YccDebug.value then - println(i"cannot box/unbox $actual vs $expected") - return actual - // Disallow future addition of `cap` to `criticalSet`. - criticalSet.disallowRootCapability: () => - report.error(msg, pos) - - if !insertBox then // we are unboxing //debugShowEnvs() - markFree(criticalSet, pos) - end if + markFree(criticalSet, tree) // Compute the adapted type. // The result is boxed if actual is boxed and we don't need to adapt, @@ -1403,13 +1546,12 @@ class CheckCaptures extends Recheck, SymTransformer: else adaptedShape .capturing(if alwaysConst then CaptureSet(captures.elems) else captures) .forceBoxStatus(resultIsBoxed) - } end recur recur(actual, expected, covariant) end adaptBoxed - /** If actual is a tracked CaptureRef `a` and widened is a capturing type T^C, + /** If actual is a tracked Capability `a` and widened is a capturing type T^C, * improve `T^C` to `T^{a}`, following the VAR rule of CC. * TODO: We probably should do this also for other top-level occurrences of captures * E.g. 
@@ -1418,30 +1560,70 @@ class CheckCaptures extends Recheck, SymTransformer: * Then * foo: Foo { def a: C^{foo}; def b: C^{foo} }^{foo} */ - private def improveCaptures(widened: Type, actual: Type)(using Context): Type = actual match - case ref: CaptureRef if ref.isTracked => + private def improveCaptures(widened: Type, prefix: Type)(using Context): Type = prefix match + case ref: Capability if ref.isTracked => widened match - case CapturingType(p, refs) if ref.singletonCaptureSet.mightSubcapture(refs) => - widened.derivedCapturingType(p, ref.singletonCaptureSet) + case widened @ CapturingType(p, refs) if ref.singletonCaptureSet.mightSubcapture(refs) => + val improvedCs = + if widened.isBoxed then ref.reach.singletonCaptureSet + else ref.singletonCaptureSet + widened.derivedCapturingType(p, improvedCs) .showing(i"improve $widened to $result", capt) case _ => widened case _ => widened + /** If actual is a capturing type T^C extending Mutable, and expected is an + * unboxed non-singleton value type not extending mutable, narrow the capture + * set `C` to `ro(C)`. + * The unboxed condition ensures that the expected type is not a type variable + * that's upper bounded by a read-only type. In this case it would not be sound + * to narrow to the read-only set, since that set can be propagated + * by the type variable instantiation. + */ + private def improveReadOnly(actual: Type, expected: Type)(using Context): Type = actual match + case actual @ CapturingType(parent, refs) + if parent.derivesFrom(defn.Caps_Mutable) + && expected.isValueType + && !expected.isMutableType + && !expected.isSingleton + && !expected.isBoxedCapturing => + actual.derivedCapturingType(parent, refs.readOnly) + case _ => + actual + + /* Currently not needed since it forms part of `adapt` + private def improve(actual: Type, prefix: Type)(using Context): Type = + val widened = actual.widen.dealiasKeepAnnots + val improved = improveCaptures(widened, prefix).withReachCaptures(prefix) + if improved eq widened then actual else improved + */ + + /** An actual singleton type should not be widened if the expected type is a + * LhsProto, or a singleton type, or a path selection with a stable value + */ + private def noWiden(actual: Type, expected: Type)(using Context): Boolean = + actual.isSingleton + && expected.match + case expected: PathSelectionProto => !expected.sym.isOneOf(UnstableValueFlags) + case _ => expected.isSingleton || expected == LhsProto + /** Adapt `actual` type to `expected` type. 
This involves: * - narrow toplevel captures of `x`'s underlying type to `{x}` according to CC's VAR rule * - narrow nested captures of `x`'s underlying type to `{x*}` * - do box adaptation */ - def adapt(actual: Type, expected: Type, pos: SrcPos, boxErrors: BoxErrors)(using Context): Type = - if expected == LhsProto || expected.isSingleton && actual.isSingleton then + def adapt(actual: Type, expected: Type, tree: Tree)(using Context): Type = + if noWiden(actual, expected) then actual else - val widened = improveCaptures(actual.widen.dealiasKeepAnnots, actual) + val improvedVAR = improveCaptures(actual.widen.dealiasKeepAnnots, actual) + val improved = improveReadOnly(improvedVAR, expected) val adapted = adaptBoxed( - widened.withReachCaptures(actual), expected, pos, - covariant = true, alwaysConst = false, boxErrors) - if adapted eq widened then actual - else adapted.showing(i"adapt boxed $actual vs $expected = $adapted", capt) + improved.withReachCaptures(actual), expected, tree, + covariant = true, alwaysConst = false) + if adapted eq improvedVAR // no .rd improvement, no box-adaptation + then actual // might as well use actual instead of improved widened + else adapted.showing(i"adapt $actual vs $expected = $adapted", capt) end adapt // ---- Unit-level rechecking ------------------------------------------- @@ -1452,56 +1634,90 @@ class CheckCaptures extends Recheck, SymTransformer: * But maybe we can then elide the check during the RefChecks phase under captureChecking? */ def checkOverrides = new TreeTraverser: - class OverridingPairsCheckerCC(clazz: ClassSymbol, self: Type, srcPos: SrcPos)(using Context) extends OverridingPairsChecker(clazz, self): - /** Check subtype with box adaptation. - * This function is passed to RefChecks to check the compatibility of overriding pairs. - * @param sym symbol of the field definition that is being checked - */ - override def checkSubType(actual: Type, expected: Type)(using Context): Boolean = - val expected1 = alignDependentFunction(addOuterRefs(expected, actual, srcPos), actual.stripCapturing) - val actual1 = - val saved = curEnv - try - curEnv = Env(clazz, EnvKind.NestedInOwner, capturedVars(clazz), outer0 = curEnv) - val adapted = - adaptBoxed(actual, expected1, srcPos, covariant = true, alwaysConst = true, null) - actual match - case _: MethodType => - // We remove the capture set resulted from box adaptation for method types, - // since class methods are always treated as pure, and their captured variables - // are charged to the capture set of the class (which is already done during - // box adaptation). 
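// ----------------------------------------------------------------------------
// Hypothetical user-level illustration, not from this patch, of the parameter
// status check performed by `additionalChecks` further below; it assumes the
// experimental `caps.use` annotation under `experimental.captureChecking`.
// An override may not change the @use (or @consume) status of a parameter:
//
//   import language.experimental.captureChecking
//   import caps.use
//
//   trait Runner:
//     def runAll(@use ops: List[() => Unit]): Unit
//
//   class EagerRunner extends Runner:
//     // flagged: parameter `ops` has a different @use status than the
//     // corresponding parameter in the overridden definition
//     override def runAll(ops: List[() => Unit]): Unit = ops.foreach(_())
// ----------------------------------------------------------------------------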
- adapted.stripCapturing - case _ => adapted - finally curEnv = saved - actual1 frozen_<:< expected1 + class OverridingPairsCheckerCC(clazz: ClassSymbol, self: Type, tree: Tree)(using Context) extends OverridingPairsChecker(clazz, self): /** Omit the check if one of {overriding,overridden} was nnot capture checked */ override def needsCheck(overriding: Symbol, overridden: Symbol)(using Context): Boolean = !setup.isPreCC(overriding) && !setup.isPreCC(overridden) + /** Perform box adaptation for override checking */ + override def adaptOverridePair(member: Symbol, memberTp: Type, otherTp: Type)(using Context): Option[(Type, Type)] = + if member.isType then + memberTp match + case TypeAlias(_) => + otherTp match + case otherTp: RealTypeBounds => + if otherTp.hi.isBoxedCapturing || otherTp.lo.isBoxedCapturing then + Some((memberTp, otherTp.unboxed)) + else otherTp.hi match + case hi @ CapturingType(parent: TypeRef, refs) + if parent.symbol == defn.Caps_CapSet && refs.isUniversal => + Some(( + memberTp, + otherTp.derivedTypeBounds( + otherTp.lo, + hi.derivedCapturingType(parent, + CaptureSet.fresh(Origin.OverriddenType(member)))))) + case _ => None + case _ => None + case _ => None + else memberTp match + case memberTp @ ExprType(memberRes) => + adaptOverridePair(member, memberRes, otherTp) match + case Some((mres, otp)) => Some((memberTp.derivedExprType(mres), otp)) + case None => None + case _ => otherTp match + case otherTp @ ExprType(otherRes) => + adaptOverridePair(member, memberTp, otherRes) match + case Some((mtp, ores)) => Some((mtp, otherTp.derivedExprType(ores))) + case None => None + case _ => + val expected1 = alignDependentFunction(addOuterRefs(otherTp, memberTp, tree.srcPos), memberTp.stripCapturing) + val actual1 = + val saved = curEnv + try + curEnv = Env(clazz, EnvKind.NestedInOwner, capturedVars(clazz), outer0 = curEnv) + val adapted = + adaptBoxed(memberTp, expected1, tree, covariant = true, alwaysConst = true) + memberTp match + case _: MethodType => + // We remove the capture set resulted from box adaptation for method types, + // since class methods are always treated as pure, and their captured variables + // are charged to the capture set of the class (which is already done during + // box adaptation). 
+ adapted.stripCapturing + case _ => adapted + finally curEnv = saved + if (actual1 eq memberTp) && (expected1 eq otherTp) then None + else Some((actual1, expected1)) + end adaptOverridePair + override def checkInheritedTraitParameters: Boolean = false - /** Check that overrides don't change the @use status of their parameters */ + /** Check that overrides don't change the @use or @consume status of their parameters */ override def additionalChecks(member: Symbol, other: Symbol)(using Context): Unit = for (params1, params2) <- member.rawParamss.lazyZip(other.rawParamss) (param1, param2) <- params1.lazyZip(params2) do - if param1.hasAnnotation(defn.UseAnnot) != param2.hasAnnotation(defn.UseAnnot) then - report.error( - OverrideError( - i"has a parameter ${param1.name} with different @use status than the corresponding parameter in the overridden definition", - self, member, other, self.memberInfo(member), self.memberInfo(other) - ), - if member.owner == clazz then member.srcPos else clazz.srcPos - ) + def checkAnnot(cls: ClassSymbol) = + if param1.hasAnnotation(cls) != param2.hasAnnotation(cls) then + report.error( + OverrideError( + i"has a parameter ${param1.name} with different @${cls.name} status than the corresponding parameter in the overridden definition", + self, member, other, self.memberInfo(member), self.memberInfo(other) + ), + if member.owner == clazz then member.srcPos else clazz.srcPos) + + checkAnnot(defn.UseAnnot) + checkAnnot(defn.ConsumeAnnot) end OverridingPairsCheckerCC def traverse(t: Tree)(using Context) = t match case t: Template => - checkAllOverrides(ctx.owner.asClass, OverridingPairsCheckerCC(_, _, t)) + withCollapsedFresh: + checkAllOverrides(ctx.owner.asClass, OverridingPairsCheckerCC(_, _, t)) case _ => traverseChildren(t) end checkOverrides @@ -1526,7 +1742,7 @@ class CheckCaptures extends Recheck, SymTransformer: def traverse(tree: Tree)(using Context) = tree match case id: Ident => val sym = id.symbol - if sym.is(Mutable, butNot = Method) && sym.owner.isTerm then + if sym.isMutableVar && sym.owner.isTerm then val enclMeth = ctx.owner.enclosingMethod if sym.enclosingMethod != enclMeth then capturedBy(sym) = enclMeth @@ -1554,7 +1770,19 @@ class CheckCaptures extends Recheck, SymTransformer: report.echo(s"$echoHeader\n$treeString\n") withCaptureSetsExplained: - super.checkUnit(unit) + def iterate(): Unit = + super.checkUnit(unit) + if !ctx.reporter.errorsReported + && (needAnotherRun + || ccConfig.alwaysRepeatRun && ccState.iterationId == 1) + then + resetIteration() + ccState.nextIteration: + setup.setupUnit(unit.tpdTree, this) + capt.println(s"**** capture checking run ${ccState.iterationId} started on ${ctx.source}") + iterate() + + iterate() checkOverrides.traverse(unit.tpdTree) postCheck(unit.tpdTree) checkSelfTypes(unit.tpdTree) @@ -1597,11 +1825,11 @@ class CheckCaptures extends Recheck, SymTransformer: inContext(ctx.fresh.setOwner(root)): checkSelfAgainstParents(root, root.baseClasses) val selfType = root.asClass.classInfo.selfType - interpolator(startingVariance = -1).traverse(selfType) + interpolate(selfType, root, startingVariance = -1) selfType match case CapturingType(_, refs: CaptureSet.Var) if !root.isEffectivelySealed - && !refs.elems.exists(_.isRootCapability) + && !refs.isUniversal && !root.matchesExplicitRefsInBaseClass(refs) => // Forbid inferred self types unless they are already implied by an explicit @@ -1616,65 +1844,26 @@ class CheckCaptures extends Recheck, SymTransformer: capt.println(i"checked $root with $selfType") end 
checkSelfTypes - /** Heal ill-formed capture sets in the type parameter. - * - * We can push parameter refs into a capture set in type parameters - * that this type parameter can't see. - * For example, when capture checking the following expression: - * - * def usingLogFile[T](op: File^ => T): T = ... - * - * usingLogFile[box ?1 () -> Unit] { (f: File^) => () => { f.write(0) } } - * - * We may propagate `f` into ?1, making ?1 ill-formed. - * This also causes soundness issues, since `f` in ?1 should be widened to `cap`, - * giving rise to an error that `cap` cannot be included in a boxed capture set. - * - * To solve this, we still allow ?1 to capture parameter refs like `f`, but - * compensate this by pushing the widened capture set of `f` into ?1. - * This solves the soundness issue caused by the ill-formness of ?1. + /** Check ill-formed capture sets in a type parameter. We used to be able to + * push parameter refs into a capture set in type parameters that this type + * parameter can't see. We used to heal this by replacing illegal refs by their + * underlying capture sets. But now these should no longer be necessary, so + * instead of errors we use assertions. */ - private def healTypeParam(tree: Tree, paramName: TypeName, meth: Symbol)(using Context): Unit = + private def checkTypeParam(tree: Tree, paramName: TypeName, meth: Symbol)(using Context): Unit = val checker = new TypeTraverser: private var allowed: SimpleIdentitySet[TermParamRef] = SimpleIdentitySet.empty - private def isAllowed(ref: CaptureRef): Boolean = ref match - case ref: TermParamRef => allowed.contains(ref) - case _ => true - - private def healCaptureSet(cs: CaptureSet): Unit = - cs.ensureWellformed: elem => - ctx ?=> - var seen = new util.HashSet[CaptureRef] - def recur(ref: CaptureRef): Unit = ref.stripReach match - case ref: TermParamRef - if !allowed.contains(ref) && !seen.contains(ref) => - seen += ref - if ref.isMaxCapability then - report.error(i"escaping local reference $ref", tree.srcPos) - else - val widened = ref.captureSetOfInfo - val added = widened.filter(isAllowed(_)) - capt.println(i"heal $ref in $cs by widening to $added") - if !added.subCaptures(cs, frozen = false).isOK then - val location = if meth.exists then i" of ${meth.showLocated}" else "" - val paramInfo = - if ref.paramName.info.kind.isInstanceOf[UniqueNameKind] - then i"${ref.paramName} from ${ref.binder}" - else i"${ref.paramName}" - val debugSetInfo = if ctx.settings.YccDebug.value then i" $cs" else "" - report.error( - i"local reference $paramInfo leaks into outer capture set$debugSetInfo of type parameter $paramName$location", - tree.srcPos) - else - widened.elems.foreach(recur) - case _ => - recur(elem) + private def checkCaptureSet(cs: CaptureSet): Unit = + for elem <- cs.elems do + elem.stripReach match + case ref: TermParamRef => assert(allowed.contains(ref)) + case _ => def traverse(tp: Type) = tp match case CapturingType(parent, refs) => - healCaptureSet(refs) + checkCaptureSet(refs) traverse(parent) case defn.RefinedFunctionOf(rinfo: MethodType) => traverse(rinfo) @@ -1689,7 +1878,7 @@ class CheckCaptures extends Recheck, SymTransformer: if tree.isInstanceOf[InferredTypeTree] then checker.traverse(tree.nuType) - end healTypeParam + end checkTypeParam /** Under the unsealed policy: Arrays are like vars, check that their element types * do not contains `cap` (in fact it would work also to check on array creation @@ -1713,9 +1902,7 @@ class CheckCaptures extends Recheck, SymTransformer: traverseChildren(t) check.traverse(tp) - /** 
Perform the following kinds of checks - * - Check that arguments of TypeApplys and AppliedTypes conform to their bounds. - * - Heal ill-formed capture sets of type parameters. See `healTypeParam`. + /** Check that arguments of TypeApplys and AppliedTypes conform to their bounds. */ def postCheck(unit: tpd.Tree)(using Context): Unit = val checker = new TreeTraverser: @@ -1733,16 +1920,17 @@ class CheckCaptures extends Recheck, SymTransformer: val normArgs = args.lazyZip(tl.paramInfos).map: (arg, bounds) => arg.withType(arg.nuType.forceBoxStatus( bounds.hi.isBoxedCapturing | bounds.lo.isBoxedCapturing)) - checkBounds(normArgs, tl) - args.lazyZip(tl.paramNames).foreach(healTypeParam(_, _, fun.symbol)) + withCollapsedFresh: // OK? We need this since bounds use `cap` instead of `fresh` + checkBounds(normArgs, tl) + if ccConfig.postCheckCapturesets then + args.lazyZip(tl.paramNames).foreach(checkTypeParam(_, _, fun.symbol)) case _ => - case tree: TypeTree if !ccConfig.useSealed => - checkArraysAreSealedIn(tree.tpe, tree.srcPos) case _ => end check end checker checker.traverse(unit)(using ctx.withOwner(defn.RootClass)) + if ccConfig.useSepChecks then SepCheck(this).traverse(unit) if !ctx.reporter.errorsReported then // We dont report errors here if previous errors were reported, because other // errors often result in bad applied types, but flagging these bad types gives @@ -1751,7 +1939,9 @@ class CheckCaptures extends Recheck, SymTransformer: def traverse(t: Tree)(using Context) = t match case tree: InferredTypeTree => case tree: New => - case tree: TypeTree => checkAppliedTypesIn(tree.withType(tree.nuType)) + case tree: TypeTree => + withCollapsedFresh: + checkAppliedTypesIn(tree.withType(tree.nuType)) case _ => traverseChildren(t) checkApplied.traverse(unit) end postCheck diff --git a/compiler/src/dotty/tools/dotc/cc/Existential.scala b/compiler/src/dotty/tools/dotc/cc/Existential.scala deleted file mode 100644 index ea979e0b9f7f..000000000000 --- a/compiler/src/dotty/tools/dotc/cc/Existential.scala +++ /dev/null @@ -1,385 +0,0 @@ -package dotty.tools -package dotc -package cc - -import core.* -import Types.*, Symbols.*, Contexts.*, Annotations.*, Flags.* -import CaptureSet.IdempotentCaptRefMap -import StdNames.nme -import ast.tpd.* -import Decorators.* -import typer.ErrorReporting.errorType -import Names.TermName -import NameKinds.ExistentialBinderName -import NameOps.isImpureFunction -import reporting.Message - -/** - -Handling existentials in CC: - - - We generally use existentials only in function and method result types - - All occurrences of an EX-bound variable appear co-variantly in the bound type - -In Setup: - - - Convert occurrences of `cap` in function results to existentials. Precise rules below. - - Conversions are done in two places: - - + As part of mapping from local types of parameters and results to infos of methods. - The local types just use `cap`, whereas the result type in the info uses EX-bound variables. - + When converting functions or methods appearing in explicitly declared types. - Here again, we only replace cap's in fucntion results. - - - Conversion is done with a BiTypeMap in `Existential.mapCap`. - -In reckeckApply and recheckTypeApply: - - - If an EX is toplevel in the result type, replace its bound variable - occurrences with `cap`. 
- -Level checking and avoidance: - - - Environments, capture refs, and capture set variables carry levels - - + levels start at 0 - + The level of a block or template statement sequence is one higher than the level of - its environment - + The level of a TermRef is the level of the environment where its symbol is defined. - + The level of a ThisType is the level of the statements of the class to which it beloongs. - + The level of a TermParamRef is currently -1 (i.e. TermParamRefs are not yet checked using this system) - + The level of a capture set variable is the level of the environment where it is created. - - - Variables also carry info whether they accept `cap` or not. Variables introduced under a box - don't, the others do. - - - Capture set variables do not accept elements of level higher than the variable's level - - - We use avoidance to heal such cases: If the level-incorrect ref appears - + covariantly: widen to underlying capture set, reject if that is cap and the variable does not allow it - + contravariantly: narrow to {} - + invarianty: reject with error - -In cv-computation (markFree): - - - Reach capabilities x* of a parameter x cannot appear in the capture set of - the owning method. They have to be widened to dcs(x), or, where this is not - possible, it's an error. - -In box adaptation: - - - Check that existential variables are not boxed or unboxed. - -Subtype rules - - - new alphabet: existentially bound variables `a`. - - they can be stored in environments Gamma. - - they are alpha-renable, usual hygiene conditions apply - - Gamma |- EX a.T <: U - if Gamma, a |- T <: U - - Gamma |- T <: EX a.U - if exists capture set C consisting of capture refs and ex-bound variables - bound in Gamma such that Gamma |- T <: [a := C]U - -Representation: - - EX a.T[a] is represented as a dependent function type - - (a: Exists) => T[a]] - - where Exists is defined in caps like this: - - sealed trait Exists extends Capability - - The defn.RefinedFunctionOf extractor will exclude existential types from - its results, so only normal refined functions match. - - Let `boundvar(ex)` be the TermParamRef defined by the existential type `ex`. - -Subtype checking algorithm, general scheme: - - Maintain two structures in TypeComparer: - - openExistentials: List[TermParamRef] - assocExistentials: Map[TermParamRef, List[TermParamRef]] - - `openExistentials` corresponds to the list of existential variables stored in the environment. - `assocExistentials` maps existential variables bound by existentials appearing on the right - to the value of `openExistentials` at the time when the existential on the right was dropped. - -Subtype checking algorithm, steps to add for tp1 <:< tp2: - - If tp1 is an existential EX a.tp1a: - - val saved = openExistentials - openExistentials = boundvar(tp1) :: openExistentials - try tp1a <:< tp2 - finally openExistentials = saved - - If tp2 is an existential EX a.tp2a: - - val saved = assocExistentials - assocExistentials = assocExistentials + (boundvar(tp2) -> openExistentials) - try tp1 <:< tp2a - finally assocExistentials = saved - - If tp2 is an existentially bound variable: - assocExistentials(tp2).isDefined - && (assocExistentials(tp2).contains(tp1) || tp1 is not existentially bound) - -Subtype checking algorithm, comparing two capture sets CS1 <:< CS2: - - We need to map the (possibly to-be-added) existentials in CS1 to existentials - in CS2 so that we can compare them. We use `assocExistentals` for that: - To map an EX-variable V1 in CS1, pick the last (i.e. 
outermost, leading to the smallest - type) EX-variable in `assocExistentials` that has V1 in its possible instances. - To go the other way (and therby produce a BiTypeMap), map an EX-variable - V2 in CS2 to the first (i.e. innermost) EX-variable it can be instantiated to. - If either direction is not defined, we choose a special "bad-existetal" value - that represents and out-of-scope existential. This leads to failure - of the comparison. - -Existential source syntax: - - Existential types are ususally not written in source, since we still allow the `^` - syntax that can express most of them more concesely (see below for translation rules). - But we should also allow to write existential types explicity, even if it ends up mainly - for debugging. To express them, we use the encoding with `Exists`, so a typical - expression of an existential would be - - (x: Exists) => A ->{x} B - - Existential types can only at the top level of the result type - of a function or method. - -Restrictions on Existential Types: (to be implemented if we want to -keep the source syntax for users). - - - An existential capture ref must be the only member of its set. This is - intended to model the idea that existential variables effectibely range - over capture sets, not capture references. But so far our calculus - and implementation does not yet acoommodate first-class capture sets. - - Existential capture refs must appear co-variantly in their bound type - - So the following would all be illegal: - - EX x.C^{x, io} // error: multiple members - EX x.() => EX y.C^{x, y} // error: multiple members - EX x.C^{x} ->{x} D // error: contra-variant occurrence - EX x.Set[C^{x}] // error: invariant occurrence - -Expansion of ^: - - We expand all occurrences of `cap` in the result types of functions or methods - to existentially quantified types. Nested scopes are expanded before outer ones. - - The expansion algorithm is then defined as follows: - - 1. In a result type, replace every occurrence of ^ with a fresh existentially - bound variable and quantify over all variables such introduced. - - 2. After this step, type aliases are expanded. If aliases have aliases in arguments, - the outer alias is expanded before the aliases in the arguments. Each time an alias - is expanded that reveals a `^`, apply step (1). - - 3. The algorithm ends when no more alieases remain to be expanded. - - Examples: - - - `A => B` is an alias type that expands to `(A -> B)^`, therefore - `() -> A => B` expands to `() -> EX c. A ->{c} B`. - - - `() => Iterator[A => B]` expands to `() => EX c. Iterator[A ->{c} B]` - - - `A -> B^` expands to `A -> EX c.B^{c}`. - - - If we define `type Fun[T] = A -> T`, then `() -> Fun[B^]` expands to `() -> EX c.Fun[B^{c}]`, which - dealiases to `() -> EX c.A -> B^{c}`. 
- - - If we define - - type F = A -> Fun[B^] - - then the type alias expands to - - type F = A -> EX c.A -> B^{c} -*/ -object Existential: - - type Carrier = RefinedType - - def unapply(tp: Carrier)(using Context): Option[(TermParamRef, Type)] = - tp.refinedInfo match - case mt: MethodType - if isExistentialMethod(mt) && defn.isNonRefinedFunction(tp.parent) => - Some(mt.paramRefs.head, mt.resultType) - case _ => None - - /** Create method type in the refinement of an existential type */ - private def exMethodType(using Context)( - mk: TermParamRef => Type, - boundName: TermName = ExistentialBinderName.fresh() - ): MethodType = - MethodType(boundName :: Nil)( - mt => defn.Caps_Exists.typeRef :: Nil, - mt => mk(mt.paramRefs.head)) - - /** Create existential */ - def apply(mk: TermParamRef => Type)(using Context): Type = - exMethodType(mk).toFunctionType(alwaysDependent = true) - - /** Create existential if bound variable appears in result of `mk` */ - def wrap(mk: TermParamRef => Type)(using Context): Type = - val mt = exMethodType(mk) - if mt.isResultDependent then mt.toFunctionType() else mt.resType - - extension (tp: Carrier) - def derivedExistentialType(core: Type)(using Context): Type = tp match - case Existential(boundVar, unpacked) => - if core eq unpacked then tp - else apply(bv => core.substParam(boundVar, bv)) - case _ => - core - - /** Map top-level existentials to `cap`. Do the same for existentials - * in function results if all preceding arguments are known to be always pure. - */ - def toCap(tp: Type)(using Context): Type = tp.dealiasKeepAnnots match - case Existential(boundVar, unpacked) => - val transformed = unpacked.substParam(boundVar, defn.captureRoot.termRef) - transformed match - case FunctionOrMethod(args, res @ Existential(_, _)) - if args.forall(_.isAlwaysPure) => - transformed.derivedFunctionOrMethod(args, toCap(res)) - case _ => - transformed - case tp1 @ CapturingType(parent, refs) => - tp1.derivedCapturingType(toCap(parent), refs) - case tp1 @ AnnotatedType(parent, ann) => - tp1.derivedAnnotatedType(toCap(parent), ann) - case _ => tp - - /** Map existentials at the top-level and in all nested result types to `cap` - */ - def toCapDeeply(tp: Type)(using Context): Type = tp.dealiasKeepAnnots match - case Existential(boundVar, unpacked) => - toCapDeeply(unpacked.substParam(boundVar, defn.captureRoot.termRef)) - case tp1 @ FunctionOrMethod(args, res) => - val tp2 = tp1.derivedFunctionOrMethod(args, toCapDeeply(res)) - if tp2 ne tp1 then tp2 else tp - case tp1 @ CapturingType(parent, refs) => - tp1.derivedCapturingType(toCapDeeply(parent), refs) - case tp1 @ AnnotatedType(parent, ann) => - tp1.derivedAnnotatedType(toCapDeeply(parent), ann) - case _ => tp - - /** Knowing that `tp` is a function type, is an alias to a function other - * than `=>`? - */ - private def isAliasFun(tp: Type)(using Context) = tp match - case AppliedType(tycon, _) => !defn.isFunctionSymbol(tycon.typeSymbol) - case _ => false - - /** Replace all occurrences of `cap` in parts of this type by an existentially bound - * variable. If there are such occurrences, or there might be in the future due to embedded - * capture set variables, create an existential with the variable wrapping the type. - * Stop at function or method types since these have been mapped before. 
- */ - def mapCap(tp: Type, fail: Message => Unit)(using Context): Type = - var needsWrap = false - - abstract class CapMap extends BiTypeMap: - override def mapOver(t: Type): Type = t match - case t @ FunctionOrMethod(args, res) if variance > 0 && !isAliasFun(t) => - t // `t` should be mapped in this case by a different call to `mapCap`. - case Existential(_, _) => - t - case t: (LazyRef | TypeVar) => - mapConserveSuper(t) - case _ => - super.mapOver(t) - - class Wrap(boundVar: TermParamRef) extends CapMap: - def apply(t: Type) = t match - case t: TermRef if t.isRootCapability => - if variance > 0 then - needsWrap = true - boundVar - else - if variance == 0 then - fail(em"""$tp captures the root capability `cap` in invariant position""") - // we accept variance < 0, and leave the cap as it is - super.mapOver(t) - case t @ CapturingType(parent, refs: CaptureSet.Var) => - if variance > 0 then needsWrap = true - super.mapOver(t) - case defn.FunctionNOf(args, res, contextual) if t.typeSymbol.name.isImpureFunction => - if variance > 0 then - needsWrap = true - super.mapOver: - defn.FunctionNOf(args, res, contextual).capturing(boundVar.singletonCaptureSet) - else mapOver(t) - case _ => - mapOver(t) - //.showing(i"mapcap $t = $result") - - lazy val inverse = new BiTypeMap: - def apply(t: Type) = t match - case t: TermParamRef if t eq boundVar => defn.captureRoot.termRef - case _ => mapOver(t) - def inverse = Wrap.this - override def toString = "Wrap.inverse" - end Wrap - - val wrapped = apply(Wrap(_)(tp)) - if needsWrap then wrapped else tp - end mapCap - - /** Map `cap` in function results to fresh existentials */ - def mapCapInResults(fail: Message => Unit)(using Context): TypeMap = new: - - def mapFunOrMethod(tp: Type, args: List[Type], res: Type): Type = - val args1 = atVariance(-variance)(args.map(this)) - val res1 = res match - case res: MethodType => mapFunOrMethod(res, res.paramInfos, res.resType) - case res: PolyType => mapFunOrMethod(res, Nil, res.resType) // TODO: Also map bounds of PolyTypes - case _ => mapCap(apply(res), fail) - //.showing(i"map cap res $res / ${apply(res)} of $tp = $result") - tp.derivedFunctionOrMethod(args1, res1) - - def apply(t: Type): Type = t match - case FunctionOrMethod(args, res) if variance > 0 && !isAliasFun(t) => - mapFunOrMethod(t, args, res) - case CapturingType(parent, refs) => - t.derivedCapturingType(this(parent), refs) - case Existential(_, _) => - t - case t: (LazyRef | TypeVar) => - mapConserveSuper(t) - case _ => - mapOver(t) - end mapCapInResults - - /** Is `mt` a method represnting an existential type when used in a refinement? */ - def isExistentialMethod(mt: TermLambda)(using Context): Boolean = mt.paramInfos match - case (info: TypeRef) :: rest => info.symbol == defn.Caps_Exists && rest.isEmpty - case _ => false - - /** Is `ref` this an existentially bound variable? */ - def isExistentialVar(ref: CaptureRef)(using Context) = ref match - case ref: TermParamRef => isExistentialMethod(ref.binder) - case _ => false - - /** An value signalling an out-of-scope existential that should - * lead to a compare failure. 
- */
-  def badExistential(using Context): TermParamRef =
-    exMethodType(identity, nme.OOS_EXISTENTIAL).paramRefs.head
-
-  def isBadExistential(ref: CaptureRef) = ref match
-    case ref: TermParamRef => ref.paramName == nme.OOS_EXISTENTIAL
-    case _ => false
-
-end Existential
diff --git a/compiler/src/dotty/tools/dotc/cc/SepCheck.scala b/compiler/src/dotty/tools/dotc/cc/SepCheck.scala
new file mode 100644
index 000000000000..6dad0e9a2ff7
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/cc/SepCheck.scala
@@ -0,0 +1,965 @@
+package dotty.tools
+package dotc
+package cc
+import ast.tpd
+import collection.mutable
+
+import core.*
+import Symbols.*, Types.*, Flags.*
+import Contexts.*, Names.*, Flags.*, Symbols.*, Decorators.*
+import CaptureSet.{Refs, emptyRefs, HiddenSet}
+import config.Printers.capt
+import StdNames.nme
+import util.{SimpleIdentitySet, EqHashMap, SrcPos}
+import tpd.*
+import reflect.ClassTag
+import reporting.trace
+import Capabilities.*
+
+/** The separation checker is a tree traverser that is run after capture checking.
+ *  It checks tree nodes for various separation conditions, explained in the
+ *  methods below. Rough summary:
+ *
+ *   - Hidden sets of arguments must not be referred to in the same application
+ *   - Hidden sets of (result-) types must not be referred to later in the same scope.
+ *   - Returned hidden sets can only refer to @consume parameters.
+ *   - If returned hidden sets refer to an enclosing this, the reference must be
+ *     from a @consume method.
+ *   - Consumed entities cannot be used subsequently.
+ *   - Entities cannot be consumed in a loop.
+ */
+object SepCheck:
+
+  /** Enumerates kinds of captures encountered so far */
+  enum Captures:
+    case None
+    case Explicit   // one or more explicitly declared captures
+    case Hidden     // exactly one hidden capture
+    case NeedsCheck // one hidden capture and one other capture (hidden or declared)
+
+    def add(that: Captures): Captures =
+      if this == None then that
+      else if that == None then this
+      else if this == Explicit && that == Explicit then Explicit
+      else NeedsCheck
+  end Captures
+
+  /** The role in which a checked type appears, used for composing error messages */
+  enum TypeRole:
+    case Result(sym: Symbol, inferred: Boolean)
+    case Argument(arg: Tree)
+    case Qualifier(qual: Tree, meth: Symbol)
+
+    /** If this is a Result role, the associated symbol, otherwise NoSymbol */
+    def dclSym = this match
+      case Result(sym, _) => sym
+      case _ => NoSymbol
+
+    /** A textual description of this role */
+    def description(using Context): String = this match
+      case Result(sym, inferred) =>
+        def inferredStr = if inferred then " inferred" else ""
+        def resultStr = if sym.info.isInstanceOf[MethodicType] then " result" else ""
+        i"$sym's$inferredStr$resultStr type"
+      case TypeRole.Argument(_) =>
+        "the argument's adapted type"
+      case TypeRole.Qualifier(_, meth) =>
+        i"the type of the prefix to a call of $meth"
+  end TypeRole
+
+  /** A class for segmented sets of consumed references.
+   *  References are associated with the source positions where they first appeared.
+   *  References are compared with `eq`.
+   */
+  abstract class ConsumedSet:
+    /** The references in the set. The array should be treated as immutable in client code */
+    def refs: Array[Capability]
+
+    /** The associated source positions.
The array should be treated as immutable in client code */ + def locs: Array[SrcPos] + + /** The number of references in the set */ + def size: Int + + def toMap: Map[Capability, SrcPos] = refs.take(size).zip(locs).toMap + + def show(using Context) = + s"[${toMap.map((ref, loc) => i"$ref -> $loc").toList}]" + end ConsumedSet + + /** A fixed consumed set consisting of the given references `refs` and + * associated source positions `locs` + */ + class ConstConsumedSet(val refs: Array[Capability], val locs: Array[SrcPos]) extends ConsumedSet: + def size = refs.size + + /** A mutable consumed set, which is initially empty */ + class MutConsumedSet extends ConsumedSet: + var refs: Array[Capability] = new Array(4) + var locs: Array[SrcPos] = new Array(4) + var size = 0 + var peaks: Refs = emptyRefs + + private def double[T <: AnyRef : ClassTag](xs: Array[T]): Array[T] = + val xs1 = new Array[T](xs.length * 2) + xs.copyToArray(xs1) + xs1 + + private def ensureCapacity(added: Int): Unit = + if size + added > refs.length then + refs = double(refs) + locs = double(locs) + + /** If `ref` is in the set, its associated source position, otherwise `null` */ + def get(ref: Capability): SrcPos | Null = + var i = 0 + while i < size && (refs(i) ne ref) do i += 1 + if i < size then locs(i) else null + + def clashing(ref: Capability)(using Context): SrcPos | Null = + val refPeaks = ref.peaks + if !peaks.sharedWith(refPeaks).isEmpty then + var i = 0 + while i < size && refs(i).peaks.sharedWith(refPeaks).isEmpty do + i += 1 + assert(i < size) + locs(i) + else null + + /** If `ref` is not yet in the set, add it with given source position */ + def put(ref: Capability, loc: SrcPos)(using Context): Unit = + if get(ref) == null then + ensureCapacity(1) + refs(size) = ref + locs(size) = loc + size += 1 + peaks = peaks ++ ref.peaks + + /** Add all references with their associated positions from `that` which + * are not yet in the set. + */ + def ++= (that: ConsumedSet)(using Context): Unit = + for i <- 0 until that.size do put(that.refs(i), that.locs(i)) + + /** Run `op` and return any new references it created in a separate `ConsumedSet`. + * The current mutable set is reset to its state before `op` was run. + */ + def segment(op: => Unit): ConsumedSet = + val start = size + val savedPeaks = peaks + try + op + if size == start then EmptyConsumedSet + else ConstConsumedSet(refs.slice(start, size), locs.slice(start, size)) + finally + size = start + peaks = savedPeaks + end MutConsumedSet + + val EmptyConsumedSet = ConstConsumedSet(Array(), Array()) + + case class PeaksPair(actual: Refs, hidden: Refs) + + case class DefInfo(tree: ValOrDefDef, symbol: Symbol, hidden: Refs, hiddenPeaks: Refs) + + extension (refs: Refs) + + /** The footprint of a set of references `refs` the smallest set `F` such that + * 1. if includeMax is false then no maximal capability is in `F` + * 2. all capabilities in `refs` satisfying (1) are in `F` + * 3. if `f in F` then the footprint of `f`'s info is also in `F`. 
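+ *
+ *  For instance (illustrative names, not from the sources): if `x`'s info has
+ *  capture set `{y, io}`, `y`'s info has capture set `{io}`, and `io` is a
+ *  capability whose own capture set is just the terminal `cap`, then the
+ *  footprint of `{x}` with `includeMax = false` is `{x, y, io}`: `x` is kept,
+ *  clause (3) adds `y` and `io`, and clause (1) excludes the terminal `cap`.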
+ */ + private def footprint(includeMax: Boolean = false)(using Context): Refs = + def retain(ref: Capability) = includeMax || !ref.isTerminalCapability + def recur(elems: Refs, newElems: List[Capability]): Refs = newElems match + case newElem :: newElems1 => + val superElems = newElem.captureSetOfInfo.elems.filter: superElem => + retain(superElem) && !elems.contains(superElem) + recur(elems ++ superElems, newElems1 ++ superElems.toList) + case Nil => elems + val elems: Refs = refs.filter(retain) + recur(elems, elems.toList) + + private def peaks(using Context): Refs = + def recur(seen: Refs, acc: Refs, newElems: List[Capability]): Refs = trace(i"peaks $acc, $newElems = "): + newElems match + case newElem :: newElems1 => + if seen.contains(newElem) then + recur(seen, acc, newElems1) + else newElem.stripReadOnly match + case elem: FreshCap => + if elem.hiddenSet.deps.isEmpty then recur(seen + newElem, acc + newElem, newElems1) + else + val superCaps = + if newElem.isReadOnly then elem.hiddenSet.superCaps.map(_.readOnly) + else elem.hiddenSet.superCaps + recur(seen + newElem, acc, superCaps ++ newElems) + case _ => + if newElem.isTerminalCapability + //|| newElem.isInstanceOf[TypeRef | TypeParamRef] + then recur(seen + newElem, acc, newElems1) + else recur(seen + newElem, acc, newElem.captureSetOfInfo.elems.toList ++ newElems1) + case Nil => acc + recur(emptyRefs, emptyRefs, refs.toList) + + /** The shared peaks between `refs` and `other` */ + private def sharedWith(other: Refs)(using Context): Refs = + def common(refs1: Refs, refs2: Refs) = + refs1.filter: ref => + !ref.isReadOnly && refs2.exists(_.stripReadOnly eq ref) + common(refs, other) ++ common(other, refs) + + /** The overlap of two footprint sets F1 and F2. This contains all exclusive references `r` + * such that one of the following is true: + * 1. + * - one of the sets contains `r` + * - the other contains a capability `s` or `s.rd` where `s` _covers_ `r` + * 2. + * - one of the sets contains `r.rd` + * - the other contains a capability `s` where `s` _covers_ `r` + * + * A capability `s` covers `r` if `r` can be seen as a path extension of `s`. E.g. + * if `s = x.a` and `r = x.a.b.c` then `s` covers `a`. + */ + private def overlapWith(other: Refs)(using Context): Refs = + val refs1 = refs + val refs2 = other + + /** Exclusive capabilities in refs1 that are covered by exclusive or + * stripped read-only capabilties in refs2 + * + stripped read-only capabilities in refs1 that are covered by an + * exclusive capability in refs2. + */ + def common(refs1: Refs, refs2: Refs) = + refs1.filter: ref => + ref.isExclusive && refs2.exists(_.stripReadOnly.covers(ref)) + ++ + refs1 + .filter: + case ReadOnly(ref @ TermRef(prefix: CoreCapability, _)) => + // We can get away testing only references with at least one field selection + // here since stripped readOnly references that equal a reference in refs2 + // are added by the first clause of the symmetric call to common. + refs2.exists(_.covers(prefix)) + case _ => + false + .map(_.stripReadOnly) + + common(refs, other) ++ common(other, refs) + end overlapWith + + /** The non-maximal elements hidden directly or indirectly by a maximal + * capability in `refs`. E g. if `R = {x, >}` then + * its hidden set is `{y, z}`. 
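+ *  Spelled out with illustrative names: if `R = {x, f}`, where `x` is not a
+ *  maximal capability and `f` is a fresh (maximal) capability hiding `{y, z}`,
+ *  then the hidden set of `R` is `{y, z}`: `x` contributes nothing, while `f`
+ *  contributes the capabilities it hides (and, transitively, anything those
+ *  hide in turn).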
+ */ + private def hiddenSet(using Context): Refs = + val seen: util.EqHashSet[Capability] = new util.EqHashSet + + def hiddenByElem(elem: Capability): Refs = elem match + case elem: FreshCap => elem.hiddenSet.elems ++ recur(elem.hiddenSet.elems) + case ReadOnly(elem1) => hiddenByElem(elem1).map(_.readOnly) + case _ => emptyRefs + + def recur(refs: Refs): Refs = + (emptyRefs /: refs): (elems, elem) => + if seen.add(elem) then elems ++ hiddenByElem(elem) else elems + + recur(refs) + end hiddenSet + + /** Subtract all elements that are covered by some element in `others` from this set. */ + private def deduct(others: Refs)(using Context): Refs = + refs.filter: ref => + !others.exists(_.covers(ref)) + + /** Deduct `sym` and `sym*` from `refs` */ + private def deductSymRefs(sym: Symbol)(using Context): Refs = + val ref = sym.termRef + if ref.isTrackableRef then refs.deduct(SimpleIdentitySet(ref, ref.reach)) + else refs + + end extension + + extension (ref: Capability) + def peaks(using Context): Refs = SimpleIdentitySet(ref).peaks + +class SepCheck(checker: CheckCaptures.CheckerAPI) extends tpd.TreeTraverser: + import checker.* + import SepCheck.* + + /** The set of capabilities that are hidden by a polymorphic result type + * of some previous definition. + */ + private var defsShadow: Refs = emptyRefs + + /** The previous val or def definitions encountered during separation checking + * in reverse order. These all enclose and precede the current traversal node. + */ + private var previousDefs: List[DefInfo] = Nil + + /** The set of references that were consumed so far in the current method */ + private var consumed: MutConsumedSet = MutConsumedSet() + + /** Infos about Labeled expressions enclosing the current traversal point. + * For each labeled expression, its label name, and a list buffer containing + * all consumed sets of return expressions referring to that label. + */ + private var openLabeled: List[(Name, mutable.ListBuffer[ConsumedSet])] = Nil + + /** The deep capture set of an argument or prefix widened to the formal parameter, if + * the latter contains a cap. + */ + private def formalCaptures(arg: Tree)(using Context): Refs = + arg.formalType.orElse(arg.nuType).deepCaptureSet.elems + + /** The deep capture set of the type of `tree` */ + private def captures(tree: Tree)(using Context): Refs = + tree.nuType.deepCaptureSet.elems + + // ---- Error reporting. TODO: Once these are stabilized, move to messages ----- + + + def sharedPeaksStr(shared: Refs)(using Context): String = + shared.nth(0) match + case fresh: FreshCap => + if fresh.hiddenSet.owner.exists then i"$fresh of ${fresh.hiddenSet.owner}" else i"$fresh" + case other => + i"$other" + + def overlapStr(hiddenSet: Refs, clashSet: Refs)(using Context): String = + val hiddenFootprint = hiddenSet.footprint() + val clashFootprint = clashSet.footprint() + // The overlap of footprints, or, if this is empty, the set of shared peaks. + // We prefer footprint overlap since it tends to be more informative.
+ val overlap = hiddenFootprint.overlapWith(clashFootprint) + if !overlap.isEmpty then i"${CaptureSet(overlap)}" + else + val sharedPeaks = hiddenSet.footprint(includeMax = true).sharedWith: + clashSet.footprint(includeMax = true) + assert(!sharedPeaks.isEmpty, i"no overlap for $hiddenSet vs $clashSet") + sharedPeaksStr(sharedPeaks) + + /** Report a separation failure in an application `fn(args)` + * @param fn the function + * @param parts the function prefix followed by the flattened argument list + * @param polyArg the clashing argument to a polymorphic formal + * @param clashing the argument, function prefix, or entire function application result with + * which it clashes, + * + */ + def sepApplyError(fn: Tree, parts: List[Tree], polyArg: Tree, clashing: Tree)(using Context): Unit = + val polyArgIdx = parts.indexOf(polyArg).ensuring(_ >= 0) - 1 + val clashIdx = parts.indexOf(clashing) // -1 means entire function application + def paramName(mt: Type, idx: Int): Option[Name] = mt match + case mt @ MethodType(pnames) => + if idx < pnames.length then Some(pnames(idx)) else paramName(mt.resType, idx - pnames.length) + case mt: PolyType => paramName(mt.resType, idx) + case _ => None + def formalName = paramName(fn.nuType.widen, polyArgIdx) match + case Some(pname) => i"$pname " + case _ => "" + def qualifier = methPart(fn) match + case Select(qual, _) => qual + case _ => EmptyTree + def isShowableMethod = fn.symbol.exists && !defn.isFunctionSymbol(fn.symbol.maybeOwner) + def funType = + if fn.symbol.exists && !qualifier.isEmpty then qualifier.nuType else fn.nuType + def funStr = + if isShowableMethod then i"${fn.symbol}: ${fn.symbol.info}" + else i"a function of type ${funType.widen}" + def clashArgStr = clashIdx match + case -1 => "function result" + case 0 => "function prefix" + case 1 => "first argument " + case 2 => "second argument" + case 3 => "third argument " + case n => s"${n}th argument " + def clashTypeStr = + if clashIdx == 0 && !isShowableMethod then "" // we already mentioned the type in `funStr` + else i" with type ${clashing.nuType}" + val hiddenSet = formalCaptures(polyArg).hiddenSet + val clashSet = captures(clashing) + report.error( + em"""Separation failure: argument of type ${polyArg.nuType} + |to $funStr + |corresponds to capture-polymorphic formal parameter ${formalName}of type ${polyArg.formalType} + |and hides capabilities ${CaptureSet(hiddenSet)}. + |Some of these overlap with the captures of the ${clashArgStr.trim}$clashTypeStr. + | + | Hidden set of current argument : ${CaptureSet(hiddenSet)} + | Hidden footprint of current argument : ${CaptureSet(hiddenSet.footprint())} + | Capture set of $clashArgStr : ${CaptureSet(clashSet)} + | Footprint set of $clashArgStr : ${CaptureSet(clashSet.footprint())} + | The two sets overlap at : ${overlapStr(hiddenSet, clashSet)}""", + polyArg.srcPos) + + /** Report a use/definition failure, where a previously hidden capability is + * used again. 
+ * @param tree the tree where the capability is used + * @param clashingDef the definition where the capability is previously hidden, + * or null if none exists + * @param used the uses of `tree` + * @param hidden the hidden set of the clashing def, + * or the global hidden set if no clashing def exists + */ + def sepUseError(tree: Tree, clashingDef: ValOrDefDef | Null, used: Refs, hidden: Refs)(using Context): Unit = + if clashingDef != null then + def resultStr = if clashingDef.isInstanceOf[DefDef] then " result" else "" + report.error( + em"""Separation failure: Illegal access to ${overlapStr(hidden, used)} which is hidden by the previous definition + |of ${clashingDef.symbol} with$resultStr type ${clashingDef.tpt.nuType}. + |This type hides capabilities ${CaptureSet(hidden)}""", + tree.srcPos) + else + report.error( + em"""Separation failure: illegal access to ${overlapStr(hidden, used)} which is hidden by some previous definitions. + |No clashing definitions were found. This might point to an internal error.""", + tree.srcPos) + + /** Report a failure where a previously consumed capability is used again. + * @param ref the capability that is used after being consumed + * @param loc the position where the capability was consumed + * @param pos the position where the capability was used again + */ + def consumeError(ref: Capability, loc: SrcPos, pos: SrcPos)(using Context): Unit = + report.error( + em"""Separation failure: Illegal access to $ref, which was passed to a + |@consume parameter or was used as a prefix to a @consume method on line ${loc.line + 1} + |and therefore is no longer available.""", + pos) + + /** Report a failure where a capability is consumed in a loop. + * @param ref the capability + * @param pos the position where the capability was consumed + */ + def consumeInLoopError(ref: Capability, pos: SrcPos)(using Context): Unit = + report.error( + em"""Separation failure: $ref appears in a loop, therefore it cannot + |be passed to a @consume parameter or be used as a prefix of a @consume method call.""", + pos) + + // ------------ Checks ----------------------------------------------------- + + /** Check separation between different arguments and between function + * prefix and arguments. A capability cannot be hidden by one of these arguments + * and also be either explicitly referenced or hidden by the prefix or another + * argument. "Hidden" means: the capability is in the deep capture set of the + * argument and appears in the hidden set of the corresponding (capture-polymorphic) + * formal parameter. However, we do allow explicit references to a hidden + * capability in later arguments, if the corresponding formal parameter mentions + * the parameter where the capability was hidden. For instance in + * + * def seq(x: () => Unit, y: () ->{cap, x} Unit): Unit + * def f: () ->{io} Unit + * + * we do allow `seq(f, f)` even though `{f, io}` is in the hidden set of the + * first parameter `x`, since the second parameter explicitly mentions `x` in + * its capture set. + * + * Also check separation via checkType within individual arguments widened to their + * formal parameter types. + * + * @param fn the applied function + * @param args the flattened argument lists + * @param app the entire application tree + * @param deps cross-argument dependencies: maps argument trees to + * those other arguments that were mentioned by the corresponding + * formal parameters.
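+ * + * By contrast, an illustrative sketch with a hypothetical method that declares no + * cross-parameter dependency, + * + * def par(x: () => Unit, y: () => Unit): Unit + * + * would make `par(f, f)` a separation failure: `{f, io}` is hidden by the argument + * passed to `x` and referenced again by the argument passed to `y`, and `y`'s capture + * set does not mention `x`.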
+ */ + private def checkApply(fn: Tree, args: List[Tree], app: Tree, deps: collection.Map[Tree, List[Tree]])(using Context): Unit = + val (qual, fnCaptures) = methPart(fn) match + case Select(qual, _) => (qual, qual.nuType.captureSet) + case _ => (fn, CaptureSet.empty) + var currentPeaks = PeaksPair(fnCaptures.elems.peaks, emptyRefs) + val partsWithPeaks = mutable.ListBuffer[(Tree, PeaksPair)]() += (qual -> currentPeaks) + + capt.println( + i"""check separate $fn($args), fnCaptures = $fnCaptures, + | formalCaptures = ${args.map(arg => CaptureSet(formalCaptures(arg)))}, + | actualCaptures = ${args.map(arg => CaptureSet(captures(arg)))}, + | deps = ${deps.toList}""") + val parts = qual :: args + var reported: SimpleIdentitySet[Tree] = SimpleIdentitySet.empty + + for arg <- args do + val argPeaks = PeaksPair( + captures(arg).peaks, + if arg.needsSepCheck then formalCaptures(arg).hiddenSet.peaks else emptyRefs) + val argDeps = deps(arg) + + def clashingPart(argPeaks: Refs, selector: PeaksPair => Refs): Tree = + partsWithPeaks.find: (prev, prevPeaks) => + !argDeps.contains(prev) + && !selector(prevPeaks).sharedWith(argPeaks).isEmpty + match + case Some(prev, _) => prev + case None => EmptyTree + + // 1. test argPeaks.actual against previously captured hidden sets + if !argPeaks.actual.sharedWith(currentPeaks.hidden).isEmpty then + val clashing = clashingPart(argPeaks.actual, _.hidden) + if !clashing.isEmpty then + sepApplyError(fn, parts, clashing, arg) + reported += clashing + else assert(!argDeps.isEmpty) + + if arg.needsSepCheck then + //println(i"testing $arg, formal = ${arg.formalType}, peaks = ${argPeaks.actual}/${argPeaks.hidden} against ${currentPeaks.actual}") + checkType(arg.formalType, arg.srcPos, TypeRole.Argument(arg)) + // 2. test argPeaks.hidden against previously captured actuals + if !argPeaks.hidden.sharedWith(currentPeaks.actual).isEmpty then + val clashing = clashingPart(argPeaks.hidden, _.actual) + if !clashing.isEmpty then + if !reported.contains(clashing) then + //println(i"CLASH $arg / ${argPeaks.formal} vs $clashing / ${peaksOfTree(clashing).actual} / ${captures(clashing).peaks}") + sepApplyError(fn, parts, arg, clashing) + else assert(!argDeps.isEmpty) + + partsWithPeaks += (arg -> argPeaks) + currentPeaks = PeaksPair( + currentPeaks.actual ++ argPeaks.actual, + currentPeaks.hidden ++ argPeaks.hidden) + end for + + def collectRefs(args: List[Type], res: Type) = + args.foldLeft(argCaptures(res)): (refs, arg) => + refs ++ arg.deepCaptureSet.elems + + /** The deep capture sets of all parameters of this type (if it is a function type) */ + def argCaptures(tpe: Type): Refs = tpe match + case defn.FunctionOf(args, resultType, isContextual) => + collectRefs(args, resultType) + case defn.RefinedFunctionOf(mt) => + collectRefs(mt.paramInfos, mt.resType) + case CapturingType(parent, _) => + argCaptures(parent) + case _ => + emptyRefs + + if !deps(app).isEmpty then + lazy val appPeaks = argCaptures(app.nuType).peaks + lazy val partPeaks = partsWithPeaks.toMap + for arg <- deps(app) do + if arg.needsSepCheck && !partPeaks(arg).hidden.sharedWith(appPeaks).isEmpty then + sepApplyError(fn, parts, arg, app) + end checkApply + + /** 1. Check that the capabilities used at `tree` don't overlap with + * capabilities hidden by a previous definition. + * 2. Also check that none of the used capabilities was consumed before. 
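+ * + * For instance, in the following illustrative sketch (`f` and `io` are hypothetical): + * + * val f: () ->{io} Unit = ... + * val g: () => Unit = f // the fresh capability in `g`'s type hides {f, io} + * f() // access to `f` overlaps with the hidden set of `g` + * + * the access on the last line is reported via `sepUseError`.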
+ */ + def checkUse(tree: Tree)(using Context): Unit = + val used = tree.markedFree.elems + if !used.isEmpty then + capt.println(i"check use $tree: $used") + val usedPeaks = used.peaks + val overlap = defsShadow.peaks.sharedWith(usedPeaks) + if !overlap.isEmpty then + val sym = tree.symbol + + def findClashing(prevDefs: List[DefInfo]): Option[DefInfo] = prevDefs match + case prevDef :: prevDefs1 => + if prevDef.symbol == sym then Some(prevDef) + else if !prevDef.hiddenPeaks.sharedWith(usedPeaks).isEmpty then Some(prevDef) + else findClashing(prevDefs1) + case Nil => + None + + findClashing(previousDefs) match + case Some(clashing) => + if clashing.symbol != sym then + sepUseError(tree, clashing.tree, used, clashing.hidden) + case None => + sepUseError(tree, null, used, defsShadow) + + for ref <- used do + val pos = consumed.clashing(ref) + if pos != null then consumeError(ref, pos, tree.srcPos) + end checkUse + + /** If `tp` denotes some version of a singleton capability `x.type`, the set `{x, x*}`, + * otherwise the empty set. + */ + def explicitRefs(tp: Type)(using Context): Refs = tp match + case tp: (TermRef | ThisType) if tp.isTrackableRef => SimpleIdentitySet(tp, tp.reach) + case AnnotatedType(parent, _) => explicitRefs(parent) + case AndType(tp1, tp2) => explicitRefs(tp1) ++ explicitRefs(tp2) + case OrType(tp1, tp2) => explicitRefs(tp1) ** explicitRefs(tp2) + case _ => emptyRefs + + /** Check validity of consumed references `refsToCheck`. The references are consumed + * because they are hidden in a Fresh result type or they are referred + * to in an argument to a @consume parameter or in a prefix of a @consume method -- + * which one applies is determined by the `role` parameter. + * + * This entails the following checks: + * - The reference must be defined in the same method or class as + * the access. + * - If the reference is to a term parameter, that parameter must be + * marked as @consume as well. + * - If the reference is to a `this` type of the enclosing class, the + * access must be in a @consume method. + * + * References that extend SharedCapability are excluded from checking. + * As a side effect, add all checked references with the given position `pos` + * to the global `consumed` map.
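+ * + * For example, in the following illustrative sketch (`Buffer` and `append` are hypothetical): + * + * def append(@consume buf: Buffer^, elem: Int): Buffer^ = ... + * def twice(buf: Buffer^) = append(buf, 1) // error: `buf` must be marked @consume + * def twiceOk(@consume buf: Buffer^) = append(buf, 1) // ok + * + * passing the parameter `buf` on to the @consume parameter of `append` is only legal + * if `buf` is itself declared @consume.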
+ * + * @param refsToCheck the references to check + * @param tpe the type containing those references + * @param role the role in which the type appears + * @param descr a textual description of the type and its relationship with the checked reference + * @param pos position for error reporting + */ + def checkConsumedRefs(refsToCheck: Refs, tpe: Type, role: TypeRole, descr: => String, pos: SrcPos)(using Context) = + val badParams = mutable.ListBuffer[Symbol]() + def currentOwner = role.dclSym.orElse(ctx.owner) + for hiddenRef <- refsToCheck.deductSymRefs(role.dclSym).deduct(explicitRefs(tpe)) do + if !hiddenRef.derivesFromSharedCapability then + hiddenRef.pathRoot match + case ref: TermRef => + val refSym = ref.symbol + if currentOwner.enclosingMethodOrClass.isProperlyContainedIn(refSym.maybeOwner.enclosingMethodOrClass) then + report.error(em"""Separation failure: $descr non-local $refSym""", pos) + else if refSym.is(TermParam) + && !refSym.hasAnnotation(defn.ConsumeAnnot) + && currentOwner.isContainedIn(refSym.owner) + then + badParams += refSym + case ref: ThisType => + val encl = currentOwner.enclosingMethodOrClass + if encl.isProperlyContainedIn(ref.cls) + && !encl.is(Synthetic) + && !encl.hasAnnotation(defn.ConsumeAnnot) + then + report.error( + em"""Separation failure: $descr non-local this of class ${ref.cls}. + |The access must be in a @consume method to allow this.""", + pos) + case _ => + + if badParams.nonEmpty then + def paramsStr(params: List[Symbol]): String = (params: @unchecked) match + case p :: Nil => i"${p.name}" + case p :: p2 :: Nil => i"${p.name} and ${p2.name}" + case p :: ps => i"${p.name}, ${paramsStr(ps)}" + val (pluralS, singleS) = if badParams.tail.isEmpty then ("", "s") else ("s", "") + report.error( + em"""Separation failure: $descr parameter$pluralS ${paramsStr(badParams.toList)}. + |The parameter$pluralS need$singleS to be annotated with @consume to allow this.""", + pos) + + role match + case _: TypeRole.Argument | _: TypeRole.Qualifier => + for ref <- refsToCheck do + if !ref.derivesFromSharedCapability then + consumed.put(ref, pos) + case _ => + end checkConsumedRefs + + /** Check separation conditions of type `tpe` that appears in `role`. + * 1. Check that the parts of type `tpe` are mutually separated, as defined in + * `checkParts` below. + * 2. Check the validity of all references consumed by the type, as defined in + * `checkLegalRefs` below. + */ + def checkType(tpe: Type, pos: SrcPos, role: TypeRole)(using Context): Unit = + + /** Deduct some elements from `refs` according to the role of the checked type `tpe`: + * - If the type appears as a (result-) type of a definition of `x`, deduct + * `x` and `x*`. + * - If the checked type (or, for arguments, the actual type of the argument) + * is morally a singleton type `y.type`, deduct `y` and `y*` as well. + */ + extension (refs: Refs) def pruned = + val deductedType = role match + case TypeRole.Argument(arg) => arg.tpe + case _ => tpe + refs.deductSymRefs(role.dclSym).deduct(explicitRefs(deductedType)) + + def sepTypeError(parts: List[Type], genPart: Type, otherPart: Type): Unit = + val captured = genPart.deepCaptureSet.elems + val hiddenSet = captured.hiddenSet.pruned + val clashSet = otherPart.deepCaptureSet.elems + val deepClashSet = (clashSet.footprint() ++ clashSet.hiddenSet).pruned + report.error( + em"""Separation failure in ${role.description} $tpe. + |One part, $genPart, hides capabilities ${CaptureSet(hiddenSet)}.
+ |Another part, $otherPart, captures capabilities ${CaptureSet(deepClashSet)}. + |The two sets overlap at ${overlapStr(hiddenSet, deepClashSet)}.""", + pos) + + /** Check that the parts of type `tpe` are mutually separated. + * This means that references hidden in some part of the type may not + * be explicitly referenced or hidden in some other part. + */ + def checkParts(parts: List[Type]): Unit = + var currentPeaks = PeaksPair(emptyRefs, emptyRefs) + val partsWithPeaks = mutable.ListBuffer[(Type, PeaksPair)]() + + for part <- parts do + val captured = part.deepCaptureSet.elems.pruned + val hidden = captured.hiddenSet.pruned + val actual = captured ++ hidden + val partPeaks = PeaksPair(actual.peaks, hidden.peaks) + /* + println(i"""check parts $parts + |current = ${currentPeaks.actual}/${currentPeaks.hidden} + |new = $captured/${captured.hiddenSet.pruned} + |new = ${captured.peaks}/${captured.hiddenSet.pruned.peaks}""") + */ + + def clashingPart(argPeaks: Refs, selector: PeaksPair => Refs): Type = + partsWithPeaks.find: (prev, prevPeaks) => + !selector(prevPeaks).sharedWith(argPeaks).isEmpty + match + case Some(prev, _) => prev + case None => NoType + + if !partPeaks.actual.sharedWith(currentPeaks.hidden).isEmpty then + //println(i"CLASH ${partPeaks.actual} with ${currentPeaks.hidden}") + val clashing = clashingPart(partPeaks.actual, _.hidden) + //println(i"CLASH ${partPeaks.actual} with ${currentPeaks.hidden}") + if clashing.exists then sepTypeError(parts, clashing, part) + + if !partPeaks.hidden.sharedWith(currentPeaks.actual).isEmpty then + val clashing = clashingPart(partPeaks.hidden, _.actual) + if clashing.exists then sepTypeError(parts, part, clashing) + + partsWithPeaks += (part -> partPeaks) + currentPeaks = PeaksPair( + currentPeaks.actual ++ partPeaks.actual, + currentPeaks.hidden ++ partPeaks.hidden) + end checkParts + + /** A traverser that collects part lists to check for separation conditions. + * The accumulator of type `Captures` indicates what kind of captures were + * encountered in previous parts. + */ + object traverse extends TypeAccumulator[Captures]: + + /** A stack of part lists to check. We maintain this since immediately + * checking parts when traversing the type would check innermost to outermost. + * But we want to check outermost parts first since this prioritizes errors + * that are more obvious. + */ + var toCheck: List[List[Type]] = Nil + + private val seen = util.HashSet[Symbol]() + + def apply(c: Captures, t: Type) = + if variance < 0 then c + else + val t1 = t.dealias + t1 match + case t @ AppliedType(tycon, args) => + val c1 = foldOver(Captures.None, t) + if c1 == Captures.NeedsCheck then + toCheck = (tycon :: args) :: toCheck + c.add(c1) + case t @ CapturingType(parent, cs) => + val c1 = this(c, parent) + if cs.elems.exists(_.core.isInstanceOf[FreshCap]) then c1.add(Captures.Hidden) + else if !cs.elems.isEmpty then c1.add(Captures.Explicit) + else c1 + case t: TypeRef if t.symbol.isAbstractOrParamType => + if seen.contains(t.symbol) then c + else + seen += t.symbol + apply(apply(c, t.prefix), t.info.bounds.hi) + case t => + foldOver(c, t) + + /** If `tpe` appears as a (result-) type of a definition, treat its + * hidden set minus its explicitly declared footprint as consumed. + * If `tpe` appears as an argument to a @consume parameter, treat + * its footprint as consumed. 
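+ * + * For instance, in an illustrative sketch where `f` is a hypothetical local capability, + * + * val runner: () => Unit = () => f() + * + * the fresh capability in the result type `() => Unit` hides `{f}`, so `{f}` (minus + * anything that already occurs in the type's own footprint) is passed on to + * `checkConsumedRefs`.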
+ */ + def checkLegalRefs() = role match + case TypeRole.Result(sym, _) => + if !sym.isAnonymousFunction // we don't check return types of anonymous functions + && !sym.is(Case) // We don't check so far binders in patterns since they + // have inferred universal types. TODO come back to this; + // either infer more precise types for such binders or + // "see through them" when we look at hidden sets. + then + val refs = tpe.deepCaptureSet.elems + val toCheck = refs.hiddenSet.footprint().deduct(refs.footprint()) + checkConsumedRefs(toCheck, tpe, role, i"${role.description} $tpe hides", pos) + case TypeRole.Argument(arg) => + if tpe.hasAnnotation(defn.ConsumeAnnot) then + val capts = captures(arg).footprint() + checkConsumedRefs(capts, tpe, role, i"argument to @consume parameter with type ${arg.nuType} refers to", pos) + case _ => + + if !tpe.hasAnnotation(defn.UntrackedCapturesAnnot) then + traverse(Captures.None, tpe) + traverse.toCheck.foreach(checkParts) + checkLegalRefs() + end checkType + + /** Check the (result-) type of a definition of symbol `sym` */ + def checkType(tpt: Tree, sym: Symbol)(using Context): Unit = + checkType(tpt.nuType, tpt.srcPos, + TypeRole.Result(sym, inferred = tpt.isInstanceOf[InferredTypeTree])) + + /** The list of all individual method types making up some potentially + * curried method type. + */ + private def collectMethodTypes(tp: Type): List[TermLambda] = tp match + case tp: MethodType => tp :: collectMethodTypes(tp.resType) + case tp: PolyType => collectMethodTypes(tp.resType) + case _ => Nil + + /** The inter-parameter dependencies of the function reference `fn` applied + * to the argument lists `argss`. For instance, if `f` has type + * + * f(x: A, y: B^{cap, x}, z: C^{x, y}): D + * + * then the dependencies of an application `f(a, b, c)` of type C^{y} is the map + * + * [ b -> [a] + * , c -> [a, b] + * , f(a, b, c) -> [b]] + */ + private def dependencies(fn: Tree, argss: List[List[Tree]], app: Tree)(using Context): collection.Map[Tree, List[Tree]] = + def isFunApply(sym: Symbol) = + sym.name == nme.apply && defn.isFunctionClass(sym.owner) + val mtpe = + if fn.symbol.exists && !isFunApply(fn.symbol) then fn.symbol.info + else fn.nuType.widen + val mtps = collectMethodTypes(mtpe) + assert(mtps.hasSameLengthAs(argss), i"diff for $fn: ${fn.symbol} /// $mtps /// $argss") + val mtpsWithArgs = mtps.zip(argss) + val argMap = mtpsWithArgs.toMap + val deps = mutable.HashMap[Tree, List[Tree]]().withDefaultValue(Nil) + + def recordDeps(formal: Type, actual: Tree) = + for dep <- formal.captureSet.elems.toList do + val referred = dep.stripReach match + case dep: TermParamRef => + argMap(dep.binder)(dep.paramNum) :: Nil + case dep: ThisType if dep.cls == fn.symbol.owner => + val Select(qual, _) = fn: @unchecked // TODO can we use fn instead? + qual :: Nil + case _ => + Nil + deps(actual) ++= referred + + for (mt, args) <- mtpsWithArgs; (formal, arg) <- mt.paramInfos.zip(args) do + recordDeps(formal, arg) + recordDeps(mtpe.finalResultType, app) + capt.println(i"deps for $app = ${deps.toList}") + deps + + + /** Decompose an application into a function prefix and a list of argument lists. 
+ * If some of the arguments need a separation check because they are capture polymorphic, + * perform a separation check with `checkApply`. + */ + private def traverseApply(app: Tree)(using Context): Unit = + def recur(tree: Tree, argss: List[List[Tree]]): Unit = tree match + case Apply(fn, args) => recur(fn, args :: argss) + case TypeApply(fn, args) => recur(fn, argss) // skip type arguments + case _ => + if argss.nestedExists(_.needsSepCheck) then + checkApply(tree, argss.flatten, app, dependencies(tree, argss, app)) + recur(app, Nil) + + /** Is `tree` an application of `caps.unsafe.unsafeAssumeSeparate`? */ + def isUnsafeAssumeSeparate(tree: Tree)(using Context): Boolean = tree match + case tree: Apply => tree.symbol == defn.Caps_unsafeAssumeSeparate + case _ => false + + def pushDef(tree: ValOrDefDef, hiddenByDef: Refs)(using Context): Unit = + defsShadow ++= hiddenByDef + previousDefs = DefInfo(tree, tree.symbol, hiddenByDef, hiddenByDef.peaks) :: previousDefs + + /** Check (result-) type of `tree` for separation conditions using `checkType`. + * Excluded are parameters and definitions that have an `unsafeAssumeSeparate` + * application as right-hand side. + * Hidden sets of checked definitions are added to `defsShadow`. + */ + def checkValOrDefDef(tree: ValOrDefDef)(using Context): Unit = + if !tree.symbol.isOneOf(TermParamOrAccessor) && !isUnsafeAssumeSeparate(tree.rhs) then + checkType(tree.tpt, tree.symbol) + capt.println(i"sep check def ${tree.symbol}: ${tree.tpt} with ${captures(tree.tpt).hiddenSet.footprint()}") + pushDef(tree, captures(tree.tpt).hiddenSet.deductSymRefs(tree.symbol)) + + def inSection[T](op: => T)(using Context): T = + val savedDefsShadow = defsShadow + val savedPreviousDefs = previousDefs + try op + finally + previousDefs = savedPreviousDefs + defsShadow = savedDefsShadow + + def traverseSection[T](tree: Tree)(using Context) = inSection(traverseChildren(tree)) + + /** Traverse `tree` and perform separation checks everywhere */ + def traverse(tree: Tree)(using Context): Unit = + if !isUnsafeAssumeSeparate(tree) then trace(i"checking separate $tree"): + checkUse(tree) + tree match + case tree @ Select(qual, _) if tree.symbol.is(Method) && tree.symbol.hasAnnotation(defn.ConsumeAnnot) => + traverseChildren(tree) + checkConsumedRefs( + captures(qual).footprint(), qual.nuType, + TypeRole.Qualifier(qual, tree.symbol), + i"call prefix of @consume ${tree.symbol} refers to", qual.srcPos) + case tree: GenericApply => + traverseChildren(tree) + tree.tpe match + case _: MethodOrPoly => + case _ => traverseApply(tree) + case _: Block | _: Template => + traverseSection(tree) + case tree: ValDef => + traverseChildren(tree) + checkValOrDefDef(tree) + case tree: DefDef => + if tree.symbol.isInlineMethod then + // We currently skip inline methods since these seem to generate + // spurious recheck completions.
Test case is i20237.scala + capt.println(i"skipping sep check of inline def ${tree.symbol}") + else inSection: + consumed.segment: + for params <- tree.paramss; case param: ValDef <- params do + pushDef(param, emptyRefs) + traverseChildren(tree) + checkValOrDefDef(tree) + case If(cond, thenp, elsep) => + traverse(cond) + val thenConsumed = consumed.segment(traverse(thenp)) + val elseConsumed = consumed.segment(traverse(elsep)) + consumed ++= thenConsumed + consumed ++= elseConsumed + case tree @ Labeled(bind, expr) => + val consumedBuf = mutable.ListBuffer[ConsumedSet]() + openLabeled = (bind.name, consumedBuf) :: openLabeled + traverse(expr) + for cs <- consumedBuf do consumed ++= cs + openLabeled = openLabeled.tail + case Return(expr, from) => + val retConsumed = consumed.segment(traverse(expr)) + from match + case Ident(name) => + for (lbl, consumedBuf) <- openLabeled do + if lbl == name then + consumedBuf += retConsumed + case _ => + case Match(sel, cases) => + // Matches without returns might still be kept after pattern matching to + // encode table switches. + traverse(sel) + val caseConsumed = for cas <- cases yield consumed.segment(traverse(cas)) + caseConsumed.foreach(consumed ++= _) + case tree: TypeDef if tree.symbol.isClass => + consumed.segment: + traverseChildren(tree) + case tree: WhileDo => + val loopConsumed = consumed.segment(traverseChildren(tree)) + if loopConsumed.size != 0 then + val (ref, pos) = loopConsumed.toMap.head + consumeInLoopError(ref, pos) + case _ => + traverseChildren(tree) +end SepCheck \ No newline at end of file diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index a5e96f1f9ce2..69bfa96df836 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -11,15 +11,18 @@ import config.Feature import config.Printers.{capt, captDebug} import ast.tpd, tpd.* import transform.{PreRecheck, Recheck}, Recheck.* -import CaptureSet.{IdentityCaptRefMap, IdempotentCaptRefMap} import Synthetics.isExcluded import util.SimpleIdentitySet +import util.chaining.* import reporting.Message import printing.{Printer, Texts}, Texts.{Text, Str} import collection.mutable import CCState.* import dotty.tools.dotc.util.NoSourcePosition import CheckCaptures.CheckerAPI +import NamerOps.methodType +import NameKinds.{CanThrowEvidenceName, TryOwnerName} +import Capabilities.* /** Operations accessed from CheckCaptures */ trait SetupAPI: @@ -50,6 +53,15 @@ object Setup: Some((res, exc)) case _ => None + + def firstCanThrowEvidence(body: Tree)(using Context): Option[Tree] = body match + case Block(stats, expr) => + if stats.isEmpty then firstCanThrowEvidence(expr) + else stats.find: + case vd: ValDef => vd.symbol.name.is(CanThrowEvidenceName) + case _ => false + case _ => None + end Setup import Setup.* @@ -85,7 +97,7 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: /** Drops `private` from the flags of `symd` provided it is * a parameter accessor that's not `constructorOnly` or `uncheckedCaptured` * and that contains at least one @retains in co- or in-variant position. - * The @retains mught be implicit for a type deriving from `Capability`. + * The @retains might be implicit for a type deriving from `Capability`. 
*/ private def newFlagsFor(symd: SymDenotation)(using Context): FlagSet = @@ -94,12 +106,13 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: def apply(x: Boolean, tp: Type): Boolean = if x then true else if tp.derivesFromCapability && variance >= 0 then true - else tp match + else tp.dealiasKeepAnnots match case AnnotatedType(_, ann) if ann.symbol.isRetains && variance >= 0 => true case t: TypeRef if t.symbol.isAbstractOrParamType && !seen.contains(t.symbol) => seen += t.symbol apply(x, t.info.bounds.hi) - case _ => foldOver(x, tp) + case tp1 => + foldOver(x, tp1) def apply(tp: Type): Boolean = apply(false, tp) if symd.symbol.isRefiningParamAccessor @@ -132,7 +145,7 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: def mappedInfo = if toBeUpdated.contains(sym) then symd.info // don't transform symbols that will anyway be updated - else transformExplicitType(symd.info) + else transformExplicitType(symd.info, sym, freshen = true) if Synthetics.needsTransform(symd) then Synthetics.transform(symd, mappedInfo) else if isPreCC(sym) then @@ -180,6 +193,80 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: case tp: MethodOrPoly => tp // don't box results of methods outside refinements case _ => recur(tp) + private trait SetupTypeMap extends FollowAliasesMap: + private var isTopLevel = true + + protected def innerApply(tp: Type): Type + + final def apply(tp: Type) = + val saved = isTopLevel + if variance < 0 then isTopLevel = false + try tp match + case defn.RefinedFunctionOf(rinfo: MethodType) => + val rinfo1 = apply(rinfo) + if rinfo1 ne rinfo then rinfo1.toFunctionType(alwaysDependent = true) + else tp + case _ => + innerApply(tp) + finally isTopLevel = saved + + override def mapArg(arg: Type, tparam: ParamInfo): Type = + super.mapArg(Recheck.mapExprType(arg), tparam) + + /** Map parametric functions with results that have a capture set somewhere + * to dependent functions. + */ + protected def normalizeFunctions(tp: Type, original: Type, expandAlways: Boolean = false)(using Context): Type = + tp match + case AppliedType(tycon, args) + if defn.isNonRefinedFunction(tp) && isTopLevel => + // Expand if we have an applied type that underwent some addition of capture sets + val expand = expandAlways || original.match + case AppliedType(`tycon`, args0) => args0.last ne args.last + case _ => false + if expand then + val fn = depFun( + args.init, args.last, + isContextual = defn.isContextFunctionClass(tycon.classSymbol)) + .showing(i"add function refinement $tp ($tycon, ${args.init}, ${args.last}) --> $result", capt) + AnnotatedType(fn, Annotation(defn.InferredDepFunAnnot, util.Spans.NoSpan)) + else tp + case _ => tp + + /** Pull out an embedded capture set from a part of `tp` */ + def normalizeCaptures(tp: Type)(using Context): Type = tp match + case tp @ RefinedType(parent @ CapturingType(parent1, refs), rname, rinfo) => + CapturingType(tp.derivedRefinedType(parent1, rname, rinfo), refs, parent.isBoxed) + case tp: RecType => + tp.parent match + case parent @ CapturingType(parent1, refs) => + CapturingType(tp.derivedRecType(parent1), refs, parent.isBoxed) + case _ => + tp // can return `tp` here since unlike RefinedTypes, RecTypes are never created + // by `mapInferred`. Hence if the underlying type admits capture variables + // a variable was already added, and the first case above would apply. 
+ case AndType(tp1 @ CapturingType(parent1, refs1), tp2 @ CapturingType(parent2, refs2)) => + assert(tp1.isBoxed == tp2.isBoxed) + CapturingType(AndType(parent1, parent2), refs1 ** refs2, tp1.isBoxed) + case tp @ OrType(tp1 @ CapturingType(parent1, refs1), tp2 @ CapturingType(parent2, refs2)) => + assert(tp1.isBoxed == tp2.isBoxed) + CapturingType(OrType(parent1, parent2, tp.isSoft), refs1 ++ refs2, tp1.isBoxed) + case tp @ OrType(tp1 @ CapturingType(parent1, refs1), tp2) => + CapturingType(OrType(parent1, tp2, tp.isSoft), refs1, tp1.isBoxed) + case tp @ OrType(tp1, tp2 @ CapturingType(parent2, refs2)) => + CapturingType(OrType(tp1, parent2, tp.isSoft), refs2, tp2.isBoxed) + case tp @ AppliedType(tycon, args) + if !defn.isFunctionClass(tp.dealias.typeSymbol) && (tp.dealias eq tp) => + tp.derivedAppliedType(tycon, args.mapConserve(box)) + case tp: RealTypeBounds => + tp.derivedTypeBounds(tp.lo, box(tp.hi)) + case tp: LazyRef => + normalizeCaptures(tp.ref) + case _ => + tp + + end SetupTypeMap + /** Transform the type of an InferredTypeTree by performing the following transformation * steps everywhere in the type: * 1. Drop retains annotations @@ -197,9 +284,11 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: * Polytype bounds are only cleaned using step 1, but not otherwise transformed. */ private def transformInferredType(tp: Type)(using Context): Type = - def mapInferred(refine: Boolean): TypeMap = new TypeMap with FollowAliasesMap: + def mapInferred(refine: Boolean): TypeMap = new TypeMap with SetupTypeMap: override def toString = "map inferred" + var refiningNames: Set[Name] = Set() + /** Refine a possibly applied class type C where the class has tracked parameters * x_1: T_1, ..., x_n: T_n to C { val x_1: T_1^{CV_1}, ..., val x_n: T_n^{CV_n} } * where CV_1, ..., CV_n are fresh capture set variables. @@ -212,7 +301,7 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: cls.paramGetters.foldLeft(tp) { (core, getter) => if atPhase(thisPhase.next)(getter.hasTrackedParts) && getter.isRefiningParamAccessor - && !getter.is(Tracked) + && !refiningNames.contains(getter.name) // Don't add a refinement if we have already an explicit one for the same name then val getterType = mapInferred(refine = false)(tp.memberInfo(getter)).strippedDealias @@ -225,67 +314,32 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: case _ => tp case _ => tp - private var isTopLevel = true - - private def mapNested(ts: List[Type]): List[Type] = - val saved = isTopLevel - isTopLevel = false - try ts.mapConserve(this) - finally isTopLevel = saved - - def apply(tp: Type) = + def innerApply(tp: Type) = val tp1 = tp match case AnnotatedType(parent, annot) if annot.symbol.isRetains => // Drop explicit retains annotations apply(parent) - case tp @ AppliedType(tycon, args) => - val tycon1 = this(tycon) - if defn.isNonRefinedFunction(tp) then - // Convert toplevel generic function types to dependent functions - if !defn.isFunctionSymbol(tp.typeSymbol) && (tp.dealias ne tp) then - // This type is a function after dealiasing, so we dealias and recurse. - // See #15925. 
- this(tp.dealias) - else - val args0 = args.init - var res0 = args.last - val args1 = mapNested(args0) - val res1 = this(res0) - if isTopLevel then - depFun(args1, res1, - isContextual = defn.isContextFunctionClass(tycon1.classSymbol)) - .showing(i"add function refinement $tp ($tycon1, $args1, $res1) (${tp.dealias}) --> $result", capt) - else if (tycon1 eq tycon) && (args1 eq args0) && (res1 eq res0) then - tp - else - tp.derivedAppliedType(tycon1, args1 :+ res1) - else - tp.derivedAppliedType(tycon1, args.mapConserve(arg => box(this(arg)))) - case defn.RefinedFunctionOf(rinfo: MethodType) => - val rinfo1 = apply(rinfo) - if rinfo1 ne rinfo then rinfo1.toFunctionType(alwaysDependent = true) - else tp - case Existential(_, unpacked) => - // drop the existential, the bound variables will be replaced by capture set variables - apply(unpacked) - case tp: MethodType => - tp.derivedLambdaType( - paramInfos = mapNested(tp.paramInfos), - resType = this(tp.resType)) case tp: TypeLambda => // Don't recurse into parameter bounds, just cleanup any stray retains annotations - tp.derivedLambdaType( - paramInfos = tp.paramInfos.mapConserve(_.dropAllRetains.bounds), - resType = this(tp.resType)) + ccState.withoutMappedFutureElems: + tp.derivedLambdaType( + paramInfos = tp.paramInfos.mapConserve(_.dropAllRetains.bounds), + resType = this(tp.resType)) + case tp @ RefinedType(parent, rname, rinfo) => + val saved = refiningNames + refiningNames += rname + val parent1 = try this(parent) finally refiningNames = saved + tp.derivedRefinedType(parent1, rname, this(rinfo)) case _ => mapFollowingAliases(tp) - addVar(addCaptureRefinements(normalizeCaptures(tp1)), ctx.owner) - end apply + addVar( + addCaptureRefinements(normalizeCaptures(normalizeFunctions(tp1, tp))), + ctx.owner) end mapInferred try val tp1 = mapInferred(refine = true)(tp) - val tp2 = Existential.mapCapInResults(_ => assert(false))(tp1) + val tp2 = toResultInResults(NoSymbol, _ => assert(false))(tp1) if tp2 ne tp then capt.println(i"expanded inferred in ${ctx.owner}: $tp --> $tp1 --> $tp2") tp2 catch case ex: AssertionError => @@ -300,11 +354,32 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: * 3. Add universal capture sets to types deriving from Capability * 4. Map `cap` in function result types to existentially bound variables. * 5. Schedule deferred well-formed tests for types with retains annotations. - * 6. Perform normalizeCaptures + * 6. Perform normalizeCaptures */ - private def transformExplicitType(tp: Type, tptToCheck: Tree = EmptyTree)(using Context): Type = - val toCapturing = new DeepTypeMap with FollowAliasesMap: - override def toString = "expand aliases" + private def transformExplicitType(tp: Type, sym: Symbol, freshen: Boolean, tptToCheck: Tree = EmptyTree)(using Context): Type = + + def fail(msg: Message) = + if !tptToCheck.isEmpty then report.error(msg, tptToCheck.srcPos) + + /** If C derives from Capability and we have a C^cs in source, we leave it as is + * instead of expanding it to C^{cap.rd}^cs. We do this by stripping capability-generated + * universal capture sets from the parent of a CapturingType. 
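+ * + * For instance (illustrative; `FileSystem` is a hypothetical class deriving from + * `Capability`), an explicit `FileSystem^{io}` written in source stays + * `FileSystem^{io}` and is not expanded to `FileSystem^{cap.rd}^{io}`.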
+ */ + def stripImpliedCaptureSet(tp: Type): Type = tp match + case tp @ CapturingType(parent, refs) + if (refs eq CaptureSet.csImpliedByCapability) && !tp.isBoxedCapturing => + parent + case tp: AliasingBounds => + tp.derivedAlias(stripImpliedCaptureSet(tp.alias)) + case tp: RealTypeBounds => + tp.derivedTypeBounds(stripImpliedCaptureSet(tp.lo), stripImpliedCaptureSet(tp.hi)) + case _ => tp + + object toCapturing extends DeepTypeMap, SetupTypeMap: + override def toString = "transformExplicitType" + + var keepFunAliases = true + var keptFunAliases = false /** Expand $throws aliases. This is hard-coded here since $throws aliases in stdlib * are defined with `?=>` rather than `?->`. @@ -331,20 +406,38 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: CapturingType(fntpe, cs, boxed = false) else fntpe - /** If C derives from Capability and we have a C^cs in source, we leave it as is - * instead of expanding it to C^{cap}^cs. We do this by stripping capability-generated - * universal capture sets from the parent of a CapturingType. + /** Check that types extending SharedCapability don't have a `cap` in their capture set. + * TODO This is not enough. + * We need to also track that we cannot get exclusive capabilities in paths + * where some prefix derives from SharedCapability. Also, can we just + * exclude `cap`, or do we have to extend this to all exclusive capabilties? + * The problem is that we know what is exclusive in general only after capture + * checking, not before. */ - def stripImpliedCaptureSet(tp: Type): Type = tp match - case tp @ CapturingType(parent, refs) - if (refs eq defn.universalCSImpliedByCapability) && !tp.isBoxedCapturing => - parent - case _ => tp + def checkSharedOK(tp: Type): tp.type = + tp match + case CapturingType(parent, refs) + if refs.isUniversal && parent.derivesFromSharedCapability => + fail(em"$tp extends SharedCapability, so it cannot capture `cap`") + case _ => + tp - def apply(t: Type) = + /** Map references to capability classes C to C^, + * normalize captures and map to dependent functions. 
+ */ + def defaultApply(t: Type) = + if t.derivesFromCapability + && !t.isSingleton + && (!sym.isConstructor || (t ne tp.finalResultType)) + // Don't add ^ to result types of class constructors deriving from Capability + then CapturingType(t, CaptureSet.csImpliedByCapability, boxed = false) + else normalizeCaptures(mapFollowingAliases(t)) + + def innerApply(t: Type) = t match case t @ CapturingType(parent, refs) => - t.derivedCapturingType(stripImpliedCaptureSet(this(parent)), refs) + checkSharedOK: + t.derivedCapturingType(stripImpliedCaptureSet(this(parent)), refs) case t @ AnnotatedType(parent, ann) => val parent1 = this(parent) if ann.symbol.isRetains then @@ -352,103 +445,106 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: if !tptToCheck.isEmpty then checkWellformedLater(parent2, ann.tree, tptToCheck) try - CapturingType(parent2, ann.tree.toCaptureSet) + checkSharedOK: + CapturingType(parent2, ann.tree.toCaptureSet) catch case ex: IllegalCaptureRef => - report.error(em"Illegal capture reference: ${ex.getMessage.nn}", tptToCheck.srcPos) + if !tptToCheck.isEmpty then + report.error(em"Illegal capture reference: ${ex.getMessage}", tptToCheck.srcPos) parent2 + else if ann.symbol == defn.UncheckedCapturesAnnot then + makeUnchecked(apply(parent)) else t.derivedAnnotatedType(parent1, ann) case throwsAlias(res, exc) => this(expandThrowsAlias(res, exc, Nil)) + case t @ AppliedType(tycon, args) + if defn.isNonRefinedFunction(t) + && !defn.isFunctionSymbol(t.typeSymbol) && (t.dealias ne tp) => + if keepFunAliases then + // Hold off with dealising and expand in a second pass. + // This is necessary to bind existentialFresh instances to the right method binder. + keptFunAliases = true + mapOver(t) + else + // In the second pass, map the alias + apply(t.dealias) case t => - // Map references to capability classes C to C^ - if t.derivesFromCapability && !t.isSingleton && t.typeSymbol != defn.Caps_Exists - then CapturingType(t, defn.universalCSImpliedByCapability, boxed = false) - else normalizeCaptures(mapFollowingAliases(t)) + defaultApply(t) end toCapturing - def fail(msg: Message) = - if !tptToCheck.isEmpty then report.error(msg, tptToCheck.srcPos) + def transform(tp: Type): Type = + val tp1 = toCapturing(tp) + val tp2 = toResultInResults(sym, fail, toCapturing.keepFunAliases)(tp1) + val snd = if toCapturing.keepFunAliases then "" else " 2nd time" + if tp2 ne tp then capt.println(i"expanded explicit$snd in ${ctx.owner}: $tp --> $tp1 --> $tp2") + tp2 - val tp1 = toCapturing(tp) - val tp2 = Existential.mapCapInResults(fail)(tp1) - if tp2 ne tp then capt.println(i"expanded explicit in ${ctx.owner}: $tp --> $tp1 --> $tp2") - tp2 + val tp1 = transform(tp) + val tp2 = + if toCapturing.keptFunAliases then + toCapturing.keepFunAliases = false + transform(tp1) + else tp1 + val tp3 = + if sym.isType then stripImpliedCaptureSet(tp2) + else tp2 + if freshen then + capToFresh(tp3, Origin.InDecl(sym)).tap(addOwnerAsHidden(_, sym)) + else tp3 end transformExplicitType - /** Substitute parameter symbols in `from` to paramRefs in corresponding - * method or poly types `to`. We use a single BiTypeMap to do everything. 
- * @param from a list of lists of type or term parameter symbols of a curried method - * @param to a list of method or poly types corresponding one-to-one to the parameter lists - */ - private class SubstParams(from: List[List[Symbol]], to: List[LambdaType])(using Context) - extends DeepTypeMap, BiTypeMap: - - def apply(t: Type): Type = t match - case t: NamedType => - if t.prefix == NoPrefix then - val sym = t.symbol - def outer(froms: List[List[Symbol]], tos: List[LambdaType]): Type = - def inner(from: List[Symbol], to: List[ParamRef]): Type = - if from.isEmpty then outer(froms.tail, tos.tail) - else if sym eq from.head then to.head - else inner(from.tail, to.tail) - if tos.isEmpty then t - else inner(froms.head, tos.head.paramRefs) - outer(from, to) - else t.derivedSelect(apply(t.prefix)) - case _ => - mapOver(t) - - lazy val inverse = new BiTypeMap: - override def toString = "SubstParams.inverse" - def apply(t: Type): Type = t match - case t: ParamRef => - def recur(from: List[LambdaType], to: List[List[Symbol]]): Type = - if from.isEmpty then t - else if t.binder eq from.head then to.head(t.paramNum).namedType - else recur(from.tail, to.tail) - recur(to, from) - case _ => - mapOver(t) - def inverse = SubstParams.this - end SubstParams - /** Update info of `sym` for CheckCaptures phase only */ - private def updateInfo(sym: Symbol, info: Type)(using Context) = + private def updateInfo(sym: Symbol, info: Type, owner: Symbol)(using Context) = toBeUpdated += sym - sym.updateInfo(thisPhase, info, newFlagsFor(sym)) + sym.updateInfo(thisPhase, info, newFlagsFor(sym), owner) toBeUpdated -= sym /** The info of `sym` at the CheckCaptures phase */ extension (sym: Symbol) def nextInfo(using Context): Type = atPhase(thisPhase.next)(sym.info) + private def addOwnerAsHidden(tp: Type, owner: Symbol)(using Context): Unit = + val ref = owner.termRef + def add = new TypeTraverser: + var reach = false + def traverse(t: Type): Unit = t match + case t @ CapturingType(parent, refs) => + val saved = reach + reach |= t.isBoxed + try + traverse(parent) + for case fresh: FreshCap <- refs.elems.iterator do // TODO: what about fresh.rd elems? 
+ if reach then fresh.hiddenSet.elems += ref.reach + else if ref.isTracked then fresh.hiddenSet.elems += ref + finally reach = saved + case _ => + traverseChildren(t) + if ref.isTrackableRef then add.traverse(tp) + end addOwnerAsHidden + /** A traverser that adds knownTypes and updates symbol infos */ def setupTraverser(checker: CheckerAPI) = new TreeTraverserWithPreciseImportContexts: import checker.* - /** Transform type of tree, and remember the transformed type as the type the tree */ - private def transformTT(tree: TypeTree, boxed: Boolean)(using Context): Unit = + /** Transform type of tree, and remember the transformed type as the type of the tree + * @pre !(boxed && sym.exists) + */ + private def transformTT(tree: TypeTree, sym: Symbol, boxed: Boolean)(using Context): Unit = if !tree.hasNuType then - val transformed = + var transformed = if tree.isInferred then transformInferredType(tree.tpe) - else transformExplicitType(tree.tpe, tptToCheck = tree) - tree.setNuType(if boxed then box(transformed) else transformed) + else transformExplicitType(tree.tpe, sym, freshen = !boxed, tptToCheck = tree) + if boxed then transformed = box(transformed) + tree.setNuType( + if sym.hasAnnotation(defn.UncheckedCapturesAnnot) then makeUnchecked(transformed) + else transformed) /** Transform the type of a val or var or the result type of a def */ def transformResultType(tpt: TypeTree, sym: Symbol)(using Context): Unit = // First step: Transform the type and record it as knownType of tpt. try - transformTT(tpt, - boxed = - sym.is(Mutable, butNot = Method) - && !ccConfig.useSealed - && !sym.hasAnnotation(defn.UncheckedCapturesAnnot), - // Under the sealed policy, we disallow root capabilities in the type of mutable - // variables, no need to box them here. - ) + transformTT(tpt, sym, boxed = false) catch case ex: IllegalCaptureRef => capt.println(i"fail while transforming result type $tpt of $sym") throw ex @@ -473,8 +569,8 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: if isExcluded(meth) then return - meth.recordLevel() - inNestedLevel: + ccState.recordLevel(meth) + ccState.inNestedLevel: inContext(ctx.withOwner(meth)): paramss.foreach(traverse) transformResultType(tpt, meth) @@ -482,7 +578,7 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: case tree @ ValDef(_, tpt: TypeTree, _) => val sym = tree.symbol - sym.recordLevel() + ccState.recordLevel(sym) val defCtx = if sym.isOneOf(TermParamOrAccessor) then ctx else ctx.withOwner(sym) inContext(defCtx): transformResultType(tpt, sym) @@ -490,34 +586,48 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: case tree @ TypeApply(fn, args) => traverse(fn) - if !defn.isTypeTestOrCast(fn.symbol) then - for case arg: TypeTree <- args do - transformTT(arg, boxed = true) // type arguments in type applications are boxed + for case arg: TypeTree <- args do + if defn.isTypeTestOrCast(fn.symbol) then + arg.setNuType( + capToFresh(arg.tpe, Origin.TypeArg(arg.tpe))) + else + transformTT(arg, NoSymbol, boxed = true) // type arguments in type applications are boxed case tree: TypeDef if tree.symbol.isClass => val sym = tree.symbol - sym.recordLevel() - inNestedLevelUnless(sym.is(Module)): + ccState.recordLevel(sym) + ccState.inNestedLevelUnless(sym.is(Module)): inContext(ctx.withOwner(sym)) traverseChildren(tree) + case tree @ TypeDef(_, rhs: TypeTree) => + transformTT(rhs, tree.symbol, boxed = false) + case tree @ SeqLiteral(elems, tpt: TypeTree) => traverse(elems) tpt.setNuType(box(transformInferredType(tpt.tpe))) - case tree: Block => 
- inNestedLevel(traverseChildren(tree)) - + case tree @ Try(body, catches, finalizer) => + val tryOwner = firstCanThrowEvidence(body) match + case Some(vd) => + newSymbol(ctx.owner, TryOwnerName.fresh(), + Method | Synthetic, ExprType(defn.NothingType), coord = tree.span) + case _ => + ctx.owner + inContext(ctx.withOwner(tryOwner)): + traverse(body) + catches.foreach(traverse) + traverse(finalizer) case _ => traverseChildren(tree) postProcess(tree) - checkProperUse(tree) + checkProperUseOrConsume(tree) end traverse /** Processing done on node `tree` after its children are traversed */ def postProcess(tree: Tree)(using Context): Unit = tree match case tree: TypeTree => - transformTT(tree, boxed = false) + transformTT(tree, NoSymbol, boxed = false) case tree: ValOrDefDef => // Make sure denotation of tree's symbol is correct val sym = tree.symbol @@ -539,92 +649,73 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: else tree.tpt.nuType // A test whether parameter signature might change. This returns true if one of - // the parameters has a new type installee. The idea here is that we store a new + // the parameters has a new type installed. The idea here is that we store a new // type only if the transformed type is different from the original. def paramSignatureChanges = tree.match case tree: DefDef => tree.paramss.nestedExists: - case param: ValDef => param.tpt.hasNuType + case param: ValDef => param.tpt.hasNuType case param: TypeDef => param.rhs.hasNuType case _ => false // A symbol's signature changes if some of its parameter types or its result type // have a new type installed here (meaning hasRememberedType is true) def signatureChanges = - tree.tpt.hasNuType && !sym.isConstructor || paramSignatureChanges - - // Replace an existing symbol info with inferred types where capture sets of - // TypeParamRefs and TermParamRefs are put in correspondence by BiTypeMaps with the - // capture sets of the types of the method's parameter symbols and result type. - def integrateRT( - info: Type, // symbol info to replace - psymss: List[List[Symbol]], // the local (type and term) parameter symbols corresponding to `info` - resType: Type, // the locally computed return type - prevPsymss: List[List[Symbol]], // the local parameter symbols seen previously in reverse order - prevLambdas: List[LambdaType] // the outer method and polytypes generated previously in reverse order - ): Type = - info match - case mt: MethodOrPoly => - val psyms = psymss.head - // TODO: the substitution does not work for param-dependent method types. - // For example, `(x: T, y: x.f.type) => Unit`. In this case, when we - // substitute `x.f.type`, `x` becomes a `TermParamRef`. But the new method - // type is still under initialization and `paramInfos` is still `null`, - // so the new `NamedType` will not have a denotation. 
- def adaptedInfo(psym: Symbol, info: mt.PInfo): mt.PInfo = mt.companion match - case mtc: MethodTypeCompanion => mtc.adaptParamInfo(psym, info).asInstanceOf[mt.PInfo] - case _ => info - mt.companion(mt.paramNames)( - mt1 => - if !paramSignatureChanges && !mt.isParamDependent && prevLambdas.isEmpty then - mt.paramInfos - else - val subst = SubstParams(psyms :: prevPsymss, mt1 :: prevLambdas) - psyms.map(psym => adaptedInfo(psym, subst(psym.nextInfo).asInstanceOf[mt.PInfo])), - mt1 => - integrateRT(mt.resType, psymss.tail, resType, psyms :: prevPsymss, mt1 :: prevLambdas) - ) - case info: ExprType => - info.derivedExprType(resType = - integrateRT(info.resType, psymss, resType, prevPsymss, prevLambdas)) - case info => - if prevLambdas.isEmpty then resType - else SubstParams(prevPsymss, prevLambdas)(resType) - - // If there's a change in the signature, update the info of `sym` - if sym.exists && signatureChanges then - val newInfo = - Existential.mapCapInResults(report.error(_, tree.srcPos)): - integrateRT(sym.info, sym.paramSymss, localReturnType, Nil, Nil) - .showing(i"update info $sym: ${sym.info} = $result", capt) - if newInfo ne sym.info then - val updatedInfo = - if sym.isAnonymousFunction - || sym.is(Param) - || sym.is(ParamAccessor) - || sym.isPrimaryConstructor + tree.tpt.hasNuType || paramSignatureChanges + def ownerChanges = + ctx.owner.name.is(TryOwnerName) + + def paramsToCap(mt: Type)(using Context): Type = mt match + case mt: MethodType => + try + mt.derivedLambdaType( + paramInfos = mt.paramInfos.map(freshToCap), + resType = paramsToCap(mt.resType)) + catch case ex: AssertionError => + println(i"error while mapping params ${mt.paramInfos} of $sym") + throw ex + case mt: PolyType => + mt.derivedLambdaType(resType = paramsToCap(mt.resType)) + case _ => mt + + // If there's a change in the signature or owner, update the info of `sym` + if sym.exists && (signatureChanges || ownerChanges) then + val updatedInfo = + if signatureChanges then + val paramSymss = sym.paramSymss + def newInfo(using Context) = // will be run in this or next phase + toResultInResults(sym, report.error(_, tree.srcPos)): + if sym.is(Method) then + paramsToCap(methodType(paramSymss, localReturnType)) + else tree.tpt.nuType + if tree.tpt.isInstanceOf[InferredTypeTree] + && !sym.is(Param) && !sym.is(ParamAccessor) then - // closures are handled specially; the newInfo is constrained from - // the expected type and only afterwards we recheck the definition - newInfo - else new LazyType: - // infos of other methods are determined from their definitions, which - // are checked on demand - def complete(denot: SymDenotation)(using Context) = - assert(ctx.phase == thisPhase.next, i"$sym") - capt.println(i"forcing $sym, printing = ${ctx.mode.is(Mode.Printing)}") - //if ctx.mode.is(Mode.Printing) then new Error().printStackTrace() - denot.info = newInfo - completeDef(tree, sym) - updateInfo(sym, updatedInfo) + val prevInfo = sym.info + new LazyType: + def complete(denot: SymDenotation)(using Context) = + assert(ctx.phase == thisPhase.next, i"$sym") + sym.info = prevInfo // set info provisionally so we can analyze the symbol in recheck + completeDef(tree, sym, this) + sym.info = newInfo + .showing(i"new info of $sym = $result", capt) + else if sym.is(Method) then + new LazyType: + def complete(denot: SymDenotation)(using Context) = + sym.info = newInfo + .showing(i"new info of $sym = $result", capt) + else newInfo + else sym.info + val updatedOwner = if ownerChanges then ctx.owner else sym.owner + updateInfo(sym, 
updatedInfo, updatedOwner) case tree: Bind => val sym = tree.symbol - updateInfo(sym, transformInferredType(sym.info)) + updateInfo(sym, transformInferredType(sym.info), sym.owner) case tree: TypeDef => tree.symbol match case cls: ClassSymbol => - inNestedLevelUnless(cls.is(Module)): + ccState.inNestedLevelUnless(cls.is(Module)): val cinfo @ ClassInfo(prefix, _, ps, decls, selfInfo) = cls.classInfo // Compute new self type @@ -644,37 +735,64 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: // Infer the self type for the rest, which is all classes without explicit // self types (to which we also add nested module classes), provided they are // neither pure, nor are publicily extensible with an unconstrained self type. - CapturingType(cinfo.selfType, CaptureSet.Var(cls, level = currentLevel)) + val cs = CaptureSet.Var(cls, level = ccState.currentLevel) + if cls.derivesFrom(defn.Caps_Capability) then + // If cls is a capability class, we need to add a fresh readonly capability to + // ensure we cannot treat the class as pure. + CaptureSet.fresh(Origin.InDecl(cls)).readOnly.subCaptures(cs) + CapturingType(cinfo.selfType, cs) // Compute new parent types val ps1 = inContext(ctx.withOwner(cls)): - ps.mapConserve(transformExplicitType(_)) + ps.mapConserve(transformExplicitType(_, NoSymbol, freshen = false)) // Install new types and if it is a module class also update module object if (selfInfo1 ne selfInfo) || (ps1 ne ps) then val newInfo = ClassInfo(prefix, cls, ps1, decls, selfInfo1) - updateInfo(cls, newInfo) + updateInfo(cls, newInfo, cls.owner) capt.println(i"update class info of $cls with parents $ps selfinfo $selfInfo to $newInfo") cls.thisType.asInstanceOf[ThisType].invalidateCaches() if cls.is(ModuleClass) then // if it's a module, the capture set of the module reference is the capture set of the self type val modul = cls.sourceModule - updateInfo(modul, CapturingType(modul.info, selfInfo1.asInstanceOf[Type].captureSet)) + val selfCaptures = selfInfo1 match + case CapturingType(_, refs) => refs + case _ => CaptureSet.empty + // Note: Can't do val selfCaptures = selfInfo1.captureSet here. + // This would potentially give stackoverflows when setup is run repeatedly. + // One test case is pos-custom-args/captures/checkbounds.scala under + // ccConfig.alwaysRepeatRun = true. + updateInfo(modul, CapturingType(modul.info, selfCaptures), modul.owner) modul.termRef.invalidateCaches() case _ => case _ => end postProcess - /** Check that @use annotations only appear on parameters and not on anonymous function parameters */ - def checkProperUse(tree: Tree)(using Context): Unit = tree match + /** Check that @use and @consume annotations only appear on parameters and not on + * anonymous function parameters + */ + def checkProperUseOrConsume(tree: Tree)(using Context): Unit = tree match case tree: MemberDef => - def useAllowed(sym: Symbol) = - (sym.is(Param) || sym.is(ParamAccessor)) && !sym.owner.isAnonymousFunction + val sym = tree.symbol + def isMethodParam = (sym.is(Param) || sym.is(ParamAccessor)) + && !sym.owner.isAnonymousFunction for ann <- tree.symbol.annotations do - if ann.symbol == defn.UseAnnot && !useAllowed(tree.symbol) then - report.error(i"Only parameters of methods can have @use annotations", tree.srcPos) + val annotCls = ann.symbol + if annotCls == defn.ConsumeAnnot then + if !(isMethodParam && sym.isTerm) + && !(sym.is(Method) && sym.owner.isClass) + then + report.error( + em"""@consume cannot be used here. 
Only member methods and their term parameters + |can have @consume annotations.""", + tree.srcPos) + else if annotCls == defn.UseAnnot then + if !isMethodParam then + report.error( + em"@use cannot be used here. Only method parameters can have @use annotations.", + tree.srcPos) case _ => - end checkProperUse + end checkProperUseOrConsume end setupTraverser // --------------- Adding capture set variables ---------------------------------- @@ -735,7 +853,7 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: case RetainingType(parent, refs) => needsVariable(parent) && !refs.tpes.exists: - case ref: TermRef => ref.isRootCapability + case ref: TermRef => ref.isCapRef case _ => false case AnnotatedType(parent, _) => needsVariable(parent) @@ -772,7 +890,7 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: /** Add a capture set variable to `tp` if necessary. */ private def addVar(tp: Type, owner: Symbol)(using Context): Type = - decorate(tp, CaptureSet.Var(owner, _, level = currentLevel)) + decorate(tp, CaptureSet.Var(owner, _, level = ccState.currentLevel)) /** A map that adds capture sets at all contra- and invariant positions * in a type where a capture set would be needed. This is used to make types @@ -780,7 +898,7 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: * We don't need to add in covariant positions since pure types are * anyway compatible with capturing types. */ - private def fluidify(using Context) = new TypeMap with IdempotentCaptRefMap: + private def fluidify(using Context) = new TypeMap: def apply(t: Type): Type = t match case t: MethodType => mapOver(t) @@ -798,37 +916,14 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: if variance > 0 then t1 else decorate(t1, Function.const(CaptureSet.Fluid)) - /** Pull out an embedded capture set from a part of `tp` */ - def normalizeCaptures(tp: Type)(using Context): Type = tp match - case tp @ RefinedType(parent @ CapturingType(parent1, refs), rname, rinfo) => - CapturingType(tp.derivedRefinedType(parent1, rname, rinfo), refs, parent.isBoxed) - case tp: RecType => - tp.parent match - case parent @ CapturingType(parent1, refs) => - CapturingType(tp.derivedRecType(parent1), refs, parent.isBoxed) - case _ => - tp // can return `tp` here since unlike RefinedTypes, RecTypes are never created - // by `mapInferred`. Hence if the underlying type admits capture variables - // a variable was already added, and the first case above would apply.
- case AndType(tp1 @ CapturingType(parent1, refs1), tp2 @ CapturingType(parent2, refs2)) => - assert(tp1.isBoxed == tp2.isBoxed) - CapturingType(AndType(parent1, parent2), refs1 ** refs2, tp1.isBoxed) - case tp @ OrType(tp1 @ CapturingType(parent1, refs1), tp2 @ CapturingType(parent2, refs2)) => - assert(tp1.isBoxed == tp2.isBoxed) - CapturingType(OrType(parent1, parent2, tp.isSoft), refs1 ++ refs2, tp1.isBoxed) - case tp @ OrType(tp1 @ CapturingType(parent1, refs1), tp2) => - CapturingType(OrType(parent1, tp2, tp.isSoft), refs1, tp1.isBoxed) - case tp @ OrType(tp1, tp2 @ CapturingType(parent2, refs2)) => - CapturingType(OrType(tp1, parent2, tp.isSoft), refs2, tp2.isBoxed) - case tp @ AppliedType(tycon, args) - if !defn.isFunctionClass(tp.dealias.typeSymbol) && (tp.dealias eq tp) => - tp.derivedAppliedType(tycon, args.mapConserve(box)) - case tp: RealTypeBounds => - tp.derivedTypeBounds(tp.lo, box(tp.hi)) - case tp: LazyRef => - normalizeCaptures(tp.ref) - case _ => - tp + /** Replace all universal capture sets in this type by */ + private def makeUnchecked(using Context): TypeMap = new TypeMap with FollowAliasesMap: + def apply(t: Type) = t match + case t @ CapturingType(parent, refs) => + val parent1 = this(parent) + if refs.containsTerminalCapability then t.derivedCapturingType(parent1, CaptureSet.Fluid) + else t + case _ => mapFollowingAliases(t) /** Run setup on a compilation unit with given `tree`. * @param recheckDef the function to run for completing a val or def @@ -856,7 +951,12 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: var retained = ann.retainedElems.toArray for i <- 0 until retained.length do val refTree = retained(i) - for ref <- refTree.toCaptureRefs do + val refs = + try refTree.toCapabilities + catch case ex: IllegalCaptureRef => + report.error(em"Illegal capture reference: ${ex.getMessage}", refTree.srcPos) + Nil + for ref <- refs do def pos = if refTree.span.exists then refTree.srcPos else if ann.span.exists then ann.srcPos @@ -867,8 +967,8 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: report.warning(em"redundant capture: $dom already accounts for $ref", pos) if ref.captureSetOfInfo.elems.isEmpty - && !ref.derivesFrom(defn.Caps_Capability) - && !ref.derivesFrom(defn.Caps_CapSet) then + && !ref.coreType.derivesFrom(defn.Caps_Capability) + && !ref.coreType.derivesFrom(defn.Caps_CapSet) then val deepStr = if ref.isReach then " deep" else "" report.error(em"$ref cannot be tracked since its$deepStr capture set is empty", pos) check(parent.captureSet, parent) @@ -876,7 +976,8 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: val others = for j <- 0 until retained.length if j != i - r <- retained(j).toCaptureRefs + r <- retained(j).toCapabilities + if !r.isTerminalCapability yield r val remaining = CaptureSet(others*) check(remaining, remaining) @@ -886,7 +987,7 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: /** Check well formed at post check time. We need to wait until after * recheck because we find out only then whether capture sets are empty or - * capture references are redundant. + * capabilities are redundant. 
*/ private def checkWellformedLater(parent: Type, ann: Tree, tpt: Tree)(using Context): Unit = if !tpt.span.isZeroExtent && enclosingInlineds.isEmpty then diff --git a/compiler/src/dotty/tools/dotc/cc/Synthetics.scala b/compiler/src/dotty/tools/dotc/cc/Synthetics.scala index 1372ebafe82f..bb2228932cb8 100644 --- a/compiler/src/dotty/tools/dotc/cc/Synthetics.scala +++ b/compiler/src/dotty/tools/dotc/cc/Synthetics.scala @@ -8,6 +8,7 @@ import StdNames.nme import Names.Name import NameKinds.DefaultGetterName import config.Printers.capt +import Capabilities.* /** Classification and transformation methods for function methods and * synthetic case class methods that need to be treated specially. @@ -77,12 +78,11 @@ object Synthetics: case tp: MethodOrPoly => tp.derivedLambdaType(resType = augmentResult(tp.resType)) case _ => - val refined = trackedParams.foldLeft(tp) { (parent, pref) => - RefinedType(parent, pref.paramName, + val refined = trackedParams.foldLeft(tp): (parent, pref) => + parent.refinedOverride(pref.paramName, CapturingType( atPhase(ctx.phase.next)(pref.underlying.stripCapturing), CaptureSet(pref))) - } CapturingType(refined, CaptureSet(trackedParams*)) if trackedParams.isEmpty then info else augmentResult(info).showing(i"augment apply/copy type $info to $result", capt) @@ -132,23 +132,26 @@ object Synthetics: val (pt: PolyType) = info: @unchecked val (mt: MethodType) = pt.resType: @unchecked val (enclThis: ThisType) = owner.thisType: @unchecked + val paramCaptures = CaptureSet(enclThis, GlobalCap) pt.derivedLambdaType(resType = MethodType(mt.paramNames)( - mt1 => mt.paramInfos.map(_.capturing(CaptureSet.universal)), + mt1 => mt.paramInfos.map(_.capturing(paramCaptures)), mt1 => CapturingType(mt.resType, CaptureSet(enclThis, mt1.paramRefs.head)))) def transformCurriedTupledCaptures(info: Type, owner: Symbol) = val (et: ExprType) = info: @unchecked val (enclThis: ThisType) = owner.thisType: @unchecked - def mapFinalResult(tp: Type, f: Type => Type): Type = - val defn.FunctionNOf(args, res, isContextual) = tp: @unchecked - if defn.isFunctionNType(res) then - defn.FunctionNOf(args, mapFinalResult(res, f), isContextual) - else + def mapFinalResult(tp: Type, f: Type => Type): Type = tp match + case FunctionOrMethod(args, res) => + tp.derivedFunctionOrMethod(args, mapFinalResult(res, f)) + case _ => f(tp) ExprType(mapFinalResult(et.resType, CapturingType(_, CaptureSet(enclThis)))) def transformCompareCaptures = - MethodType(defn.ObjectType.capturing(CaptureSet.universal) :: Nil, defn.BooleanType) + val (enclThis: ThisType) = symd.owner.thisType: @unchecked + MethodType( + defn.ObjectType.capturing(CaptureSet(GlobalCap, enclThis)) :: Nil, + defn.BooleanType) symd.copySymDenotation(info = symd.name match case DefaultGetterName(nme.copy, n) => diff --git a/compiler/src/dotty/tools/dotc/cc/ccConfig.scala b/compiler/src/dotty/tools/dotc/cc/ccConfig.scala new file mode 100644 index 000000000000..fec13d9627fb --- /dev/null +++ b/compiler/src/dotty/tools/dotc/cc/ccConfig.scala @@ -0,0 +1,61 @@ +package dotty.tools +package dotc +package cc + +import core.Contexts.Context +import config.{Feature, SourceVersion} + +object ccConfig: + + /** If enabled, use a special path in recheckClosure for closures + * to compare the result tpt of the anonymous functon with the expected + * result type. This can narrow the scope of error messages. + */ + inline val preTypeClosureResults = false + + /** If this and `preTypeClosureResults` are both enabled, disable `preTypeClosureResults` + * for eta expansions. 
This can improve some error messages. + */ + inline val handleEtaExpansionsSpecially = true + + /** Don't require @use for reach capabilities that are accessed + * only in a nested closure. This is unsound without additional + * mitigation measures, as shown by unsound-reach-5.scala. + */ + inline val deferredReaches = false + + /** Check that if a type map (which is not a BiTypeMap) maps initial capture + * set variable elements to themselves it will not map any elements added in + * the future to something else. That is, we can safely use a capture set + * variable itself as the image under the map. By default this is off since it + * is a bit expensive to check. + */ + inline val checkSkippedMaps = false + + /** Always repeat a capture checking run at least once if there are no errors + * yet. Used for stress-testing the logic for when a new capture checking run needs + * to be scheduled because a provisionally solved capture set was later extended. + * So far this happens only in very few tests. With the flag on, the logic is + * tested for all tests except neg tests. + */ + inline val alwaysRepeatRun = false + + /** After capture checking, check that no capture set contains ParamRefs that are outside + * its scope. This used to occur and was fixed by healTypeParam. It should no longer + * occur now. + */ + inline val postCheckCapturesets = false + + /** If true, do level checking for FreshCap instances */ + def useFreshLevels(using Context): Boolean = + Feature.sourceVersion.stable.isAtLeast(SourceVersion.`3.7`) + + /** If true, turn on separation checking */ + def useSepChecks(using Context): Boolean = + Feature.sourceVersion.stable.isAtLeast(SourceVersion.`3.8`) + + /** Not used currently. Handy for trying out new features */ + def newScheme(using Context): Boolean = + Feature.sourceVersion.stable.isAtLeast(SourceVersion.`3.7`) + +end ccConfig diff --git a/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala index cd44ba27df96..472ff11f21c0 100644 --- a/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala +++ b/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala @@ -4,8 +4,6 @@ package dotty.tools package dotc.classpath -import scala.language.unsafeNulls - import java.net.URL import scala.collection.mutable.ArrayBuffer import scala.collection.immutable.ArraySeq diff --git a/compiler/src/dotty/tools/dotc/classpath/ClassPathFactory.scala b/compiler/src/dotty/tools/dotc/classpath/ClassPathFactory.scala index 080f8d4e63d2..f434b365c3c9 100644 --- a/compiler/src/dotty/tools/dotc/classpath/ClassPathFactory.scala +++ b/compiler/src/dotty/tools/dotc/classpath/ClassPathFactory.scala @@ -67,7 +67,7 @@ class ClassPathFactory { for file <- files a <- ClassPath.expandManifestPath(file.absolutePath) - path = java.nio.file.Paths.get(a.toURI()).nn + path = java.nio.file.Paths.get(a.toURI()) if Files.exists(path) yield newClassPath(AbstractFile.getFile(path)) @@ -82,7 +82,7 @@ class ClassPathFactory { if (file.isJarOrZip) ZipAndJarSourcePathFactory.create(file) else if (file.isDirectory) - new DirectorySourcePath(file.file) + new DirectorySourcePath(file.file.nn) else sys.error(s"Unsupported sourcepath element: $file") } @@ -94,7 +94,7 @@ object ClassPathFactory { if (file.isJarOrZip) ZipAndJarClassPathFactory.create(file) else if (file.isDirectory) - new DirectoryClassPath(file.file) + new DirectoryClassPath(file.file.nn) else sys.error(s"Unsupported classpath element: $file") } diff --git 
a/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala index 2d659b532d7b..622e074b9047 100644 --- a/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala +++ b/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala @@ -3,8 +3,6 @@ */ package dotty.tools.dotc.classpath -import scala.language.unsafeNulls - import java.io.{File => JFile} import java.net.{URI, URL} import java.nio.file.{FileSystems, Files} @@ -119,7 +117,7 @@ trait JFileDirectoryLookup[FileEntryType <: ClassRepresentation] extends Directo protected def toAbstractFile(f: JFile): AbstractFile = f.toPath.toPlainFile protected def isPackage(f: JFile): Boolean = f.isPackage - assert(dir != null, "Directory file in DirectoryFileLookup cannot be null") + assert(dir.asInstanceOf[JFile | Null] != null, "Directory file in DirectoryFileLookup cannot be null") def asURLs: Seq[URL] = Seq(dir.toURI.toURL) def asClassPathStrings: Seq[String] = Seq(dir.getPath) @@ -216,7 +214,7 @@ final class JrtClassPath(fs: java.nio.file.FileSystem) extends ClassPath with No final class CtSymClassPath(ctSym: java.nio.file.Path, release: Int) extends ClassPath with NoSourcePaths { import java.nio.file.Path, java.nio.file.* - private val fileSystem: FileSystem = FileSystems.newFileSystem(ctSym, null: ClassLoader) + private val fileSystem: FileSystem = FileSystems.newFileSystem(ctSym, null: ClassLoader | Null) private val root: Path = fileSystem.getRootDirectories.iterator.next private val roots = Files.newDirectoryStream(root).iterator.asScala.toList diff --git a/compiler/src/dotty/tools/dotc/classpath/FileUtils.scala b/compiler/src/dotty/tools/dotc/classpath/FileUtils.scala index 4fe57a722780..e51ba29483cb 100644 --- a/compiler/src/dotty/tools/dotc/classpath/FileUtils.scala +++ b/compiler/src/dotty/tools/dotc/classpath/FileUtils.scala @@ -4,8 +4,6 @@ package dotty.tools package dotc.classpath -import scala.language.unsafeNulls - import java.io.{File => JFile, FileFilter} import java.net.URL import dotty.tools.io.AbstractFile diff --git a/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala index 0616d6c14ba6..9ed69942c596 100644 --- a/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala +++ b/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala @@ -11,11 +11,11 @@ case class VirtualDirectoryClassPath(dir: VirtualDirectory) extends ClassPath wi type F = AbstractFile // From AbstractFileClassLoader - private final def lookupPath(base: AbstractFile)(pathParts: Seq[String], directory: Boolean): AbstractFile = { - var file: AbstractFile = base + private final def lookupPath(base: AbstractFile)(pathParts: Seq[String], directory: Boolean): AbstractFile | Null = { + var file: AbstractFile | Null = base val dirParts = pathParts.init.iterator while (dirParts.hasNext) { - val dirPart = dirParts.next + val dirPart = dirParts.next() file = file.lookupName(dirPart, directory = true) if (file == null) return null diff --git a/compiler/src/dotty/tools/dotc/classpath/ZipArchiveFileLookup.scala b/compiler/src/dotty/tools/dotc/classpath/ZipArchiveFileLookup.scala index 4595f7978999..65a12c596223 100644 --- a/compiler/src/dotty/tools/dotc/classpath/ZipArchiveFileLookup.scala +++ b/compiler/src/dotty/tools/dotc/classpath/ZipArchiveFileLookup.scala @@ -3,7 +3,6 @@ */ package dotty.tools.dotc.classpath -import scala.language.unsafeNulls import 
java.io.File import java.net.URL @@ -21,7 +20,7 @@ trait ZipArchiveFileLookup[FileEntryType <: ClassRepresentation] extends Efficie val zipFile: File def release: Option[String] - assert(zipFile != null, "Zip file in ZipArchiveFileLookup cannot be null") + assert(zipFile ne null, "Zip file in ZipArchiveFileLookup cannot be null") override def asURLs: Seq[URL] = Seq(zipFile.toURI.toURL) override def asClassPathStrings: Seq[String] = Seq(zipFile.getPath) diff --git a/compiler/src/dotty/tools/dotc/config/CommandLineParser.scala b/compiler/src/dotty/tools/dotc/config/CommandLineParser.scala index 2e76561c9913..50aa89779f50 100644 --- a/compiler/src/dotty/tools/dotc/config/CommandLineParser.scala +++ b/compiler/src/dotty/tools/dotc/config/CommandLineParser.scala @@ -114,12 +114,12 @@ object CommandLineParser: def expandArg(arg: String): List[String] = val path = Paths.get(arg.stripPrefix("@")) if !Files.exists(path) then - System.err.nn.println(s"Argument file ${path.nn.getFileName} could not be found") + System.err.println(s"Argument file ${path.getFileName} could not be found") Nil else def stripComment(s: String) = s.indexOf('#') match { case -1 => s case i => s.substring(0, i) } - val lines = Files.readAllLines(path).nn - val params = lines.asScala.map(stripComment).filter(!_.nn.isEmpty).mkString(" ") + val lines = Files.readAllLines(path) + val params = lines.asScala.map(stripComment).filter(!_.isEmpty).mkString(" ") tokenize(params) class ParseException(msg: String) extends RuntimeException(msg) diff --git a/compiler/src/dotty/tools/dotc/config/Feature.scala b/compiler/src/dotty/tools/dotc/config/Feature.scala index 6df190f3147e..8fda99be6896 100644 --- a/compiler/src/dotty/tools/dotc/config/Feature.scala +++ b/compiler/src/dotty/tools/dotc/config/Feature.scala @@ -61,7 +61,8 @@ object Feature: (pureFunctions, "Enable pure functions for capture checking"), (captureChecking, "Enable experimental capture checking"), (into, "Allow into modifier on parameter types"), - (modularity, "Enable experimental modularity features") + (modularity, "Enable experimental modularity features"), + (packageObjectValues, "Enable experimental package objects as values"), ) // legacy language features from Scala 2 that are no longer supported. @@ -153,6 +154,10 @@ object Feature: case Some(v) => v case none => sourceVersionSetting + /* Should we behave as scala 2?*/ + def shouldBehaveAsScala2(using Context): Boolean = + ctx.settings.YcompileScala2Library.value || sourceVersion.isScala2 + def migrateTo3(using Context): Boolean = sourceVersion == `3.0-migration` diff --git a/compiler/src/dotty/tools/dotc/config/OutputDirs.scala b/compiler/src/dotty/tools/dotc/config/OutputDirs.scala index ba121d06e35a..ff0f59cb1309 100644 --- a/compiler/src/dotty/tools/dotc/config/OutputDirs.scala +++ b/compiler/src/dotty/tools/dotc/config/OutputDirs.scala @@ -2,7 +2,6 @@ package dotty.tools package dotc package config -import scala.language.unsafeNulls import io.* @@ -30,10 +29,10 @@ class OutputDirs { /** Check that dir is exists and is a directory. 
*/ private def checkDir(dir: AbstractFile, name: String, allowJar: Boolean = false): AbstractFile = ( - if (dir != null && dir.isDirectory) + if ((dir ne null) && dir.isDirectory) dir // was: else if (allowJar && dir == null && Path.isJarOrZip(name, false)) - else if (allowJar && dir == null && Jar.isJarOrZip(File(name), false)) + else if (allowJar && (dir eq null) && Jar.isJarOrZip(File(name), false)) new PlainFile(Path(name)) else throw new FatalError(name + " does not exist or is not a directory")) diff --git a/compiler/src/dotty/tools/dotc/config/Printers.scala b/compiler/src/dotty/tools/dotc/config/Printers.scala index 81fd60497025..4c66e1cdf833 100644 --- a/compiler/src/dotty/tools/dotc/config/Printers.scala +++ b/compiler/src/dotty/tools/dotc/config/Printers.scala @@ -5,7 +5,7 @@ import core.Contexts.{Context, ctx} object Printers { class Printer { - def println(msg: => String): Unit = System.out.nn.println(msg) + def println(msg: => String): Unit = System.out.println(msg) } object noPrinter extends Printer { diff --git a/compiler/src/dotty/tools/dotc/config/Properties.scala b/compiler/src/dotty/tools/dotc/config/Properties.scala index c2046899aaef..a5ad1600041c 100644 --- a/compiler/src/dotty/tools/dotc/config/Properties.scala +++ b/compiler/src/dotty/tools/dotc/config/Properties.scala @@ -2,7 +2,6 @@ package dotty.tools package dotc package config -import scala.language.unsafeNulls import scala.annotation.internal.sharable @@ -45,7 +44,7 @@ trait PropertiesTrait { def propIsSet(name: String): Boolean = System.getProperty(name) != null def propIsSetTo(name: String, value: String): Boolean = propOrNull(name) == value - def propOrElse(name: String, alt: => String): String = Option(System.getProperty(name)).getOrElse(alt) + def propOrElse(name: String, alt: => String | Null): String = Option(System.getProperty(name)).getOrElse(alt) def propOrEmpty(name: String): String = propOrElse(name, "") def propOrNull(name: String): String = propOrElse(name, null) def propOrNone(name: String): Option[String] = Option(propOrNull(name)) diff --git a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala index c6c0ab47de52..57dfe245e07c 100644 --- a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala +++ b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala @@ -1,7 +1,6 @@ package dotty.tools.dotc package config -import scala.language.unsafeNulls import dotty.tools.dotc.config.PathResolver.Defaults import dotty.tools.dotc.config.Settings.{Setting, SettingGroup, SettingCategory, Deprecation} import dotty.tools.dotc.config.SourceVersion @@ -167,6 +166,7 @@ private sealed trait WarningSettings: private val WenumCommentDiscard = BooleanSetting(WarningSetting, "Wenum-comment-discard", "Warn when a comment ambiguously assigned to multiple enum cases is discarded.") private val WimplausiblePatterns = BooleanSetting(WarningSetting, "Wimplausible-patterns", "Warn if comparison with a pattern value looks like it might always fail.") private val WunstableInlineAccessors = BooleanSetting(WarningSetting, "WunstableInlineAccessors", "Warn an inline methods has references to non-stable binary APIs.") + private val WtoStringInterpolated = BooleanSetting(WarningSetting, "Wtostring-interpolated", "Warn a standard interpolator used toString on a reference type.") private val Wunused: Setting[List[ChoiceWithHelp[String]]] = MultiChoiceHelpSetting( WarningSetting, name = "Wunused", @@ -308,6 +308,7 @@ private sealed trait WarningSettings: def 
enumCommentDiscard(using Context): Boolean = allOr(WenumCommentDiscard) def implausiblePatterns(using Context): Boolean = allOr(WimplausiblePatterns) def unstableInlineAccessors(using Context): Boolean = allOr(WunstableInlineAccessors) + def toStringInterpolated(using Context): Boolean = allOr(WtoStringInterpolated) def checkInit(using Context): Boolean = allOr(WcheckInit) /** -X "Extended" or "Advanced" settings */ @@ -443,7 +444,8 @@ private sealed trait YSettings: val YwithBestEffortTasty: Setting[Boolean] = BooleanSetting(ForkSetting, "Ywith-best-effort-tasty", "Allow to compile using best-effort tasty files. If such file is used, the compiler will stop after the pickler phase.") // Experimental language features - val YnoKindPolymorphism: Setting[Boolean] = BooleanSetting(ForkSetting, "Yno-kind-polymorphism", "Disable kind polymorphism.") + @deprecated(message = "This flag has no effect and will be removed in a future version.", since = "3.7.0") + val YnoKindPolymorphism: Setting[Boolean] = BooleanSetting(ForkSetting, "Yno-kind-polymorphism", "Disable kind polymorphism. (This flag has no effect)", deprecation = Deprecation.removed()) val YexplicitNulls: Setting[Boolean] = BooleanSetting(ForkSetting, "Yexplicit-nulls", "Make reference types non-nullable. Nullable types can be expressed with unions: e.g. String|Null.") val YnoFlexibleTypes: Setting[Boolean] = BooleanSetting(ForkSetting, "Yno-flexible-types", "Disable turning nullable Java return types and parameter types into flexible types, which behave like abstract types with a nullable lower bound and non-nullable upper bound.") val YcheckInitGlobal: Setting[Boolean] = BooleanSetting(ForkSetting, "Ysafe-init-global", "Check safe initialization of global objects.") @@ -452,6 +454,7 @@ private sealed trait YSettings: val YccDebug: Setting[Boolean] = BooleanSetting(ForkSetting, "Ycc-debug", "Used in conjunction with captureChecking language import, debug info for captured references.") val YccNew: Setting[Boolean] = BooleanSetting(ForkSetting, "Ycc-new", "Used in conjunction with captureChecking language import, try out new variants (debug option)") val YccLog: Setting[Boolean] = BooleanSetting(ForkSetting, "Ycc-log", "Used in conjunction with captureChecking language import, print tracing and debug info") + val YccVerbose: Setting[Boolean] = BooleanSetting(ForkSetting, "Ycc-verbose", "Print root capabilities with more details") val YccPrintSetup: Setting[Boolean] = BooleanSetting(ForkSetting, "Ycc-print-setup", "Used in conjunction with captureChecking language import, print trees after cc.Setup phase") /** Area-specific debug output */ diff --git a/compiler/src/dotty/tools/dotc/config/ScalaSettingsProperties.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettingsProperties.scala index e42d2d53529e..b50fe0b93023 100644 --- a/compiler/src/dotty/tools/dotc/config/ScalaSettingsProperties.scala +++ b/compiler/src/dotty/tools/dotc/config/ScalaSettingsProperties.scala @@ -4,7 +4,6 @@ package config import Settings.Setting.ChoiceWithHelp import dotty.tools.backend.jvm.BackendUtils.classfileVersionMap import dotty.tools.io.{AbstractFile, Directory, JDK9Reflectors, PlainDirectory, NoAbstractFile} -import scala.language.unsafeNulls object ScalaSettingsProperties: @@ -25,7 +24,8 @@ object ScalaSettingsProperties: ScalaRelease.values.toList.map(_.show) def supportedSourceVersions: List[String] = - (SourceVersion.values.toList.diff(SourceVersion.illegalSourceVersionNames)).toList.map(_.toString) + 
SourceVersion.values.diff(SourceVersion.illegalInSettings) + .map(_.toString).toList def supportedLanguageFeatures: List[ChoiceWithHelp[String]] = Feature.values.map((n, d) => ChoiceWithHelp(n.toString, d)) diff --git a/compiler/src/dotty/tools/dotc/config/ScalaVersion.scala b/compiler/src/dotty/tools/dotc/config/ScalaVersion.scala index 9f603e6792be..a9fffa7a3bb6 100644 --- a/compiler/src/dotty/tools/dotc/config/ScalaVersion.scala +++ b/compiler/src/dotty/tools/dotc/config/ScalaVersion.scala @@ -3,8 +3,6 @@ package dotty.tools package dotc.config -import scala.language.unsafeNulls - import scala.annotation.internal.sharable import scala.util.{Try, Success, Failure} @@ -82,20 +80,20 @@ case class SpecificScalaVersion(major: Int, minor: Int, rev: Int, build: ScalaBu "The minor and revision parts are optional." )) - def toInt(s: String) = s match { + def toInt(s: String | Null) = s match { case null | "" => 0 - case _ => s.toInt + case _ => s.nn.toInt } def isInt(s: String) = Try(toInt(s)).isSuccess import ScalaBuild.* - def toBuild(s: String) = s match { + def toBuild(s: String | Null) = s match { case null | "FINAL" => Final case s if (s.toUpperCase.startsWith("RC") && isInt(s.substring(2))) => RC(toInt(s.substring(2))) case s if (s.toUpperCase.startsWith("M") && isInt(s.substring(1))) => Milestone(toInt(s.substring(1))) - case _ => Development(s) + case _ => Development(s.nn) } try versionString match { diff --git a/compiler/src/dotty/tools/dotc/config/Settings.scala b/compiler/src/dotty/tools/dotc/config/Settings.scala index f85f2cc57de4..7842113b5e48 100644 --- a/compiler/src/dotty/tools/dotc/config/Settings.scala +++ b/compiler/src/dotty/tools/dotc/config/Settings.scala @@ -1,8 +1,6 @@ package dotty.tools.dotc package config -import scala.language.unsafeNulls - import core.Contexts.* import dotty.tools.io.{AbstractFile, Directory, JarArchive, PlainDirectory} diff --git a/compiler/src/dotty/tools/dotc/config/SourceVersion.scala b/compiler/src/dotty/tools/dotc/config/SourceVersion.scala index 199350949233..d662d3c0d412 100644 --- a/compiler/src/dotty/tools/dotc/config/SourceVersion.scala +++ b/compiler/src/dotty/tools/dotc/config/SourceVersion.scala @@ -3,10 +3,13 @@ package dotc package config import core.Decorators.* +import core.Contexts.* +import Feature.isPreviewEnabled import util.Property enum SourceVersion: - case `3.0-migration`, `3.0`, `3.1` // Note: do not add `3.1-migration` here, 3.1 is the same language as 3.0. + case `3.0-migration`, `3.0` + case `3.1-migration`, `3.1` case `3.2-migration`, `3.2` case `3.3-migration`, `3.3` case `3.4-migration`, `3.4` @@ -14,7 +17,9 @@ enum SourceVersion: case `3.6-migration`, `3.6` case `3.7-migration`, `3.7` case `3.8-migration`, `3.8` + // Add 3.x-migration and 3.x here // !!! Keep in sync with scala.runtime.stdlibPatches.language !!! 
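// Hypothetical usage sketch, not part of the patch: the source versions enumerated above can
// also be requested per compilation unit through a language import (the corresponding objects
// live in scala.runtime.stdLibPatches.language, which the sync comment above refers to).
// Whether a given version name is accepted depends on the compiler release in use.
import scala.language.`3.7-migration`  // ask for 3.7 migration checks in this file only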
+ case `2.13` case `future-migration`, `future` case `never` // needed for MigrationVersion.errorFrom if we never want to issue an error @@ -31,16 +36,27 @@ enum SourceVersion: def isAtMost(v: SourceVersion) = stable.ordinal <= v.ordinal + def isScala2 = this == `2.13` + def enablesFewerBraces = isAtLeast(`3.3`) def enablesClauseInterleaving = isAtLeast(`3.6`) def enablesNewGivens = isAtLeast(`3.6`) def enablesNamedTuples = isAtLeast(`3.7`) + def enablesBetterFors(using Context) = isAtLeast(`3.7`) && isPreviewEnabled object SourceVersion extends Property.Key[SourceVersion]: - def defaultSourceVersion = `3.7` + + /* The default source version used by the built compiler */ + val defaultSourceVersion = `3.7` + + /* Illegal source versions that may not appear in the settings `-source:<...>` */ + val illegalInSettings = List(`2.13`, `3.1-migration`, `never`) + + /* Illegal source versions that may not appear as an import `import scala.language.<...>` */ + val illegalInImports = List(`3.1-migration`, `never`) /** language versions that may appear in a language import, are deprecated, but not removed from the standard library. */ - val illegalSourceVersionNames = List("3.1-migration", "never").map(_.toTermName) + val illegalSourceVersionNames = illegalInImports.map(_.toString.toTermName) /** language versions that the compiler recognises. */ val validSourceVersionNames = values.toList.map(_.toString.toTermName) diff --git a/compiler/src/dotty/tools/dotc/core/Comments.scala b/compiler/src/dotty/tools/dotc/core/Comments.scala index b1d1e387c2cf..00f5b578b4d1 100644 --- a/compiler/src/dotty/tools/dotc/core/Comments.scala +++ b/compiler/src/dotty/tools/dotc/core/Comments.scala @@ -2,8 +2,6 @@ package dotty.tools package dotc package core -import scala.language.unsafeNulls - import ast.{ untpd, tpd } import Symbols.*, Contexts.* import util.{SourceFile, ReadOnlyMap} diff --git a/compiler/src/dotty/tools/dotc/core/Contexts.scala b/compiler/src/dotty/tools/dotc/core/Contexts.scala index e12ab1cc2da2..a867f90b237a 100644 --- a/compiler/src/dotty/tools/dotc/core/Contexts.scala +++ b/compiler/src/dotty/tools/dotc/core/Contexts.scala @@ -135,7 +135,7 @@ object Contexts { def outersIterator: Iterator[Context] = new Iterator[Context] { var current = thiscontext def hasNext = current != NoContext - def next = { val c = current; current = current.outer; c } + def next() = { val c = current; current = current.outer; c } } def period: Period diff --git a/compiler/src/dotty/tools/dotc/core/Decorators.scala b/compiler/src/dotty/tools/dotc/core/Decorators.scala index 96a2d45db80d..f8096dddeedd 100644 --- a/compiler/src/dotty/tools/dotc/core/Decorators.scala +++ b/compiler/src/dotty/tools/dotc/core/Decorators.scala @@ -51,7 +51,7 @@ object Decorators { if name.length != 0 then name.getChars(0, name.length, chars, s.length) termName(chars, 0, len) case name: TypeName => s.concat(name.toTermName) - case _ => termName(s.concat(name.toString).nn) + case _ => termName(s.concat(name.toString)) def indented(width: Int): String = val padding = " " * width @@ -289,10 +289,10 @@ object Decorators { case NonFatal(ex) if !ctx.settings.YshowPrintErrors.value => s"... (cannot display due to ${ex.className} ${ex.getMessage}) ..." - case _ => String.valueOf(x).nn + case _ => String.valueOf(x) /** Returns the simple class name of `x`. 
*/ - def className: String = if x == null then "" else x.getClass.getSimpleName.nn + def className: String = if x == null then "" else x.getClass.getSimpleName extension [T](x: T) def assertingErrorsReported(using Context): T = { diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index f89bc8691e2d..83c85adb0f43 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -15,7 +15,7 @@ import Comments.{Comment, docCtx} import util.Spans.NoSpan import config.Feature import Symbols.requiredModuleRef -import cc.{CaptureSet, RetainingType, Existential} +import cc.{CaptureSet, RetainingType} import ast.tpd.ref import scala.annotation.tailrec @@ -428,8 +428,8 @@ class Definitions { @tu lazy val AnyRefAlias: TypeSymbol = enterAliasType(tpnme.AnyRef, ObjectType) def AnyRefType: TypeRef = AnyRefAlias.typeRef - @tu lazy val Object_eq: TermSymbol = enterMethod(ObjectClass, nme.eq, methOfAnyRef(BooleanType), Final) - @tu lazy val Object_ne: TermSymbol = enterMethod(ObjectClass, nme.ne, methOfAnyRef(BooleanType), Final) + @tu lazy val Object_eq: TermSymbol = enterMethod(ObjectClass, nme.eq, methOfAnyRef(BooleanType), Final | Infix) + @tu lazy val Object_ne: TermSymbol = enterMethod(ObjectClass, nme.ne, methOfAnyRef(BooleanType), Final | Infix) @tu lazy val Object_synchronized: TermSymbol = enterPolyMethod(ObjectClass, nme.synchronized_, 1, pt => MethodType(List(pt.paramRefs(0)), pt.paramRefs(0)), Final) @tu lazy val Object_clone: TermSymbol = enterMethod(ObjectClass, nme.clone_, MethodType(Nil, ObjectType), Protected) @@ -449,10 +449,7 @@ class Definitions { @tu lazy val AnyKindClass: ClassSymbol = { val cls = newCompleteClassSymbol(ScalaPackageClass, tpnme.AnyKind, AbstractFinal | Permanent, Nil, newScope(0)) - if (!ctx.settings.YnoKindPolymorphism.value) - // Enable kind-polymorphism by exposing scala.AnyKind - cls.entered - cls + cls.entered } def AnyKindType: TypeRef = AnyKindClass.typeRef @@ -508,6 +505,9 @@ class Definitions { @tu lazy val ScalaRuntime_toArray: Symbol = ScalaRuntimeModule.requiredMethod(nme.toArray) @tu lazy val ScalaRuntime_toObjectArray: Symbol = ScalaRuntimeModule.requiredMethod(nme.toObjectArray) + @tu lazy val MurmurHash3Module: Symbol = requiredModule("scala.util.hashing.MurmurHash3") + @tu lazy val MurmurHash3_productHash = MurmurHash3Module.info.member(termName("productHash")).suchThat(_.info.firstParamTypes.size == 3).symbol + @tu lazy val BoxesRunTimeModule: Symbol = requiredModule("scala.runtime.BoxesRunTime") @tu lazy val BoxesRunTimeModule_externalEquals: Symbol = BoxesRunTimeModule.info.decl(nme.equals_).suchThat(toDenot(_).info.firstParamTypes.size == 2).symbol @tu lazy val ScalaStaticsModule: Symbol = requiredModule("scala.runtime.Statics") @@ -753,6 +753,9 @@ class Definitions { @tu lazy val StringBuilderClass: ClassSymbol = requiredClass("scala.collection.mutable.StringBuilder") @tu lazy val MatchErrorClass : ClassSymbol = requiredClass("scala.MatchError") @tu lazy val ConversionClass : ClassSymbol = requiredClass("scala.Conversion").typeRef.symbol.asClass + @tu lazy val ConversionModule : Symbol = ConversionClass.companionModule + @tu lazy val ConversionModuleClass: ClassSymbol = ConversionModule.moduleClass.asClass + @tu lazy val Conversion_into : Symbol = ConversionModuleClass.requiredType("into") @tu lazy val StringAddClass : ClassSymbol = requiredClass("scala.runtime.StringAdd") @tu lazy val StringAdd_+ : Symbol = 
StringAddClass.requiredMethod(nme.raw.PLUS) @@ -996,20 +999,22 @@ class Definitions { @tu lazy val LabelClass: Symbol = requiredClass("scala.util.boundary.Label") @tu lazy val BreakClass: Symbol = requiredClass("scala.util.boundary.Break") - @tu lazy val CapsModule: Symbol = requiredModule("scala.caps") + @tu lazy val CapsModule: Symbol = requiredPackage("scala.caps") @tu lazy val captureRoot: TermSymbol = CapsModule.requiredValue("cap") - @tu lazy val Caps_Capability: TypeSymbol = CapsModule.requiredType("Capability") + @tu lazy val Caps_Capability: ClassSymbol = requiredClass("scala.caps.Capability") @tu lazy val Caps_CapSet: ClassSymbol = requiredClass("scala.caps.CapSet") - @tu lazy val Caps_reachCapability: TermSymbol = CapsModule.requiredMethod("reachCapability") - @tu lazy val Caps_capsOf: TermSymbol = CapsModule.requiredMethod("capsOf") - @tu lazy val Caps_Exists: ClassSymbol = requiredClass("scala.caps.Exists") + @tu lazy val CapsInternalModule: Symbol = requiredModule("scala.caps.internal") + @tu lazy val Caps_reachCapability: TermSymbol = CapsInternalModule.requiredMethod("reachCapability") + @tu lazy val Caps_readOnlyCapability: TermSymbol = CapsInternalModule.requiredMethod("readOnlyCapability") + @tu lazy val Caps_capsOf: TermSymbol = CapsInternalModule.requiredMethod("capsOf") @tu lazy val CapsUnsafeModule: Symbol = requiredModule("scala.caps.unsafe") @tu lazy val Caps_unsafeAssumePure: Symbol = CapsUnsafeModule.requiredMethod("unsafeAssumePure") + @tu lazy val Caps_unsafeAssumeSeparate: Symbol = CapsUnsafeModule.requiredMethod("unsafeAssumeSeparate") @tu lazy val Caps_ContainsTrait: TypeSymbol = CapsModule.requiredType("Contains") - @tu lazy val Caps_containsImpl: TermSymbol = CapsModule.requiredMethod("containsImpl") - - /** The same as CaptureSet.universal but generated implicitly for references of Capability subtypes */ - @tu lazy val universalCSImpliedByCapability = CaptureSet(captureRoot.termRef) + @tu lazy val Caps_ContainsModule: Symbol = requiredModule("scala.caps.Contains") + @tu lazy val Caps_containsImpl: TermSymbol = Caps_ContainsModule.requiredMethod("containsImpl") + @tu lazy val Caps_Mutable: ClassSymbol = requiredClass("scala.caps.Mutable") + @tu lazy val Caps_SharedCapability: ClassSymbol = requiredClass("scala.caps.SharedCapability") @tu lazy val PureClass: Symbol = requiredClass("scala.Pure") @@ -1033,9 +1038,8 @@ class Definitions { @tu lazy val DeprecatedInheritanceAnnot: ClassSymbol = requiredClass("scala.deprecatedInheritance") @tu lazy val ImplicitAmbiguousAnnot: ClassSymbol = requiredClass("scala.annotation.implicitAmbiguous") @tu lazy val ImplicitNotFoundAnnot: ClassSymbol = requiredClass("scala.annotation.implicitNotFound") + @tu lazy val InferredDepFunAnnot: ClassSymbol = requiredClass("scala.caps.internal.inferredDepFun") @tu lazy val InlineParamAnnot: ClassSymbol = requiredClass("scala.annotation.internal.InlineParam") - @tu lazy val IntoAnnot: ClassSymbol = requiredClass("scala.annotation.into") - @tu lazy val IntoParamAnnot: ClassSymbol = requiredClass("scala.annotation.internal.$into") @tu lazy val ErasedParamAnnot: ClassSymbol = requiredClass("scala.annotation.internal.ErasedParam") @tu lazy val MainAnnot: ClassSymbol = requiredClass("scala.main") @tu lazy val MappedAlternativeAnnot: ClassSymbol = requiredClass("scala.annotation.internal.MappedAlternative") @@ -1053,6 +1057,7 @@ class Definitions { // @tu lazy val ScalaStrictFPAnnot: ClassSymbol = requiredClass("scala.annotation.strictfp") @tu lazy val ScalaStaticAnnot: ClassSymbol = 
requiredClass("scala.annotation.static") @tu lazy val SerialVersionUIDAnnot: ClassSymbol = requiredClass("scala.SerialVersionUID") + @tu lazy val SilentIntoAnnot: ClassSymbol = requiredClass("scala.annotation.internal.$into") @tu lazy val TailrecAnnot: ClassSymbol = requiredClass("scala.annotation.tailrec") @tu lazy val ThreadUnsafeAnnot: ClassSymbol = requiredClass("scala.annotation.threadUnsafe") @tu lazy val ConstructorOnlyAnnot: ClassSymbol = requiredClass("scala.annotation.constructorOnly") @@ -1066,8 +1071,10 @@ class Definitions { @tu lazy val UncheckedStableAnnot: ClassSymbol = requiredClass("scala.annotation.unchecked.uncheckedStable") @tu lazy val UncheckedVarianceAnnot: ClassSymbol = requiredClass("scala.annotation.unchecked.uncheckedVariance") @tu lazy val UncheckedCapturesAnnot: ClassSymbol = requiredClass("scala.annotation.unchecked.uncheckedCaptures") - @tu lazy val UntrackedCapturesAnnot: ClassSymbol = requiredClass("scala.caps.untrackedCaptures") + @tu lazy val UntrackedCapturesAnnot: ClassSymbol = requiredClass("scala.caps.unsafe.untrackedCaptures") @tu lazy val UseAnnot: ClassSymbol = requiredClass("scala.caps.use") + @tu lazy val ConsumeAnnot: ClassSymbol = requiredClass("scala.caps.consume") + @tu lazy val RefineOverrideAnnot: ClassSymbol = requiredClass("scala.caps.internal.refineOverride") @tu lazy val VolatileAnnot: ClassSymbol = requiredClass("scala.volatile") @tu lazy val LanguageFeatureMetaAnnot: ClassSymbol = requiredClass("scala.annotation.meta.languageFeature") @tu lazy val BeanGetterMetaAnnot: ClassSymbol = requiredClass("scala.annotation.meta.beanGetter") @@ -1083,6 +1090,8 @@ class Definitions { @tu lazy val TargetNameAnnot: ClassSymbol = requiredClass("scala.annotation.targetName") @tu lazy val VarargsAnnot: ClassSymbol = requiredClass("scala.annotation.varargs") @tu lazy val ReachCapabilityAnnot = requiredClass("scala.annotation.internal.reachCapability") + @tu lazy val RootCapabilityAnnot = requiredClass("scala.caps.internal.rootCapability") + @tu lazy val ReadOnlyCapabilityAnnot = requiredClass("scala.annotation.internal.readOnlyCapability") @tu lazy val RequiresCapabilityAnnot: ClassSymbol = requiredClass("scala.annotation.internal.requiresCapability") @tu lazy val RetainsAnnot: ClassSymbol = requiredClass("scala.annotation.retains") @tu lazy val RetainsCapAnnot: ClassSymbol = requiredClass("scala.annotation.retainsCap") @@ -1106,6 +1115,10 @@ class Definitions { @tu lazy val MetaAnnots: Set[Symbol] = NonBeanMetaAnnots + BeanGetterMetaAnnot + BeanSetterMetaAnnot + // Set of annotations that are not printed in types except under -Yprint-debug + @tu lazy val SilentAnnots: Set[Symbol] = + Set(InlineParamAnnot, ErasedParamAnnot, RefineOverrideAnnot, SilentIntoAnnot) + // A list of annotations that are commonly used to indicate that a field/method argument or return // type is not null. These annotations are used by the nullification logic in JavaNullInterop to // improve the precision of type nullification. 
@@ -1212,13 +1225,8 @@ class Definitions { */ def unapply(tpe: RefinedType)(using Context): Option[MethodOrPoly] = tpe.refinedInfo match - case mt: MethodType - if tpe.refinedName == nme.apply - && isFunctionType(tpe.parent) - && !Existential.isExistentialMethod(mt) => Some(mt) - case mt: PolyType - if tpe.refinedName == nme.apply - && isFunctionType(tpe.parent) => Some(mt) + case mt: MethodOrPoly + if tpe.refinedName == nme.apply && isFunctionType(tpe.parent) => Some(mt) case _ => None end RefinedFunctionOf @@ -1379,6 +1387,9 @@ class Definitions { final def isNamedTuple_From(sym: Symbol)(using Context): Boolean = sym.name == tpnme.From && sym.owner == NamedTupleModule.moduleClass + final def isInto(sym: Symbol)(using Context): Boolean = + sym.name == tpnme.into && sym.owner == ConversionModuleClass + private val compiletimePackageAnyTypes: Set[Name] = Set( tpnme.Equals, tpnme.NotEquals, tpnme.IsConst, tpnme.ToString ) @@ -1529,9 +1540,7 @@ class Definitions { denot.sourceModule.info = denot.typeRef // we run into a cyclic reference when patching if this line is omitted patch2(denot, patchCls) - if ctx.settings.YcompileScala2Library.value then - () - else if denot.name == tpnme.Predef.moduleClassName && denot.symbol == ScalaPredefModuleClass then + if denot.name == tpnme.Predef.moduleClassName && denot.symbol == ScalaPredefModuleClass then patchWith(ScalaPredefModuleClassPatch) else if denot.name == tpnme.language.moduleClassName && denot.symbol == LanguageModuleClass then patchWith(LanguageModuleClassPatch) @@ -1553,6 +1562,11 @@ class Definitions { @tu lazy val pureSimpleClasses = Set(StringClass, NothingClass, NullClass) ++ ScalaValueClasses() + @tu lazy val capabilityQualifierAnnots: Set[Symbol] = + Set(ReachCapabilityAnnot, ReadOnlyCapabilityAnnot, MaybeCapabilityAnnot) + @tu lazy val capabilityWrapperAnnots: Set[Symbol] = + capabilityQualifierAnnots + RootCapabilityAnnot + @tu lazy val AbstractFunctionType: Array[TypeRef] = mkArityArray("scala.runtime.AbstractFunction", MaxImplementedFunctionArity, 0).asInstanceOf[Array[TypeRef]] val AbstractFunctionClassPerRun: PerRun[Array[Symbol]] = new PerRun(AbstractFunctionType.map(_.symbol.asClass)) def AbstractFunctionClass(n: Int)(using Context): Symbol = AbstractFunctionClassPerRun()(using ctx)(n) @@ -1869,7 +1883,7 @@ class Definitions { || tp.derivesFrom(defn.PolyFunctionClass) // TODO check for refinement? 
private def withSpecMethods(cls: ClassSymbol, bases: List[Name], paramTypes: Set[TypeRef]) = - if !ctx.settings.YcompileScala2Library.value then + if !Feature.shouldBehaveAsScala2 then for base <- bases; tp <- paramTypes do cls.enter(newSymbol(cls, base.specializedName(List(tp)), Method, ExprType(tp))) cls @@ -1912,7 +1926,7 @@ class Definitions { case List(x, y) => Tuple2SpecializedParamClasses().contains(x.classSymbol) && Tuple2SpecializedParamClasses().contains(y.classSymbol) case _ => false && base.owner.denot.info.member(base.name.specializedName(args)).exists // when dotc compiles the stdlib there are no specialised classes - && !ctx.settings.YcompileScala2Library.value // We do not add the specilized TupleN methods/classes when compiling the stdlib + && !Feature.shouldBehaveAsScala2 // We do not add the specilized TupleN methods/classes when compiling the stdlib def isSpecializableFunction(cls: ClassSymbol, paramTypes: List[Type], retType: Type)(using Context): Boolean = paramTypes.length <= 2 @@ -1934,7 +1948,7 @@ class Definitions { case _ => false }) - && !ctx.settings.YcompileScala2Library.value // We do not add the specilized FunctionN methods/classes when compiling the stdlib + && !Feature.shouldBehaveAsScala2 // We do not add the specilized FunctionN methods/classes when compiling the stdlib @tu lazy val Function0SpecializedApplyNames: List[TermName] = for r <- Function0SpecializedReturnTypes @@ -2033,10 +2047,6 @@ class Definitions { def hasProblematicGetClass(className: Name): Boolean = HasProblematicGetClass.contains(className) - /** Is synthesized symbol with alphanumeric name allowed to be used as an infix operator? */ - def isInfix(sym: Symbol)(using Context): Boolean = - (sym eq Object_eq) || (sym eq Object_ne) - @tu lazy val assumedTransparentNames: Map[Name, Set[Symbol]] = // add these for now, until we had a chance to retrofit 2.13 stdlib // we should do a more through sweep through it then. @@ -2090,7 +2100,12 @@ class Definitions { */ @tu lazy val ccExperimental: Set[Symbol] = Set( CapsModule, CapsModule.moduleClass, PureClass, + Caps_Capability, // TODO: Remove when Capability is stabilized RequiresCapabilityAnnot, + captureRoot, Caps_CapSet, Caps_ContainsTrait, Caps_ContainsModule, Caps_ContainsModule.moduleClass, UseAnnot, + Caps_Mutable, Caps_SharedCapability, ConsumeAnnot, + CapsUnsafeModule, CapsUnsafeModule.moduleClass, + CapsInternalModule, CapsInternalModule.moduleClass, RetainsAnnot, RetainsCapAnnot, RetainsByNameAnnot) /** Experimental language features defined in `scala.runtime.stdLibPatches.language.experimental`. diff --git a/compiler/src/dotty/tools/dotc/core/Flags.scala b/compiler/src/dotty/tools/dotc/core/Flags.scala index 0775b3caaf0c..6adf899e9da0 100644 --- a/compiler/src/dotty/tools/dotc/core/Flags.scala +++ b/compiler/src/dotty/tools/dotc/core/Flags.scala @@ -252,7 +252,7 @@ object Flags { /** A field generated for a primary constructor parameter (no matter if it's a 'val' or not), * or an accessor of such a field. 
*/ - val (_, ParamAccessor @ _, _) = newFlags(14, "") + val (ParamAccessorOrInto @ _, ParamAccessor @ _, Into @ _) = newFlags(14, "", "into") /** A value or class implementing a module */ val (Module @ _, ModuleVal @ _, ModuleClass @ _) = newFlags(15, "module") @@ -452,7 +452,7 @@ object Flags { commonFlags(Private, Protected, Final, Case, Implicit, Given, Override, JavaStatic, Transparent, Erased) val TypeSourceModifierFlags: FlagSet = - CommonSourceModifierFlags.toTypeFlags | Abstract | Sealed | Opaque | Open + CommonSourceModifierFlags.toTypeFlags | Abstract | Sealed | Opaque | Open | Into val TermSourceModifierFlags: FlagSet = CommonSourceModifierFlags.toTermFlags | Inline | AbsOverride | Lazy | Tracked @@ -467,7 +467,7 @@ object Flags { * TODO: Should check that FromStartFlags do not change in completion */ val FromStartFlags: FlagSet = commonFlags( - Module, Package, Deferred, Method, Case, Enum, Param, ParamAccessor, + Module, Package, Deferred, Method, Case, Enum, Param, ParamAccessorOrInto, Scala2SpecialFlags, MutableOrOpen, Opaque, Touched, JavaStatic, OuterOrCovariant, LabelOrContravariant, CaseAccessor, Tracked, Extension, NonMember, Implicit, Given, Permanent, Synthetic, Exported, @@ -597,7 +597,6 @@ object Flags { val JavaInterface: FlagSet = JavaDefined | NoInits | Trait val JavaProtected: FlagSet = JavaDefined | Protected val MethodOrLazy: FlagSet = Lazy | Method - val MutableOrLazy: FlagSet = Lazy | Mutable val MethodOrLazyOrMutable: FlagSet = Lazy | Method | Mutable val LiftedMethod: FlagSet = Lifted | Method val LocalParam: FlagSet = Local | Param diff --git a/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala b/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala index 5a8938602523..75c23bb003b5 100644 --- a/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala +++ b/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala @@ -33,6 +33,18 @@ class GadtConstraint private ( reverseMapping = reverseMapping.updated(tv.origin, sym), ) + def replace(param: TypeParamRef, tp: Type)(using Context) = + var constr = constraint + for + poly <- constraint.domainLambdas + paramRef <- poly.paramRefs + do + val entry0 = constr.entry(paramRef) + val entry1 = entry0.substParam(param, tp) + if entry1 ne entry0 then + constr = constr.updateEntry(paramRef, entry1) + withConstraint(constr) + /** Is `sym1` ordered to be less than `sym2`? 
*/ def isLess(sym1: Symbol, sym2: Symbol)(using Context): Boolean = constraint.isLess(tvarOrError(sym1).origin, tvarOrError(sym2).origin) @@ -245,6 +257,9 @@ sealed trait GadtState { result } + def replace(param: TypeParamRef, tp: Type)(using Context) = + gadt = gadt.replace(param, tp) + /** See [[ConstraintHandling.approximation]] */ def approximation(sym: Symbol, fromBelow: Boolean, maxLevel: Int = Int.MaxValue)(using Context): Type = { approximation(gadt.tvarOrError(sym).origin, fromBelow, maxLevel).match diff --git a/compiler/src/dotty/tools/dotc/core/MacroClassLoader.scala b/compiler/src/dotty/tools/dotc/core/MacroClassLoader.scala index b6b316ac14d9..c4c41a460a70 100644 --- a/compiler/src/dotty/tools/dotc/core/MacroClassLoader.scala +++ b/compiler/src/dotty/tools/dotc/core/MacroClassLoader.scala @@ -19,8 +19,6 @@ object MacroClassLoader { ctx.setProperty(MacroClassLoaderKey, makeMacroClassLoader(using ctx)) private def makeMacroClassLoader(using Context): ClassLoader = trace("new macro class loader") { - import scala.language.unsafeNulls - val entries = ClassPath.expandPath(ctx.settings.classpath.value, expandStar=true) val urls = entries.map(cp => java.nio.file.Paths.get(cp).toUri.toURL).toArray val out = Option(ctx.settings.outputDir.value.toURL) // to find classes in case of suspended compilation diff --git a/compiler/src/dotty/tools/dotc/core/Mode.scala b/compiler/src/dotty/tools/dotc/core/Mode.scala index 6fd76e37977d..571a786e9106 100644 --- a/compiler/src/dotty/tools/dotc/core/Mode.scala +++ b/compiler/src/dotty/tools/dotc/core/Mode.scala @@ -91,6 +91,14 @@ object Mode { */ val ImplicitExploration: Mode = newMode(12, "ImplicitExploration") + /** We are currently inside a capture set. + * A term name could be a capture variable, so we need to + * check that it is valid to use as type name. + * Since this mode is only used during annotation typing, + * we can reuse the value of `ImplicitExploration` to save bits. 
+ */ + val InCaptureSet: Mode = ImplicitExploration + /** We are currently unpickling Scala2 info */ val Scala2Unpickling: Mode = newMode(13, "Scala2Unpickling") diff --git a/compiler/src/dotty/tools/dotc/core/NameKinds.scala b/compiler/src/dotty/tools/dotc/core/NameKinds.scala index e9575c7d6c4a..ff41eeb81ca0 100644 --- a/compiler/src/dotty/tools/dotc/core/NameKinds.scala +++ b/compiler/src/dotty/tools/dotc/core/NameKinds.scala @@ -312,6 +312,7 @@ object NameKinds { /** Other unique names */ val CanThrowEvidenceName: UniqueNameKind = new UniqueNameKind("canThrow$") + val TryOwnerName: UniqueNameKind = new UniqueNameKind("try$") val TempResultName: UniqueNameKind = new UniqueNameKind("ev$") val DepParamName: UniqueNameKind = new UniqueNameKind("(param)") val LazyImplicitName: UniqueNameKind = new UniqueNameKind("$_lazy_implicit_$") diff --git a/compiler/src/dotty/tools/dotc/core/NameOps.scala b/compiler/src/dotty/tools/dotc/core/NameOps.scala index 415aa049c587..766cf4abf8c4 100644 --- a/compiler/src/dotty/tools/dotc/core/NameOps.scala +++ b/compiler/src/dotty/tools/dotc/core/NameOps.scala @@ -7,7 +7,7 @@ import java.nio.CharBuffer import scala.io.Codec import Int.MaxValue import Names.*, StdNames.*, Contexts.*, Symbols.*, Flags.*, NameKinds.*, Types.* -import util.Chars.{isOperatorPart, digit2int} +import util.Chars.{isOperatorPart, isIdentifierPart, digit2int} import Decorators.* import Definitions.* import nme.* @@ -15,7 +15,7 @@ import nme.* object NameOps { object compactify { - lazy val md5: MessageDigest = MessageDigest.getInstance("MD5").nn + lazy val md5: MessageDigest = MessageDigest.getInstance("MD5") inline val CLASSFILE_NAME_CHAR_LIMIT = 240 @@ -43,9 +43,9 @@ object NameOps { val suffix = s.takeRight(edge) val cs = s.toArray - val bytes = Codec.toUTF8(CharBuffer.wrap(cs).nn) + val bytes = Codec.toUTF8(CharBuffer.wrap(cs)) md5.update(bytes) - val md5chars = md5.digest().nn.map(b => (b & 0xFF).toHexString).mkString + val md5chars = md5.digest().map(b => (b & 0xFF).toHexString).mkString prefix + marker + md5chars + marker + suffix } @@ -78,9 +78,22 @@ object NameOps { def isUnapplyName: Boolean = name == nme.unapply || name == nme.unapplySeq def isRightAssocOperatorName: Boolean = name.lastPart.last == ':' - def isOperatorName: Boolean = name match - case name: SimpleName => name.exists(isOperatorPart) - case _ => false + /** Does this name match `[{letter | digit} '_'] op`? + * + * See examples in [[NameOpsTest]]. + */ + def isOperatorName: Boolean = + name match + case name: SimpleName => + var i = name.length - 1 + // Ends with operator characters + while i >= 0 && isOperatorPart(name(i)) do i -= 1 + if i == -1 then return true + // Optionnally prefixed with alpha-numeric characters followed by `_` + if name(i) != '_' then return false + while i >= 0 && isIdentifierPart(name(i)) do i -= 1 + i == -1 + case _ => false /** Is name of a variable pattern? 
*/ def isVarPattern: Boolean = diff --git a/compiler/src/dotty/tools/dotc/core/NamerOps.scala b/compiler/src/dotty/tools/dotc/core/NamerOps.scala index dbdb46aba334..6efb75ebac93 100644 --- a/compiler/src/dotty/tools/dotc/core/NamerOps.scala +++ b/compiler/src/dotty/tools/dotc/core/NamerOps.scala @@ -39,14 +39,12 @@ object NamerOps: */ extension (tp: Type) def separateRefinements(cls: ClassSymbol, refinements: mutable.LinkedHashMap[Name, Type] | Null)(using Context): Type = - val widenSkolemsMap = new TypeMap: - def apply(tp: Type) = mapOver(tp.widenSkolem) tp match case RefinedType(tp1, rname, rinfo) => try tp1.separateRefinements(cls, refinements) finally if refinements != null then - val rinfo1 = widenSkolemsMap(rinfo) + val rinfo1 = rinfo.widenSkolems refinements(rname) = refinements.get(rname) match case Some(tp) => tp & rinfo1 case None => rinfo1 @@ -317,4 +315,11 @@ object NamerOps: ann.tree match case ast.tpd.WitnessNamesAnnot(witnessNames) => addContextBoundCompanionFor(sym, witnessNames, Nil) + + /** if `sym` is a term parameter or parameter accessor, map all occurrences of + * `into[T]` in its type to `T @$into`. + */ + extension (tp: Type) + def suppressIntoIfParam(sym: Symbol)(using Context): Type = + if sym.isOneOf(TermParamOrAccessor) then TypeOps.suppressInto(tp) else tp end NamerOps diff --git a/compiler/src/dotty/tools/dotc/core/Names.scala b/compiler/src/dotty/tools/dotc/core/Names.scala index a31ab0662ee4..25deb3a56889 100644 --- a/compiler/src/dotty/tools/dotc/core/Names.scala +++ b/compiler/src/dotty/tools/dotc/core/Names.scala @@ -393,7 +393,7 @@ object Names { // because asserts are caught in exception handlers which might // cause other failures. In that case the first, important failure // is lost. - System.err.nn.println("Backend should not call Name#toString, Name#mangledString should be used instead.") + System.err.println("Backend should not call Name#toString, Name#mangledString should be used instead.") Thread.dumpStack() assert(false) } @@ -404,8 +404,8 @@ object Names { * from GenBCode or it also contains one of the allowed methods below. 
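The `suppressIntoIfParam` extension added in the NamerOps hunk above marks covariant occurrences of `into[T]` in parameter types as `T @$into`, to be restored later by the `suppressInto`/`revealInto` maps that appear further down in this patch (TypeOps.scala). A toy round-trip sketch, with a tiny `Ty` ADT standing in for the compiler's `Type`:

```scala
enum Ty:
  case Name(s: String)                 // a plain named type
  case Into(arg: Ty)                   // into[T]
  case Annot(arg: Ty, annot: String)   // T @annot
  case Func(param: Ty, res: Ty)        // T => R
import Ty.*

// Rewrite only result-position (covariant) occurrences, as the real maps do.
def suppress(t: Ty): Ty = t match
  case Into(arg)    => Annot(suppress(arg), "$into")
  case Func(p, r)   => Func(p, suppress(r))
  case Annot(a, an) => Annot(suppress(a), an)
  case t: Name      => t

def reveal(t: Ty): Ty = t match
  case Annot(a, "$into") => Into(reveal(a))
  case Annot(a, an)      => Annot(reveal(a), an)
  case Func(p, r)        => Func(p, reveal(r))
  case Into(arg)         => Into(reveal(arg))
  case t: Name           => t

@main def intoRoundTripDemo(): Unit =
  val tp = Func(Name("A"), Into(Name("B")))
  assert(suppress(tp) == Func(Name("A"), Annot(Name("B"), "$into")))
  assert(reveal(suppress(tp)) == tp)   // the marking is reversible
```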
*/ private def toStringOK = { - val trace: Array[StackTraceElement] = Thread.currentThread.nn.getStackTrace.asInstanceOf[Array[StackTraceElement]] - !trace.exists(_.getClassName.nn.endsWith("GenBCode")) || + val trace: Array[StackTraceElement] = Thread.currentThread.getStackTrace.asInstanceOf[Array[StackTraceElement]] + !trace.exists(_.getClassName.endsWith("GenBCode")) || trace.exists(elem => List( "mangledString", diff --git a/compiler/src/dotty/tools/dotc/core/StdNames.scala b/compiler/src/dotty/tools/dotc/core/StdNames.scala index 90e5544f19af..ac1f4f448722 100644 --- a/compiler/src/dotty/tools/dotc/core/StdNames.scala +++ b/compiler/src/dotty/tools/dotc/core/StdNames.scala @@ -41,8 +41,8 @@ object StdNames { inline val Product = "Product" @sharable - private val disallowed = java.util.regex.Pattern.compile("""[<>]""").nn - def sanitize(str: String): String = disallowed.matcher(str).nn.replaceAll("""\$""").nn + private val disallowed = java.util.regex.Pattern.compile("""[<>]""") + def sanitize(str: String): String = disallowed.matcher(str).replaceAll("""\$""") } abstract class DefinedNames[N <: Name] { @@ -121,6 +121,7 @@ object StdNames { val BITMAP_CHECKINIT: N = s"${BITMAP_PREFIX}init$$" // initialization bitmap for checkinit values val BITMAP_CHECKINIT_TRANSIENT: N = s"${BITMAP_PREFIX}inittrans$$" // initialization bitmap for transient checkinit values val CC_REACH: N = "$reach" + val CC_READONLY: N = "$readOnly" val DEFAULT_GETTER: N = str.DEFAULT_GETTER val DEFAULT_GETTER_INIT: N = "$lessinit$greater" val DO_WHILE_PREFIX: N = "doWhile$" @@ -131,7 +132,6 @@ object StdNames { val EXCEPTION_RESULT_PREFIX: N = "exceptionResult" val EXPAND_SEPARATOR: N = str.EXPAND_SEPARATOR val IMPORT: N = "" - val INTO: N = "$into" val MODULE_SUFFIX: N = str.MODULE_SUFFIX val OPS_PACKAGE: N = "" val OVERLOADED: N = "" @@ -554,6 +554,7 @@ object StdNames { val materializeTypeTag: N = "materializeTypeTag" val mirror : N = "mirror" val moduleClass : N = "moduleClass" + val mut: N = "mut" val name: N = "name" val nameDollar: N = "$name" val ne: N = "ne" @@ -588,6 +589,7 @@ object StdNames { val productPrefix: N = "productPrefix" val quotes : N = "quotes" val raw_ : N = "raw" + val rd: N = "rd" val refl: N = "refl" val reflect: N = "reflect" val reflectiveSelectable: N = "reflectiveSelectable" diff --git a/compiler/src/dotty/tools/dotc/core/Substituters.scala b/compiler/src/dotty/tools/dotc/core/Substituters.scala index 96da91293d91..425b6193f3cd 100644 --- a/compiler/src/dotty/tools/dotc/core/Substituters.scala +++ b/compiler/src/dotty/tools/dotc/core/Substituters.scala @@ -2,14 +2,14 @@ package dotty.tools.dotc package core import Types.*, Symbols.*, Contexts.* -import cc.CaptureSet.IdempotentCaptRefMap +import cc.Capabilities.{Capability, ResultCap} /** Substitution operations on types. See the corresponding `subst` and * `substThis` methods on class Type for an explanation. 
*/ object Substituters: - final def subst(tp: Type, from: BindingType, to: BindingType, theMap: SubstBindingMap | Null)(using Context): Type = + final def subst[BT <: BindingType](tp: Type, from: BT, to: BT, theMap: SubstBindingMap[BT] | Null)(using Context): Type = tp match { case tp: BoundType => if (tp.binder eq from) tp.copyBoundType(to.asInstanceOf[tp.BT]) else tp @@ -163,11 +163,51 @@ object Substituters: .mapOver(tp) } - final class SubstBindingMap(from: BindingType, to: BindingType)(using Context) extends DeepTypeMap, BiTypeMap { + final class SubstBindingMap[BT <: BindingType](val from: BT, val to: BT)(using Context) extends DeepTypeMap, BiTypeMap { def apply(tp: Type): Type = subst(tp, from, to, this)(using mapCtx) + override def mapCapability(c: Capability, deep: Boolean = false) = c match + case c @ ResultCap(binder: MethodType) if binder eq from => + c.derivedResult(to.asInstanceOf[MethodType]) + case _ => + super.mapCapability(c, deep) + + override def fuse(next: BiTypeMap)(using Context) = next match + case next: SubstBindingMap[_] => + if next.from eq to then Some(SubstBindingMap(from, next.to)) + else Some(SubstBindingsMap(Array(from, next.from), Array(to, next.to))) + case _ => None def inverse = SubstBindingMap(to, from) } + final class SubstBindingsMap(val from: Array[BindingType], val to: Array[BindingType])(using Context) extends DeepTypeMap, BiTypeMap { + + def apply(tp: Type): Type = tp match + case tp: BoundType => + var i = 0 + while i < from.length && (from(i) ne tp.binder) do i += 1 + if i < from.length then tp.copyBoundType(to(i).asInstanceOf[tp.BT]) else tp + case _ => + mapOver(tp) + + override def mapCapability(c: Capability, deep: Boolean = false) = c match + case c @ ResultCap(binder: MethodType) => + var i = 0 + while i < from.length && (from(i) ne binder) do i += 1 + if i < from.length then c.derivedResult(to(i).asInstanceOf[MethodType]) else c + case _ => + super.mapCapability(c, deep) + + override def fuse(next: BiTypeMap)(using Context) = next match + case next: SubstBindingMap[_] => + var i = 0 + while i < from.length && (to(i) ne next.from) do i += 1 + if i < from.length then Some(SubstBindingsMap(from, to.updated(i, next.to))) + else Some(SubstBindingsMap(from :+ next.from, to :+ next.to)) + case _ => None + + def inverse = SubstBindingsMap(to, from) + } + final class Subst1Map(from: Symbol, to: Type)(using Context) extends DeepTypeMap { def apply(tp: Type): Type = subst1(tp, from, to, this)(using mapCtx) } @@ -180,7 +220,7 @@ object Substituters: def apply(tp: Type): Type = subst(tp, from, to, this)(using mapCtx) } - final class SubstSymMap(from: List[Symbol], to: List[Symbol])(using Context) extends DeepTypeMap, BiTypeMap { + final class SubstSymMap(from: List[Symbol], to: List[Symbol])(using Context) extends DeepTypeMap { def apply(tp: Type): Type = substSym(tp, from, to, this)(using mapCtx) def inverse = SubstSymMap(to, from) // implicitly requires that `to` contains no duplicates. 
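`SubstBindingsMap` above generalizes `SubstBindingMap` to several binders at once, and `fuse` folds a chain of single substitutions into one array-based map. A self-contained sketch of that bookkeeping, with `String` standing in for `BindingType` and `Vector` for the arrays:

```scala
// Toy model: from(i) is rewritten to to(i); fusing with a further single
// substitution either extends an existing chain or records a new pair.
final case class SubstN(from: Vector[String], to: Vector[String]):
  def apply(binder: String): String =
    val i = from.indexOf(binder)
    if i >= 0 then to(i) else binder
  def fuse(nextFrom: String, nextTo: String): SubstN =
    val i = to.indexOf(nextFrom)
    if i >= 0 then SubstN(from, to.updated(i, nextTo))   // redirect an existing chain
    else SubstN(from :+ nextFrom, to :+ nextTo)          // independent substitution

@main def fuseDemo(): Unit =
  val m = SubstN(Vector("A"), Vector("B")).fuse("B", "C").fuse("X", "Y")
  assert(m("A") == "C" && m("X") == "Y" && m("Z") == "Z")
```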
} @@ -189,15 +229,15 @@ object Substituters: def apply(tp: Type): Type = substThis(tp, from, to, this)(using mapCtx) } - final class SubstRecThisMap(from: Type, to: Type)(using Context) extends DeepTypeMap, IdempotentCaptRefMap { + final class SubstRecThisMap(from: Type, to: Type)(using Context) extends DeepTypeMap { def apply(tp: Type): Type = substRecThis(tp, from, to, this)(using mapCtx) } - final class SubstParamMap(from: ParamRef, to: Type)(using Context) extends DeepTypeMap, IdempotentCaptRefMap { + final class SubstParamMap(from: ParamRef, to: Type)(using Context) extends DeepTypeMap { def apply(tp: Type): Type = substParam(tp, from, to, this)(using mapCtx) } - final class SubstParamsMap(from: BindingType, to: List[Type])(using Context) extends DeepTypeMap, IdempotentCaptRefMap { + final class SubstParamsMap(from: BindingType, to: List[Type])(using Context) extends DeepTypeMap { def apply(tp: Type): Type = substParams(tp, from, to, this)(using mapCtx) } diff --git a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala index 54e18bf1ea1b..6f7f77de70a5 100644 --- a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala @@ -685,7 +685,7 @@ object SymDenotations { isAbstractOrAliasType && !isAbstractOrParamType /** Is this symbol an abstract or alias type? */ - final def isAbstractOrAliasType: Boolean = isType & !isClass + final def isAbstractOrAliasType: Boolean = isType && !isClass /** Is this symbol an abstract type or type parameter? */ final def isAbstractOrParamType(using Context): Boolean = this.isOneOf(DeferredOrTypeParam) @@ -806,6 +806,13 @@ object SymDenotations { final def isRealMethod(using Context): Boolean = this.is(Method, butNot = Accessor) && !isAnonymousFunction + /** A mutable variable (not a getter or setter for it) */ + final def isMutableVar(using Context): Boolean = is(Mutable, butNot = Method) + + /** A mutable variable or its getter or setter */ + final def isMutableVarOrAccessor(using Context): Boolean = + is(Mutable) && (!is(Method) || is(Accessor)) + /** Is this a getter? */ final def isGetter(using Context): Boolean = this.is(Accessor) && !originalName.isSetterName && !(originalName.isScala2LocalSuffix && symbol.owner.is(Scala2x)) @@ -1134,7 +1141,7 @@ object SymDenotations { final def ownersIterator(using Context): Iterator[Symbol] = new Iterator[Symbol] { private var current = symbol def hasNext = current.exists - def next: Symbol = { + def next(): Symbol = { val result = current current = current.owner result @@ -1411,7 +1418,7 @@ object SymDenotations { final def nextOverriddenSymbol(using Context): Symbol = { val overridden = allOverriddenSymbols if (overridden.hasNext) - overridden.next + overridden.next() else NoSymbol } @@ -1489,10 +1496,10 @@ object SymDenotations { val candidates = owner.info.decls.lookupAll(name) def test(sym: Symbol): Symbol = if (sym == symbol || sym.signature == signature) sym - else if (candidates.hasNext) test(candidates.next) + else if (candidates.hasNext) test(candidates.next()) else NoSymbol if (candidates.hasNext) { - val sym = candidates.next + val sym = candidates.next() if (candidates.hasNext) test(sym) else sym } else NoSymbol @@ -1946,7 +1953,7 @@ object SymDenotations { case _ => NoSymbol /** The explicitly given self type (self types of modules are assumed to be - * explcitly given here). + * explicitly given here). 
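A minimal truth-table sketch of the two new predicates `isMutableVar` and `isMutableVarOrAccessor` above, with plain booleans standing in for the `Mutable`, `Method` and `Accessor` flags:

```scala
// Boolean stand-ins for flag tests: isMutableVar excludes getters/setters,
// isMutableVarOrAccessor additionally admits them, but not other Mutable methods.
final case class FlagsOf(mutable: Boolean, method: Boolean, accessor: Boolean)

def isMutableVar(f: FlagsOf): Boolean           = f.mutable && !f.method
def isMutableVarOrAccessor(f: FlagsOf): Boolean = f.mutable && (!f.method || f.accessor)

@main def mutableFlagsDemo(): Unit =
  val plainVar  = FlagsOf(mutable = true, method = false, accessor = false)
  val varGetter = FlagsOf(mutable = true, method = true,  accessor = true)
  val otherDef  = FlagsOf(mutable = true, method = true,  accessor = false)
  assert(isMutableVar(plainVar) && !isMutableVar(varGetter))
  assert(isMutableVarOrAccessor(varGetter) && !isMutableVarOrAccessor(otherDef))
```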
*/ def givenSelfType(using Context): Type = classInfo.selfInfo match { case tp: Type => tp @@ -2913,9 +2920,8 @@ object SymDenotations { private var checkedPeriod: Period = Nowhere protected def invalidateDependents() = { - import scala.language.unsafeNulls if (dependent != null) { - val it = dependent.keySet.iterator() + val it = dependent.nn.keySet.iterator() while (it.hasNext()) it.next().invalidate() } dependent = null diff --git a/compiler/src/dotty/tools/dotc/core/SymUtils.scala b/compiler/src/dotty/tools/dotc/core/SymUtils.scala index 54ba0e3bdd06..3c59cecfbbff 100644 --- a/compiler/src/dotty/tools/dotc/core/SymUtils.scala +++ b/compiler/src/dotty/tools/dotc/core/SymUtils.scala @@ -118,6 +118,16 @@ class SymUtils: def isGenericProduct(using Context): Boolean = whyNotGenericProduct.isEmpty + def sanitizedDescription(using Context): String = + if self.isConstructor then + i"constructor of ${self.owner.sanitizedDescription}" + else if self.isAnonymousFunction then + i"anonymous function of type ${self.info}" + else if self.name.toString.contains('$') then + self.owner.sanitizedDescription + else + self.show + /** Is this an old style implicit conversion? * @param directOnly only consider explicitly written methods * @param forImplicitClassOnly only consider methods generated from implicit classes @@ -287,7 +297,7 @@ class SymUtils: */ def isConstExprFinalVal(using Context): Boolean = atPhaseNoLater(erasurePhase) { - self.is(Final, butNot = Mutable) && self.info.resultType.isInstanceOf[ConstantType] + self.is(Final) && !self.isMutableVarOrAccessor && self.info.resultType.isInstanceOf[ConstantType] } && !self.sjsNeedsField /** The `ConstantType` of a val known to be `isConstrExprFinalVal`. @@ -359,7 +369,6 @@ class SymUtils: /** Is symbol assumed or declared as an infix symbol? */ def isDeclaredInfix(using Context): Boolean = self.is(Infix) - || defn.isInfix(self) || self.name.isUnapplyName && self.owner.is(Module) && self.owner.linkedClass.is(Case) diff --git a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala index 39297697f29a..7b7eb2d2e5a9 100644 --- a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala +++ b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala @@ -78,7 +78,7 @@ object SymbolLoaders { * and give them `completer` as type. */ def enterPackage(owner: Symbol, pname: TermName, completer: (TermSymbol, ClassSymbol) => PackageLoader)(using Context): Symbol = { - val preExisting = owner.info.decls lookup pname + val preExisting = owner.info.decls.lookup(pname) if (preExisting != NoSymbol) // Some jars (often, obfuscated ones) include a package and // object with the same name. Rather than render them unusable, @@ -95,6 +95,18 @@ object SymbolLoaders { s"Resolving package/object name conflict in favor of object ${preExisting.fullName}. The package will be inaccessible.") return NoSymbol } + else if pname == nme.caps && owner == defn.ScalaPackageClass then + // `scala.caps`` was an object until 3.6, it is a package from 3.7. Without special handling + // this would cause a TypeError to be thrown below if a build has several versions of the + // Scala standard library on the classpath. This was the case for 29 projects in OpenCB. + // These projects should be updated. But until that's the case we issue a warning instead + // of a hard failure. + report.warning( + em"""$owner contains object and package with same name: $pname. 
+ |This indicates that there are several versions of the Scala standard library on the classpath. + |The build should be reconfigured so that only one version of the standard library is on the classpath.""") + owner.info.decls.openForMutations.unlink(preExisting) + owner.info.decls.openForMutations.unlink(preExisting.moduleClass) else throw TypeError( em"""$owner contains object and package with same name: $pname @@ -146,7 +158,7 @@ object SymbolLoaders { if (!ok) report.warning(i"""$what ${tree.name} is in the wrong directory. |It was declared to be in package ${path.reverse.mkString(".")} - |But it is found in directory ${filePath.reverse.mkString(File.separator.nn)}""", + |But it is found in directory ${filePath.reverse.mkString(File.separator)}""", tree.srcPos.focus) ok } diff --git a/compiler/src/dotty/tools/dotc/core/Symbols.scala b/compiler/src/dotty/tools/dotc/core/Symbols.scala index 821c7833a737..c8ede8bfdec2 100644 --- a/compiler/src/dotty/tools/dotc/core/Symbols.scala +++ b/compiler/src/dotty/tools/dotc/core/Symbols.scala @@ -641,6 +641,32 @@ object Symbols extends SymUtils { newClassSymbol(owner, name, flags, completer, privateWithin, coord, compUnitInfo) } + /** Same as the other `newNormalizedClassSymbol` except that `parents` can be a function returning a list of arbitrary + * types which get normalized into type refs and parameter bindings and annotations can be assigned in the completer. + */ + def newNormalizedClassSymbol( + owner: Symbol, + name: TypeName, + flags: FlagSet, + parentTypes: Symbol => List[Type], + selfInfo: Type, + privateWithin: Symbol, + annotations: List[Tree], + coord: Coord, + compUnitInfo: CompilationUnitInfo | Null)(using Context): ClassSymbol = { + def completer = new LazyType { + def complete(denot: SymDenotation)(using Context): Unit = { + val cls = denot.asClass.classSymbol + val decls = newScope + val parents = parentTypes(cls).map(_.dealias) + assert(parents.nonEmpty && !parents.head.typeSymbol.is(dotc.core.Flags.Trait), "First parent must be a class") + denot.info = ClassInfo(owner.thisType, cls, parents, decls, selfInfo) + denot.annotations = annotations.map(Annotations.Annotation(_)) + } + } + newClassSymbol(owner, name, flags, completer, privateWithin, coord, compUnitInfo) + } + def newRefinedClassSymbol(coord: Coord = NoCoord)(using Context): ClassSymbol = newCompleteClassSymbol(ctx.owner, tpnme.REFINE_CLASS, NonMember, parents = Nil, newScope, coord = coord) @@ -718,6 +744,34 @@ object Symbols extends SymUtils { privateWithin, coord, compUnitInfo) } + /** Same as `newNormalizedModuleSymbol` except that `parents` can be a function returning a list of arbitrary + * types which get normalized into type refs and parameter bindings. 
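Both `newNormalized...Symbol` overloads above defer computing the parents until the symbol is completed, so `parentTypes` may refer back to the symbol being created. A toy sketch of that completer pattern (the names are illustrative, not compiler API):

```scala
// Toy stand-in: parents are computed from the "symbol" itself, lazily,
// mimicking the LazyType completer used above.
final class ToySym(val name: String, parentsOf: ToySym => List[String]):
  lazy val parents: List[String] = parentsOf(this)   // evaluated once, on first access

@main def completerDemo(): Unit =
  val cls = ToySym("Synthetic", self => List("Object", "Marker_" + self.name))
  assert(cls.parents == List("Object", "Marker_Synthetic"))   // first parent must be a class
```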
+ */ + def newNormalizedModuleSymbol( + owner: Symbol, + name: TermName, + modFlags: FlagSet, + clsFlags: FlagSet, + parentTypes: ClassSymbol => List[Type], + decls: Scope, + privateWithin: Symbol, + coord: Coord, + compUnitInfo: CompilationUnitInfo | Null)(using Context): TermSymbol = { + def completer(module: Symbol) = new LazyType { + def complete(denot: SymDenotation)(using Context): Unit = { + val cls = denot.asClass.classSymbol + val decls = newScope + val parents = parentTypes(cls).map(_.dealias) + assert(parents.nonEmpty && !parents.head.typeSymbol.is(dotc.core.Flags.Trait), "First parent must be a class") + denot.info = ClassInfo(owner.thisType, cls, parents, decls, TermRef(owner.thisType, module)) + } + } + newModuleSymbol( + owner, name, modFlags, clsFlags, + (module, modcls) => completer(module), + privateWithin, coord, compUnitInfo) + } + /** Create a package symbol with associated package class * from its non-info fields and a lazy type for loading the package's members. */ diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index cc0471d40213..210e7f12b4b4 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -23,8 +23,10 @@ import typer.Applications.productSelectorTypes import reporting.trace import annotation.constructorOnly import cc.* +import Capabilities.Capability import NameKinds.WildcardParamName import MatchTypes.isConcrete +import scala.util.boundary, boundary.break /** Provides methods to compare types. */ @@ -47,16 +49,19 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling monitored = false GADTused = false opaquesUsed = false - openedExistentials = Nil - assocExistentials = Nil recCount = 0 needsGc = false + maxErrorLevel = -1 + errorNotes = Nil if Config.checkTypeComparerReset then checkReset() private var pendingSubTypes: util.MutableSet[(Type, Type)] | Null = null private var recCount = 0 private var monitored = false + private var maxErrorLevel: Int = -1 + protected var errorNotes: List[(Int, ErrorNote)] = Nil + private var needsGc = false private var canCompareAtoms: Boolean = true // used for internal consistency checking @@ -67,18 +72,6 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling /** Indicates whether the subtype check used opaque types */ private var opaquesUsed: Boolean = false - /** In capture checking: The existential types that are open because they - * appear in an existential type on the left in an enclosing comparison. - */ - private var openedExistentials: List[TermParamRef] = Nil - - /** In capture checking: A map from existential types that are appear - * in an existential type on the right in an enclosing comparison. - * Each existential gets mapped to the opened existentials to which it - * may resolve at this point. 
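`testSubType` above now reports failures as `CompareResult.Fail(...)` carrying error notes instead of a bare `Fail` case. A hypothetical consumer sketch, assuming the compiler jar is on the classpath:

```scala
import dotty.tools.dotc.core.TypeComparer.CompareResult

// Hypothetical helper: summarize a CompareResult for logging.
def describe(res: CompareResult): String = res match
  case CompareResult.OK                => "subtype check succeeded"
  case CompareResult.OKwithGADTUsed    => "succeeded using GADT bounds"
  case CompareResult.OKwithOpaquesUsed => "succeeded looking through opaque types"
  case CompareResult.Fail(notes)       => s"failed with ${notes.size} error note(s)"
```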
- */ - private var assocExistentials: ExAssoc = Nil - private var myInstance: TypeComparer = this def currentInstance: TypeComparer = myInstance @@ -161,7 +154,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling def testSubType(tp1: Type, tp2: Type): CompareResult = GADTused = false opaquesUsed = false - if !topLevelSubType(tp1, tp2) then CompareResult.Fail + if !topLevelSubType(tp1, tp2) then CompareResult.Fail(Nil) else if GADTused then CompareResult.OKwithGADTUsed else if opaquesUsed then CompareResult.OKwithOpaquesUsed // we cast on GADTused, so handles if both are used else CompareResult.OK @@ -270,7 +263,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling report.log(explained(_.isSubType(tp1, tp2, approx), short = false)) } // Eliminate LazyRefs before checking whether we have seen a type before - val normalize = new TypeMap with CaptureSet.IdempotentCaptRefMap { + val normalize = new TypeMap { val DerefLimit = 10 var derefCount = 0 def apply(t: Type) = t match { @@ -335,10 +328,10 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling // This is safe because X$ self-type is X.type sym1 = sym1.companionModule if (sym1 ne NoSymbol) && (sym1 eq sym2) then - ctx.erasedTypes || - sym1.isStaticOwner || - isSubPrefix(tp1.prefix, tp2.prefix) || - thirdTryNamed(tp2) + ctx.erasedTypes + || sym1.isStaticOwner + || isSubPrefix(tp1.prefix, tp2.prefix) + || thirdTryNamed(tp2) else (tp1.name eq tp2.name) && !sym1.is(Private) @@ -440,7 +433,8 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling if (tp1.prefix.isStable) return tryLiftedToThis1 case _ => if isCaptureVarComparison then - return subCaptures(tp1.captureSet, tp2.captureSet, frozenConstraint).isOK + return CCState.withCapAsRoot: + subCaptures(tp1.captureSet, tp2.captureSet) if (tp1 eq NothingType) || isBottom(tp1) then return true } @@ -548,7 +542,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling case tp1 @ CapturingType(parent1, refs1) => def compareCapturing = if tp2.isAny then true - else if subCaptures(refs1, tp2.captureSet, frozenConstraint).isOK && sameBoxed(tp1, tp2, refs1) + else if subCaptures(refs1, tp2.captureSet) && sameBoxed(tp1, tp2, refs1) || !ctx.mode.is(Mode.CheckBoundsOrSelfType) && tp1.isAlwaysPure then val tp2a = @@ -565,8 +559,6 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling if reduced.exists then recur(reduced, tp2) && recordGadtUsageIf { MatchType.thatReducesUsingGadt(tp1) } else thirdTry - case Existential(boundVar, tp1unpacked) => - compareExistentialLeft(boundVar, tp1unpacked, tp2) case _: FlexType => true case _ => @@ -591,7 +583,8 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling && (isBottom(tp1) || GADTusage(tp2.symbol)) if isCaptureVarComparison then - return subCaptures(tp1.captureSet, tp2.captureSet, frozenConstraint).isOK + return CCState.withCapAsRoot: + subCaptures(tp1.captureSet, tp2.captureSet) isSubApproxHi(tp1, info2.lo) && (trustBounds || isSubApproxHi(tp1, info2.hi)) || compareGADT @@ -651,8 +644,6 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling thirdTryNamed(tp2) case tp2: TypeParamRef => compareTypeParamRef(tp2) - case Existential(boundVar, tp2unpacked) => - compareExistentialRight(tp1, boundVar, tp2unpacked) case tp2: RefinedType => def compareRefinedSlow: Boolean = val name2 = tp2.refinedName @@ -678,12 +669,12 @@ class 
TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling && isSubInfo(info1.resultType, info2.resultType.subst(info2, info1)) case (info1 @ CapturingType(parent1, refs1), info2: Type) if info2.stripCapturing.isInstanceOf[MethodOrPoly] => - subCaptures(refs1, info2.captureSet, frozenConstraint).isOK && sameBoxed(info1, info2, refs1) + subCaptures(refs1, info2.captureSet) && sameBoxed(info1, info2, refs1) && isSubInfo(parent1, info2) case (info1: Type, CapturingType(parent2, refs2)) if info1.stripCapturing.isInstanceOf[MethodOrPoly] => val refs1 = info1.captureSet - (refs1.isAlwaysEmpty || subCaptures(refs1, refs2, frozenConstraint).isOK) && sameBoxed(info1, info2, refs1) + (refs1.isAlwaysEmpty || subCaptures(refs1, refs2)) && sameBoxed(info1, info2, refs1) && isSubInfo(info1, parent2) case _ => isSubType(info1, info2) @@ -871,18 +862,18 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling else // The singletonOK branch is because we sometimes have a larger capture set in a singleton // than in its underlying type. An example is `f: () -> () ->{x} T`, which might be - // the type of a closure. In that case the capture set of `f.type` is `{x}` but the - // capture set of the underlying type is `{}`. So without the `singletonOK` test, a singleton - // might not be a subtype of its underlying type. Examples where this arises is - // capt-capibility.scala and function-combinators.scala + // the type of a closure (in one of the variants we are considering). In that case the + // capture set of `f.type` is `{x}` but the capture set of the underlying type is `{}`. + // So without the `singletonOK` test, a singleton might not be a subtype of its underlying type. + // Eamples where this arises is capt-capibility.scala and function-combinators.scala val singletonOK = tp1 match case tp1: SingletonType - if subCaptures(tp1.underlying.captureSet, refs2, frozen = true).isOK => + if subCaptures(tp1.underlying.captureSet, refs2, CaptureSet.VarState.Separate) => recur(tp1.widen, tp2) case _ => false singletonOK - || subCaptures(refs1, refs2, frozenConstraint).isOK + || subCaptures(refs1, refs2) && sameBoxed(tp1, tp2, refs1) && (recur(tp1.widen.stripCapturing, parent2) || tp1.isInstanceOf[SingletonType] && recur(tp1, parent2) @@ -1019,7 +1010,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling if isCaptureCheckingOrSetup then tp1 .match - case tp1: CaptureRef if tp1.isTracked => + case tp1: Capability if tp1.isTracked => CapturingType(tp1w.stripCapturing, tp1.singletonCaptureSet) case _ => tp1w @@ -1028,6 +1019,11 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling comparePaths || isSubType(tp1widened, tp2, approx.addLow) case tp1: RefinedType => + if isCaptureCheckingOrSetup then + tp2.stripCapturing match + case defn.RefinedFunctionOf(_) => // was already handled in thirdTry + return false + case _ => isNewSubType(tp1.parent) case tp1: RecType => isNewSubType(tp1.parent) @@ -1594,10 +1590,9 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling ctx.gadtState.restore(savedGadt) val savedSuccessCount = successCount try - recCount += 1 - if recCount >= Config.LogPendingSubTypesThreshold then monitored = true - val result = if monitored then monitoredIsSubType else firstTry - recCount -= 1 + val result = inNestedLevel: + if recCount >= Config.LogPendingSubTypesThreshold then monitored = true + if monitored then monitoredIsSubType else firstTry if !result then restore() 
else if recCount == 0 && needsGc then state.gc() @@ -1612,6 +1607,32 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling throw ex } + /** Run `op` in a recursion level (indicated by `recCount`) increased by one. + * This affects when monitoring starts and how error notes are propagated. + * On exit, error notes added at the current level are either + * - promoted to the next outer level (in case of failure), + * - cancelled (in case of success). + */ + inline def inNestedLevel(inline op: Boolean): Boolean = + recCount += 1 + val result = op + recCount -= 1 + if maxErrorLevel > recCount then + if result then + maxErrorLevel = -1 + errorNotes = errorNotes.filterConserve: p => + val (level, note) = p + if level <= recCount then + if level > maxErrorLevel then maxErrorLevel = level + true + else false + else + errorNotes = errorNotes.mapConserve: p => + val (level, note) = p + if level > recCount then (recCount, note) else p + maxErrorLevel = recCount + result + private def nonExprBaseType(tp: Type, cls: Symbol)(using Context): Type = if tp.isInstanceOf[ExprType] then NoType else tp.baseType(cls) @@ -2070,6 +2091,45 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling else op2 end necessaryEither + /** Finds the necessary (the weakest) GADT constraint among a list of them. + * It returns the one that is subsumed by all the others, if it exists, and `None` otherwise. + * + * This is used when typechecking pattern alternatives, for instance: + * + * enum Expr[+T]: + * case I1(x: Int) extends Expr[Int] + * case I2(x: Int) extends Expr[Int] + * case B(x: Boolean) extends Expr[Boolean] + * import Expr.* + * + * The following function should compile: + * + * def foo[T](e: Expr[T]): T = e match + * case I1(_) | I2(_) => 42 + * + * since `T >: Int` is subsumed by both alternatives in the first match clause. + * + * However, the following should not: + * + * def foo[T](e: Expr[T]): T = e match + * case I1(_) | B(_) => 42 + * + * since the `I1(_)` case gives the constraint `T >: Int` while `B(_)` gives `T >: Boolean`. + * Neither of the constraints is subsumed by the other. + */ + def necessaryGadtConstraint(constrs: List[GadtConstraint], preGadt: GadtConstraint)(using Context): Option[GadtConstraint] = boundary: + constrs match + case Nil => break(None) + case c0 :: constrs => + var weakest = c0 + for c <- constrs do + if subsumes(weakest.constraint, c.constraint, preGadt.constraint) then + weakest = c + else if !subsumes(c.constraint, weakest.constraint, preGadt.constraint) then + // these two constraints are disjoint + break(None) + break(Some(weakest)) + inline def rollbackConstraintsUnless(inline op: Boolean): Boolean = val saved = constraint var result = false @@ -2174,7 +2234,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling val info2 = tp2.refinedInfo val isExpr2 = info2.isInstanceOf[ExprType] var info1 = m.info match - case info1: ValueType if isExpr2 || m.symbol.is(Mutable) => + case info1: ValueType if isExpr2 || m.symbol.isMutableVarOrAccessor => // OK: { val x: T } <: { def x: T } // OK: { var x: T } <: { def x: T } // NO: { var x: T } <: { val x: T } @@ -2806,119 +2866,24 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling // ----------- Capture checking ----------------------------------------------- - /** A type associating instantiatable existentials on the right of a comparison - * with the existentials they can be instantiated with.
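A runnable version of the example from the `necessaryGadtConstraint` doc comment above. Per that comment, `foo` should be accepted with this change because both alternatives yield the constraint `T >: Int`, while the commented-out variant remains rejected:

```scala
enum Expr[+T]:
  case I1(x: Int) extends Expr[Int]
  case I2(x: Int) extends Expr[Int]
  case B(x: Boolean) extends Expr[Boolean]
import Expr.*

// Accepted: both alternatives constrain T in the same way (T >: Int), which is
// therefore the necessary constraint for the case body.
// (The match is deliberately partial, exactly as in the doc comment above.)
def foo[T](e: Expr[T]): T = e match
  case I1(_) | I2(_) => 42

// Still rejected (kept as a comment): I1(_) gives T >: Int, B(_) gives T >: Boolean,
// and neither constraint subsumes the other.
// def bar[T](e: Expr[T]): T = e match
//   case I1(_) | B(_) => 42
```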
- */ - type ExAssoc = List[(TermParamRef, List[TermParamRef])] - - private def compareExistentialLeft(boundVar: TermParamRef, tp1unpacked: Type, tp2: Type)(using Context): Boolean = - val saved = openedExistentials - try - openedExistentials = boundVar :: openedExistentials - recur(tp1unpacked, tp2) - finally - openedExistentials = saved - - private def compareExistentialRight(tp1: Type, boundVar: TermParamRef, tp2unpacked: Type)(using Context): Boolean = - val saved = assocExistentials - try - assocExistentials = (boundVar, openedExistentials) :: assocExistentials - recur(tp1, tp2unpacked) - finally - assocExistentials = saved - - /** Is `tp1` an existential var that subsumes `tp2`? This is the case if `tp1` is - * instantiatable (i.e. it's a key in `assocExistentials`) and one of the - * following is true: - * - `tp2` is not an existential var, - * - `tp1` is associated via `assocExistentials` with `tp2`, - * - `tp2` appears as key in `assocExistentials` further out than `tp1`. - * The third condition allows to instantiate c2 to c1 in - * EX c1: A -> Ex c2. B - */ - def subsumesExistentially(tp1: TermParamRef, tp2: CaptureRef)(using Context): Boolean = - def canInstantiateWith(assoc: ExAssoc): Boolean = assoc match - case (bv, bvs) :: assoc1 => - if bv == tp1 then - !Existential.isExistentialVar(tp2) - || bvs.contains(tp2) - || assoc1.exists(_._1 == tp2) - else - canInstantiateWith(assoc1) - case Nil => - false - Existential.isExistentialVar(tp1) && canInstantiateWith(assocExistentials) - - def isOpenedExistential(ref: CaptureRef)(using Context): Boolean = - openedExistentials.contains(ref) - - /** bi-map taking existentials to the left of a comparison to matching - * existentials on the right. This is not a bijection. However - * we have `forwards(backwards(bv)) == bv` for an existentially bound `bv`. - * That's enough to qualify as a BiTypeMap. - */ - private class MapExistentials(assoc: ExAssoc)(using Context) extends BiTypeMap: - - private def bad(t: Type) = - Existential.badExistential - .showing(i"existential match not found for $t in $assoc", capt) - - def apply(t: Type) = t match - case t: TermParamRef if Existential.isExistentialVar(t) => - // Find outermost existential on the right that can be instantiated to `t`, - // or `badExistential` if none exists. - def findMapped(assoc: ExAssoc): CaptureRef = assoc match - case (bv, assocBvs) :: assoc1 => - val outer = findMapped(assoc1) - if !Existential.isBadExistential(outer) then outer - else if assocBvs.contains(t) then bv - else bad(t) - case Nil => - bad(t) - findMapped(assoc) - case _ => - mapOver(t) + protected def makeVarState() = + if frozenConstraint then CaptureSet.VarState.Closed() else CaptureSet.VarState() - /** The inverse takes existentials on the right to the innermost existential - * on the left to which they can be instantiated. 
- */ - lazy val inverse = new BiTypeMap: - def apply(t: Type) = t match - case t: TermParamRef if Existential.isExistentialVar(t) => - assoc.find(_._1 == t) match - case Some((_, bvs)) if bvs.nonEmpty => bvs.head - case _ => bad(t) - case _ => - mapOver(t) - - def inverse = MapExistentials.this - override def toString = "MapExistentials.inverse" - end inverse - end MapExistentials - - protected def subCaptures(refs1: CaptureSet, refs2: CaptureSet, frozen: Boolean)(using Context): CaptureSet.CompareResult = + protected def makeVarState() = + if frozenConstraint then CaptureSet.VarState.Closed() else CaptureSet.VarState() + + protected def subCaptures(refs1: CaptureSet, refs2: CaptureSet, + vs: CaptureSet.VarState = makeVarState())(using Context): Boolean = try - if assocExistentials.isEmpty then - refs1.subCaptures(refs2, frozen) - else - val mapped = refs1.map(MapExistentials(assocExistentials)) - if mapped.elems.exists(Existential.isBadExistential) - then CaptureSet.CompareResult.Fail(refs2 :: Nil) - else subCapturesMapped(mapped, refs2, frozen) + refs1.subCaptures(refs2, vs) catch case ex: AssertionError => println(i"fail while subCaptures $refs1 <:< $refs2") throw ex - protected def subCapturesMapped(refs1: CaptureSet, refs2: CaptureSet, frozen: Boolean)(using Context): CaptureSet.CompareResult = - refs1.subCaptures(refs2, frozen) - /** Is the boxing status of tp1 and tp2 the same, or alternatively, is * the capture sets `refs1` of `tp1` a subcapture of the empty set? * In the latter case, boxing status does not matter. */ protected def sameBoxed(tp1: Type, tp2: Type, refs1: CaptureSet)(using Context): Boolean = (tp1.isBoxedCapturing == tp2.isBoxedCapturing) - || refs1.subCaptures(CaptureSet.empty, frozenConstraint).isOK + || refs1.subCaptures(CaptureSet.empty, makeVarState()) // ----------- Diagnostics -------------------------------------------------- @@ -3324,12 +3289,50 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling def reduceMatchWith[T](op: MatchReducer => T)(using Context): T = inSubComparer(matchReducer)(op) + + /** Add the given ErrorNote `note`, provided there is not yet an error note with + * the same class as `note`. + */ + def addErrorNote(note: ErrorNote): Unit = + if errorNotes.forall(_._2.kind != note.kind) then + errorNotes = (recCount, note) :: errorNotes + assert(maxErrorLevel <= recCount) + maxErrorLevel = recCount + + private[TypeComparer] inline + def isolated[T](inline op: Boolean, inline mapResult: Boolean => T)(using Context): T = + val savedNotes = errorNotes + val savedLevel = maxErrorLevel + errorNotes = Nil + maxErrorLevel = -1 + try mapResult(op) + finally + errorNotes = savedNotes + maxErrorLevel = savedLevel + + /** Run `op` on the current type comparer, mapping its Boolean result to + * a `CompareResult` with possible outcomes `OK` and `Fail(...)`. In case + * of failure, pass the accumulated errorNotes of this type comparer + * in the `Fail` value. + */ + def compareResult(op: => Boolean)(using Context): CompareResult = + isolated(op, res => + if res then CompareResult.OK else CompareResult.Fail(errorNotes.map(_._2))) } object TypeComparer { + /** A base trait for data producing addenda to error messages */ + trait ErrorNote: + /** A discriminating kind. An error note is not added if it has the same kind + * as an already existing error note. + */ + def kind: Class[?] = getClass + + /** A richer compare result, returned by `testSubType` and `test`.
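A standalone, simplified sketch of the level-tagged bookkeeping that `inNestedLevel`, `addErrorNote` and `isolated` implement above: notes recorded inside a nested comparison are promoted outward when the comparison fails and dropped when it succeeds. `Note` and `NoteStack` are stand-ins, not compiler classes:

```scala
final case class Note(msg: String)

final class NoteStack:
  private var recCount = 0
  private var notes: List[(Int, Note)] = Nil
  def add(n: Note): Unit = notes = (recCount, n) :: notes
  def current: List[Note] = notes.map(_._2)
  def inNestedLevel(op: NoteStack => Boolean): Boolean =
    recCount += 1
    val result = op(this)
    recCount -= 1
    notes =
      if result then notes.filter(_._1 <= recCount)        // success: drop notes of the nested level
      else notes.map((lvl, n) => (lvl.min(recCount), n))    // failure: promote nested notes outward
    result

@main def noteDemo(): Unit =
  val s = NoteStack()
  s.inNestedLevel { st => st.add(Note("why the nested check failed")); false }
  assert(s.current.map(_.msg) == List("why the nested check failed"))   // kept and promoted
  s.inNestedLevel { st => st.add(Note("discarded on success")); true }
  assert(s.current.map(_.msg) == List("why the nested check failed"))   // success cancels its own notes
```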
*/ enum CompareResult: - case OK, Fail, OKwithGADTUsed, OKwithOpaquesUsed + case OK, OKwithGADTUsed, OKwithOpaquesUsed + case Fail(errorNotes: List[ErrorNote]) /** Class for unification variables used in `natValue`. */ private class AnyConstantType extends UncachedGroundType with ValueType { @@ -3341,7 +3344,6 @@ object TypeComparer { else res match case ClassInfo(_, cls, _, _, _) => cls.showLocated case bounds: TypeBounds => i"type bounds [$bounds]" - case CaptureSet.CompareResult.OK => "OK" case res: printing.Showable => res.show case _ => String.valueOf(res).nn @@ -3487,6 +3489,9 @@ object TypeComparer { def constrainPatternType(pat: Type, scrut: Type, forceInvariantRefinement: Boolean = false)(using Context): Boolean = comparing(_.constrainPatternType(pat, scrut, forceInvariantRefinement)) + def necessaryGadtConstraint(constrs: List[GadtConstraint], preGadt: GadtConstraint)(using Context): Option[GadtConstraint] = + comparing(_.necessaryGadtConstraint(constrs, preGadt)) + def explained[T](op: ExplainingTypeComparer => T, header: String = "Subtype trace:", short: Boolean = false)(using Context): String = comparing(_.explained(op, header, short)) @@ -3496,14 +3501,25 @@ object TypeComparer { def reduceMatchWith[T](op: MatchReducer => T)(using Context): T = comparing(_.reduceMatchWith(op)) - def subCaptures(refs1: CaptureSet, refs2: CaptureSet, frozen: Boolean)(using Context): CaptureSet.CompareResult = - comparing(_.subCaptures(refs1, refs2, frozen)) + def subCaptures(refs1: CaptureSet, refs2: CaptureSet, vs: CaptureSet.VarState)(using Context): Boolean = + comparing(_.subCaptures(refs1, refs2, vs)) + + def inNestedLevel(op: => Boolean)(using Context): Boolean = + comparer.inNestedLevel(op) + + def addErrorNote(note: ErrorNote)(using Context): Unit = + comparer.addErrorNote(note) - def subsumesExistentially(tp1: TermParamRef, tp2: CaptureRef)(using Context) = - comparing(_.subsumesExistentially(tp1, tp2)) + def updateErrorNotes(f: PartialFunction[ErrorNote, ErrorNote])(using Context): Unit = + comparer.errorNotes = comparer.errorNotes.mapConserve: p => + val (level, note) = p + if f.isDefinedAt(note) then (level, f(note)) else p - def isOpenedExistential(ref: CaptureRef)(using Context) = - comparing(_.isOpenedExistential(ref)) + def compareResult(op: => Boolean)(using Context): CompareResult = + comparing(_.compareResult(op)) + + inline def noNotes(inline op: Boolean)(using Context): Boolean = + comparer.isolated(op, x => x) } object MatchReducer: @@ -3908,9 +3924,11 @@ class ExplainingTypeComparer(initctx: Context, short: Boolean) extends TypeCompa private val b = new StringBuilder private var lastForwardGoal: String | Null = null - private def appendFailure(x: String) = + private def appendFailure(notes: List[ErrorNote]) = if lastForwardGoal != null then // last was deepest goal that failed - b.append(s" = $x") + b.append(s" = false") + for case note: printing.Showable <- notes do + b.append(i": $note") lastForwardGoal = null override def traceIndented[T](str: String)(op: => T): T = @@ -3926,9 +3944,9 @@ class ExplainingTypeComparer(initctx: Context, short: Boolean) extends TypeCompa if short then res match case false => - appendFailure("false") - case res: CaptureSet.CompareResult if res != CaptureSet.CompareResult.OK => - appendFailure(show(res)) + appendFailure(errorNotes.map(_._2)) + case CompareResult.Fail(notes) => + appendFailure(notes) case _ => b.length = curLength // don't show successful subtraces else @@ -3978,14 +3996,9 @@ class ExplainingTypeComparer(initctx: Context, 
short: Boolean) extends TypeCompa super.gadtAddBound(sym, b, isUpper) } - override def subCaptures(refs1: CaptureSet, refs2: CaptureSet, frozen: Boolean)(using Context): CaptureSet.CompareResult = - traceIndented(i"subcaptures $refs1 <:< $refs2 ${if frozen then "frozen" else ""}") { - super.subCaptures(refs1, refs2, frozen) - } - - override def subCapturesMapped(refs1: CaptureSet, refs2: CaptureSet, frozen: Boolean)(using Context): CaptureSet.CompareResult = - traceIndented(i"subcaptures mapped $refs1 <:< $refs2 ${if frozen then "frozen" else ""}") { - super.subCapturesMapped(refs1, refs2, frozen) + override def subCaptures(refs1: CaptureSet, refs2: CaptureSet, vs: CaptureSet.VarState)(using Context): Boolean = + traceIndented(i"subcaptures $refs1 <:< $refs2 in ${vs.toString}") { + super.subCaptures(refs1, refs2, vs) } def lastTrace(header: String): String = header + { try b.toString finally b.clear() } diff --git a/compiler/src/dotty/tools/dotc/core/TypeErasure.scala b/compiler/src/dotty/tools/dotc/core/TypeErasure.scala index 4c705c4252c0..0365b205c5b6 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeErasure.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeErasure.scala @@ -364,6 +364,8 @@ object TypeErasure { case tp: MatchType => val alts = tp.alternatives alts.nonEmpty && !fitsInJVMArray(alts.reduce(OrType(_, _, soft = true))) + case tp @ AppliedType(tycon, _) if tycon.isLambdaSub => + !fitsInJVMArray(tp.translucentSuperType) case tp: TypeProxy => isGenericArrayElement(tp.translucentSuperType, isScala2) case tp: AndType => @@ -781,11 +783,11 @@ class TypeErasure(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConst private def eraseArray(tp: Type)(using Context) = { val defn.ArrayOf(elemtp) = tp: @unchecked - if isGenericArrayElement(elemtp, isScala2 = sourceLanguage.isScala2) then + if isGenericArrayElement(elemtp, isScala2 = sourceLanguage.isScala2) then defn.ObjectType else if sourceLanguage.isScala2 && (elemtp.hiBound.isNullType || elemtp.hiBound.isNothingType) then JavaArrayType(defn.ObjectType) - else + else try erasureFn(sourceLanguage, semiEraseVCs = false, isConstructor, isSymbol, inSigName)(elemtp) match case _: WildcardType => WildcardType case elem => JavaArrayType(elem) diff --git a/compiler/src/dotty/tools/dotc/core/TypeErrors.scala b/compiler/src/dotty/tools/dotc/core/TypeErrors.scala index 4761beae8bd0..1b6156ee74e8 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeErrors.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeErrors.scala @@ -32,7 +32,7 @@ abstract class TypeError(using creationContext: Context) extends Exception(""): || ctx.settings.YdebugCyclic.value override def fillInStackTrace(): Throwable = - if computeStackTrace then super.fillInStackTrace().nn + if computeStackTrace then super.fillInStackTrace() else this /** Convert to message. This takes an additional Context, so that we @@ -246,7 +246,7 @@ class UnpicklingError(denot: Denotation, where: String, cause: Throwable)(using case cause: UnpicklingError => "" case _ => if ctx.settings.YdebugUnpickling.value then - cause.getStackTrace().nn.mkString("\n ", "\n ", "") + cause.getStackTrace().mkString("\n ", "\n ", "") else "\n\nRun with -Ydebug-unpickling to see full stack trace." em"""Could not read definition $denot$where. 
Caused by the following exception: |$cause$debugUnpickling""" diff --git a/compiler/src/dotty/tools/dotc/core/TypeEval.scala b/compiler/src/dotty/tools/dotc/core/TypeEval.scala index 03821ad4812a..b7995b1ffba2 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeEval.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeEval.scala @@ -94,7 +94,7 @@ object TypeEval: val result = try op catch case e: Throwable => - throw TypeError(em"${e.getMessage.nn}") + throw TypeError(em"${e.getMessage}") ConstantType(Constant(result)) def fieldsOf: Option[Type] = diff --git a/compiler/src/dotty/tools/dotc/core/TypeOps.scala b/compiler/src/dotty/tools/dotc/core/TypeOps.scala index a7f41a71d7ce..4909602915d3 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeOps.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeOps.scala @@ -18,8 +18,9 @@ import typer.ForceDegree import typer.Inferencing.* import typer.IfBottom import reporting.TestingReporter +import Annotations.Annotation import cc.{CapturingType, derivedCapturingType, CaptureSet, captureSet, isBoxed, isBoxedCapturing} -import CaptureSet.{CompareResult, IdempotentCaptRefMap, IdentityCaptRefMap} +import CaptureSet.{IdentityCaptRefMap, VarState} import scala.annotation.internal.sharable import scala.annotation.threadUnsafe @@ -56,7 +57,7 @@ object TypeOps: } /** The TypeMap handling the asSeenFrom */ - class AsSeenFromMap(pre: Type, cls: Symbol)(using Context) extends ApproximatingTypeMap, IdempotentCaptRefMap { + class AsSeenFromMap(pre: Type, cls: Symbol)(using Context) extends ApproximatingTypeMap { /** The number of range approximations in invariant or contravariant positions * performed by this TypeMap. @@ -161,7 +162,7 @@ object TypeOps: TypeComparer.lub(simplify(l, theMap), simplify(r, theMap), isSoft = tp.isSoft) case tp @ CapturingType(parent, refs) => if !ctx.mode.is(Mode.Type) - && refs.subCaptures(parent.captureSet, frozen = true).isOK + && refs.subCaptures(parent.captureSet, VarState.Separate) && (tp.isBoxed || !parent.isBoxedCapturing) // fuse types with same boxed status and outer boxed with any type then @@ -180,7 +181,7 @@ object TypeOps: if (normed.exists) simplify(normed, theMap) else mapOver case tp: MethodicType => // See documentation of `Types#simplified` - val addTypeVars = new TypeMap with IdempotentCaptRefMap: + val addTypeVars = new TypeMap: val constraint = ctx.typerState.constraint def apply(t: Type): Type = t match case t: TypeParamRef => constraint.typeVarOfParam(t).orElse(t) @@ -278,7 +279,15 @@ object TypeOps: } case AndType(tp11, tp12) => mergeRefinedOrApplied(tp11, tp2) & mergeRefinedOrApplied(tp12, tp2) - case tp1: TypeParamRef if tp1 == tp2 => tp1 + case tp1: TypeParamRef => + tp2.stripTypeVar match + case tp2: TypeParamRef if tp1 == tp2 => tp1 + case _ => fail + case tp1: TypeVar => + tp2 match + case tp2: TypeVar if tp1 == tp2 => tp1 + case tp2: TypeParamRef if tp1.stripTypeVar == tp2 => tp2 + case _ => fail case _ => fail } } @@ -448,7 +457,7 @@ object TypeOps: } /** An approximating map that drops NamedTypes matching `toAvoid` and wildcard types. 
*/ - abstract class AvoidMap(using Context) extends AvoidWildcardsMap, IdempotentCaptRefMap: + abstract class AvoidMap(using Context) extends AvoidWildcardsMap: @threadUnsafe lazy val localParamRefs = util.HashSet[Type]() def toAvoid(tp: NamedType): Boolean @@ -936,6 +945,28 @@ object TypeOps: class StripTypeVarsMap(using Context) extends TypeMap: def apply(tp: Type) = mapOver(tp).stripTypeVar + /** Map no-flip covariant occurrences of `into[T]` to `T @$into` */ + def suppressInto(using Context) = new FollowAliasesMap: + def apply(t: Type): Type = t match + case AppliedType(tycon: TypeRef, arg :: Nil) if variance >= 0 && defn.isInto(tycon.symbol) => + AnnotatedType(arg, Annotation(defn.SilentIntoAnnot, util.Spans.NoSpan)) + case _: MatchType | _: LazyRef => + t + case _ => + mapFollowingAliases(t) + + /** Map no-flip covariant occurrences of `T @$into` to `into[T]` */ + def revealInto(using Context) = new FollowAliasesMap: + def apply(t: Type): Type = t match + case AnnotatedType(t1, ann) if variance >= 0 && ann.symbol == defn.SilentIntoAnnot => + AppliedType( + defn.ConversionModule.termRef.select(defn.Conversion_into), // the external reference to the opaque type + t1 :: Nil) + case _: MatchType | _: LazyRef => + t + case _ => + mapFollowingAliases(t) + /** Apply [[Type.stripTypeVar]] recursively. */ def stripTypeVars(tp: Type)(using Context): Type = new StripTypeVarsMap().apply(tp) diff --git a/compiler/src/dotty/tools/dotc/core/TypeUtils.scala b/compiler/src/dotty/tools/dotc/core/TypeUtils.scala index 739cc2b74a16..eb526c2b4d85 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeUtils.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeUtils.scala @@ -53,6 +53,11 @@ class TypeUtils: case ps => ps.reduceLeft(AndType(_, _)) } + def widenSkolems(using Context): Type = + val widenSkolemsMap = new TypeMap: + def apply(tp: Type) = mapOver(tp.widenSkolem) + widenSkolemsMap(self) + /** The element types of this tuple type, which can be made up of EmptyTuple, TupleX and `*:` pairs */ def tupleElementTypes(using Context): Option[List[Type]] = @@ -68,7 +73,7 @@ class TypeUtils: def tupleElementTypesUpTo(bound: Int, normalize: Boolean = true)(using Context): Option[List[Type]] = def recur(tp: Type, bound: Int): Option[List[Type]] = if bound < 0 then Some(Nil) - else (if normalize then tp.normalized else tp).dealias match + else (if normalize then tp.dealias.normalized else tp).dealias match case AppliedType(tycon, hd :: tl :: Nil) if tycon.isRef(defn.PairClass) => recur(tl, bound - 1).map(hd :: _) case tp: AppliedType if defn.isTupleNType(tp) && normalize => @@ -134,7 +139,7 @@ class TypeUtils: case t => throw TypeError(em"Malformed NamedTuple: names must be string types, but $t was found.") val values = vals.tupleElementTypesUpTo(bound, normalize).getOrElse(Nil) names.zip(values) - + (if normalize then self.normalized else self).dealias match // for desugaring and printer, ignore derived types to avoid infinite recursion in NamedTuple.unapply case defn.NamedTupleDirect(nmes, vals) => extractNamesTypes(nmes, vals) diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 7c0c89da97ee..b06bd5c00a28 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -39,11 +39,11 @@ import reporting.{trace, Message} import java.lang.ref.WeakReference import compiletime.uninitialized import cc.* -import CaptureSet.{CompareResult, IdempotentCaptRefMap, IdentityCaptRefMap} +import 
CaptureSet.IdentityCaptRefMap +import Capabilities.* import scala.annotation.internal.sharable import scala.annotation.threadUnsafe -import dotty.tools.dotc.cc.ccConfig object Types extends TypeUtils { @@ -406,16 +406,15 @@ object Types extends TypeUtils { case tp: AndOrType => tp.tp1.unusableForInference || tp.tp2.unusableForInference case tp: LambdaType => tp.resultType.unusableForInference || tp.paramInfos.exists(_.unusableForInference) case WildcardType(optBounds) => optBounds.unusableForInference - case CapturingType(parent, refs) => parent.unusableForInference || refs.elems.exists(_.unusableForInference) + case CapturingType(parent, refs) => parent.unusableForInference || refs.elems.exists(_.coreType.unusableForInference) case _: ErrorType => true case _ => false catch case ex: Throwable => handleRecursive("unusableForInference", show, ex) /** Does the type carry an annotation that is an instance of `cls`? */ - @tailrec final def hasAnnotation(cls: ClassSymbol)(using Context): Boolean = stripTypeVar match { - case AnnotatedType(tp, annot) => (annot matches cls) || (tp hasAnnotation cls) + @tailrec final def hasAnnotation(cls: ClassSymbol)(using Context): Boolean = stripTypeVar match + case AnnotatedType(tp, annot) => annot.matches(cls) || tp.hasAnnotation(cls) case _ => false - } /** Returns the annotation that is an instance of `cls` carried by the type. */ @tailrec final def getAnnotation(cls: ClassSymbol)(using Context): Option[Annotation] = stripTypeVar match { @@ -447,11 +446,34 @@ object Types extends TypeUtils { def isRepeatedParam(using Context): Boolean = typeSymbol eq defn.RepeatedParamClass - /** Is this a parameter type that allows implicit argument converson? */ + /** Is this type of the form `compiletime.into[T]`, which means it can be the + * target of an implicit converson without requiring a language import? + */ def isInto(using Context): Boolean = this match - case AnnotatedType(_, annot) => annot.symbol == defn.IntoParamAnnot + case AppliedType(tycon: TypeRef, arg :: Nil) => defn.isInto(tycon.symbol) case _ => false + /** Is this type a legal target type for an implicit conversion, so that + * no `implicitConversions` language import is necessary? + */ + def isConversionTargetType(using Context): Boolean = + dealias(KeepOpaques).match + case tp: TypeRef => + (tp.symbol.isClass || tp.symbol.isOpaqueAlias) && tp.symbol.is(Into) + case tp @ AppliedType(tycon, _) => + isInto || tycon.isConversionTargetType + case tp: AndOrType => + tp.tp1.isConversionTargetType && tp.tp2.isConversionTargetType + case tp: TypeVar => + false + case tp: MatchType => + val tp1 = tp.reduced + (tp1 ne tp) && tp1.isConversionTargetType + case tp: RefinedType => + tp.parent.isConversionTargetType + case _ => + false + /** Is this the type of a method that has a repeated parameter type as * last parameter type? 
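A toy model of the recursion in `isConversionTargetType` above, with a small ADT standing in for the compiler's `Type` hierarchy (purely illustrative; the `MatchType` reduction case is omitted):

```scala
enum Tp:
  case Ref(isClassOrOpaque: Boolean, hasIntoFlag: Boolean)  // TypeRef: class or opaque alias carrying the Into flag
  case Applied(tycon: Tp, isIntoApplication: Boolean)       // AppliedType, possibly into[T] itself
  case AndOr(tp1: Tp, tp2: Tp)                              // AndOrType: both sides must qualify
  case Refined(parent: Tp)                                  // RefinedType: look at the parent
  case TVar                                                 // TypeVar: never a conversion target
import Tp.*

def isConversionTarget(t: Tp): Boolean = t match
  case Ref(isClassOrOpaque, hasIntoFlag) => isClassOrOpaque && hasIntoFlag
  case Applied(tycon, isIntoApplication) => isIntoApplication || isConversionTarget(tycon)
  case AndOr(tp1, tp2)                   => isConversionTarget(tp1) && isConversionTarget(tp2)
  case Refined(parent)                   => isConversionTarget(parent)
  case TVar                              => false
```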
*/ @@ -584,8 +606,8 @@ object Types extends TypeUtils { case AndType(l, r) => val lsym = l.classSymbol val rsym = r.classSymbol - if (lsym isSubClass rsym) lsym - else if (rsym isSubClass lsym) rsym + if lsym.isSubClass(rsym) then lsym + else if rsym.isSubClass(lsym) then rsym else NoSymbol case tp: OrType => if tp.tp1.hasClassSymbol(defn.NothingClass) then @@ -724,7 +746,7 @@ object Types extends TypeUtils { case tp: TypeProxy => tp.superType.findDecl(name, excluded) case err: ErrorType => - newErrorSymbol(classSymbol orElse defn.RootClass, name, err.msg) + newErrorSymbol(classSymbol.orElse(defn.RootClass), name, err.msg) case _ => NoDenotation } @@ -815,7 +837,7 @@ object Types extends TypeUtils { case tp: JavaArrayType => defn.ObjectType.findMember(name, pre, required, excluded) case err: ErrorType => - newErrorSymbol(pre.classSymbol orElse defn.RootClass, name, err.msg) + newErrorSymbol(pre.classSymbol.orElse(defn.RootClass), name, err.msg) case _ => NoDenotation } @@ -861,20 +883,28 @@ object Types extends TypeUtils { pdenot.asSingleDenotation.derivedSingleDenotation(pdenot.symbol, jointInfo) } else - val isRefinedMethod = rinfo.isInstanceOf[MethodOrPoly] - val joint = pdenot.meet( - new JointRefDenotation(NoSymbol, rinfo, Period.allInRun(ctx.runId), pre, isRefinedMethod), - pre, - safeIntersection = ctx.base.pendingMemberSearches.contains(name)) - joint match - case joint: SingleDenotation - if isRefinedMethod - && (rinfo <:< joint.info - || name == nme.apply && defn.isFunctionType(tp.parent)) => - // use `rinfo` to keep the right parameter names for named args. See i8516.scala. - joint.derivedSingleDenotation(joint.symbol, rinfo, pre, isRefinedMethod) - case _ => - joint + val overridingRefinement = rinfo match + case AnnotatedType(rinfo1, ann) if ann.symbol == defn.RefineOverrideAnnot => rinfo1 + case _ if pdenot.symbol.is(Tracked) => rinfo + case _ => NoType + if overridingRefinement.exists then + pdenot.asSingleDenotation.derivedSingleDenotation(pdenot.symbol, overridingRefinement) + else + val isRefinedMethod = rinfo.isInstanceOf[MethodOrPoly] + val joint = CCState.withCollapsedFresh: + pdenot.meet( + new JointRefDenotation(NoSymbol, rinfo, Period.allInRun(ctx.runId), pre, isRefinedMethod), + pre, + safeIntersection = ctx.base.pendingMemberSearches.contains(name)) + joint match + case joint: SingleDenotation + if isRefinedMethod + && (rinfo <:< joint.info + || name == nme.apply && defn.isFunctionType(tp.parent)) => + // use `rinfo` to keep the right parameter names for named args. See i8516.scala. + joint.derivedSingleDenotation(joint.symbol, rinfo, pre, isRefinedMethod) + case _ => + joint } def goApplied(tp: AppliedType, tycon: HKTypeLambda) = @@ -912,7 +942,7 @@ object Types extends TypeUtils { // member in Super instead of Sub. // As an example of this in the wild, see // loadClassWithPrivateInnerAndSubSelf in ShowClassTests - go(tp.cls.typeRef) orElse d + go(tp.cls.typeRef).orElse(d) def goParam(tp: TypeParamRef) = { val next = tp.underlying @@ -1121,7 +1151,7 @@ object Types extends TypeUtils { TypeComparer.topLevelSubType(this, that) } - /** Is this type a subtype of that type? */ + /** Is this type a subtype of that type without adding to the constraint? 
*/ final def frozen_<:<(that: Type)(using Context): Boolean = { record("frozen_<:<") TypeComparer.isSubTypeWhenFrozen(this, that) @@ -1150,7 +1180,7 @@ object Types extends TypeUtils { false def relaxed_<:<(that: Type)(using Context): Boolean = - (this <:< that) || (this isValueSubType that) + (this <:< that) || this.isValueSubType(that) /** Is this type a legal type for member `sym1` that overrides another * member `sym2` of type `that`? This is the same as `<:<`, except that @@ -1160,10 +1190,9 @@ object Types extends TypeUtils { * * @param isSubType a function used for checking subtype relationships. */ - final def overrides(that: Type, matchLoosely: => Boolean, checkClassInfo: Boolean = true, - isSubType: (Type, Type) => Context ?=> Boolean = (tp1, tp2) => tp1 frozen_<:< tp2)(using Context): Boolean = { + final def overrides(that: Type, matchLoosely: => Boolean, checkClassInfo: Boolean = true)(using Context): Boolean = { !checkClassInfo && this.isInstanceOf[ClassInfo] - || isSubType(this.widenExpr, that.widenExpr) + || (this.widenExpr frozen_<:< that.widenExpr) || matchLoosely && { val this1 = this.widenNullaryMethod val that1 = that.widenNullaryMethod @@ -1201,10 +1230,10 @@ object Types extends TypeUtils { * vice versa. */ def matchesLoosely(that: Type)(using Context): Boolean = - (this matches that) || { + this.matches(that) || { val thisResult = this.widenExpr val thatResult = that.widenExpr - (this eq thisResult) != (that eq thatResult) && (thisResult matchesLoosely thatResult) + (this eq thisResult) != (that eq thatResult) && thisResult.matchesLoosely(thatResult) } /** The basetype of this type with given class symbol, NoType if `base` is not a class. */ @@ -1465,48 +1494,48 @@ object Types extends TypeUtils { case Atoms.Unknown => Atoms.Unknown case _ => Atoms.Unknown - private def dealias1(keep: AnnotatedType => Context ?=> Boolean, keepOpaques: Boolean)(using Context): Type = this match { + def dealias(keeps: Keeps)(using Context): Type = this match case tp: TypeRef => - if (tp.symbol.isClass) tp - else tp.info match { - case TypeAlias(alias) if !(keepOpaques && tp.symbol.is(Opaque)) => - alias.dealias1(keep, keepOpaques) + if tp.symbol.isClass then tp + else tp.info match + case TypeAlias(alias) if (keeps & KeepOpaques) == 0 || !tp.symbol.is(Opaque) => + alias.dealias(keeps) case _ => tp - } case app @ AppliedType(tycon, _) => - val tycon1 = tycon.dealias1(keep, keepOpaques) - if (tycon1 ne tycon) app.superType.dealias1(keep, keepOpaques) + val tycon1 = tycon.dealias(keeps) + if tycon1 ne tycon then app.superType.dealias(keeps) else this case tp: TypeVar => val tp1 = tp.instanceOpt - if (tp1.exists) tp1.dealias1(keep, keepOpaques) else tp + if tp1.exists then tp1.dealias(keeps) else tp case tp: AnnotatedType => - val parent1 = tp.parent.dealias1(keep, keepOpaques) - if keep(tp) then tp.derivedAnnotatedType(parent1, tp.annot) + val parent1 = tp.parent.dealias(keeps) + if (keeps & KeepAnnots) != 0 + || (keeps & KeepRefiningAnnots) != 0 && tp.isRefining + then tp.derivedAnnotatedType(parent1, tp.annot) else tp match case tp @ CapturingType(parent, refs) => tp.derivedCapturingType(parent1, refs) case _ => parent1 case tp: LazyRef => - tp.ref.dealias1(keep, keepOpaques) + tp.ref.dealias(keeps) case _ => this - } /** Follow aliases and dereference LazyRefs, annotated types and instantiated * TypeVars until type is no longer alias type, annotated type, LazyRef, * or instantiated type variable. 
*/ - final def dealias(using Context): Type = dealias1(keepNever, keepOpaques = false) + final def dealias(using Context): Type = dealias(KeepNothing) /** Follow aliases and dereference LazyRefs and instantiated TypeVars until type * is no longer alias type, LazyRef, or instantiated type variable. * Goes through annotated types and rewraps annotations on the result. */ - final def dealiasKeepAnnots(using Context): Type = dealias1(keepAlways, keepOpaques = false) + final def dealiasKeepAnnots(using Context): Type = dealias(KeepAnnots) /** Like `dealiasKeepAnnots`, but keeps only refining annotations */ - final def dealiasKeepRefiningAnnots(using Context): Type = dealias1(keepIfRefining, keepOpaques = false) + final def dealiasKeepRefiningAnnots(using Context): Type = dealias(KeepRefiningAnnots) /** Like dealias, but does not follow aliases if symbol is Opaque. This is * necessary if we want to look at the info of a symbol containing opaque @@ -1524,13 +1553,13 @@ object Types extends TypeUtils { * Here, we dealias symbol infos at the start of capture checking in operation `fluidify`. * We have to be careful not to accidentally reveal opaque aliases when doing so. */ - final def dealiasKeepOpaques(using Context): Type = dealias1(keepNever, keepOpaques = true) + final def dealiasKeepOpaques(using Context): Type = dealias(KeepOpaques) /** Like dealiasKeepAnnots, but does not follow opaque aliases. See `dealiasKeepOpaques` * for why this is sometimes necessary. */ final def dealiasKeepAnnotsAndOpaques(using Context): Type = - dealias1(keepAlways, keepOpaques = true) + dealias(KeepAnnots | KeepOpaques) /** Approximate this type with a type that does not contain skolem types. */ final def deskolemized(using Context): Type = @@ -1562,19 +1591,18 @@ object Types extends TypeUtils { case tp: AppliedType => tp.underlyingNormalizable case _ => NoType - private def widenDealias1(keep: AnnotatedType => Context ?=> Boolean)(using Context): Type = { - val res = this.widen.dealias1(keep, keepOpaques = false) - if (res eq this) res else res.widenDealias1(keep) - } + private def widenDealias(keeps: Keeps)(using Context): Type = + val tp1 = widen.dealias(keeps) + if tp1 eq this then this else tp1.widenDealias(keeps) /** Perform successive widenings and dealiasings until none can be applied anymore */ - final def widenDealias(using Context): Type = widenDealias1(keepNever) + final def widenDealias(using Context): Type = widenDealias(KeepNothing) /** Perform successive widenings and dealiasings while rewrapping annotations, until none can be applied anymore */ - final def widenDealiasKeepAnnots(using Context): Type = widenDealias1(keepAlways) + final def widenDealiasKeepAnnots(using Context): Type = widenDealias(KeepAnnots) /** Perform successive widenings and dealiasings while rewrapping refining annotations, until none can be applied anymore */ - final def widenDealiasKeepRefiningAnnots(using Context): Type = widenDealias1(keepIfRefining) + final def widenDealiasKeepRefiningAnnots(using Context): Type = widenDealias(KeepRefiningAnnots) /** Widen from constant type to its underlying non-constant * base type. 
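The single `dealias(keeps)` above replaces the old `dealias1(keep, keepOpaques)` scheme with an `Int` flag set; the concrete `Keeps` constants are declared near the end of this file. A minimal sketch of the flag arithmetic, reusing those values, to show how the public `dealias*` variants map onto combinations:

```scala
// Same encoding as the private `Keeps` constants in Types.scala:
type Keeps = Int
val KeepNothing        = 0  // plain dealias
val KeepAnnots         = 1  // keep (and rewrap) all annotations
val KeepRefiningAnnots = 2  // keep only refining annotations
val KeepOpaques        = 4  // do not follow opaque aliases

// Combinations are built with `|` and tested with `&`:
val keepAnnotsAndOpaques: Keeps = KeepAnnots | KeepOpaques   // dealiasKeepAnnotsAndOpaques
def keepsOpaques(keeps: Keeps): Boolean = (keeps & KeepOpaques) != 0

// keepsOpaques(keepAnnotsAndOpaques) == true
// keepsOpaques(KeepRefiningAnnots)   == false
```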
@@ -1624,7 +1652,7 @@ object Types extends TypeUtils { def underlyingIterator(using Context): Iterator[Type] = new Iterator[Type] { var current = Type.this var hasNext = true - def next = { + def next() = { val res = current hasNext = current.isInstanceOf[TypeProxy] if (hasNext) current = current.asInstanceOf[TypeProxy].underlying @@ -1862,7 +1890,7 @@ object Types extends TypeUtils { * no symbol it tries `member` as an alternative. */ def typeParamNamed(name: TypeName)(using Context): Symbol = - classSymbol.unforcedDecls.lookup(name) orElse member(name).symbol + classSymbol.unforcedDecls.lookup(name).orElse(member(name).symbol) /** If this is a prototype with some ignored component, reveal one more * layer of it. Otherwise the type itself. @@ -1961,8 +1989,7 @@ object Types extends TypeUtils { } defn.FunctionNOf( mt.paramInfos.mapConserve: - _.translateFromRepeated(toArray = isJava) - .mapIntoAnnot(defn.IntoParamAnnot, null), + _.translateFromRepeated(toArray = isJava), result1, isContextual) if mt.hasErasedParams then defn.PolyFunctionOf(mt) @@ -2001,47 +2028,15 @@ object Types extends TypeUtils { def annotatedToRepeated(using Context): Type = this match { case tp @ ExprType(tp1) => tp.derivedExprType(tp1.annotatedToRepeated) - case self @ AnnotatedType(tp, annot) if annot matches defn.RetainsByNameAnnot => + case self @ AnnotatedType(tp, annot) if annot.matches(defn.RetainsByNameAnnot) => self.derivedAnnotatedType(tp.annotatedToRepeated, annot) - case AnnotatedType(tp, annot) if annot matches defn.RepeatedAnnot => + case AnnotatedType(tp, annot) if annot.matches(defn.RepeatedAnnot) => val typeSym = tp.typeSymbol.asClass assert(typeSym == defn.SeqClass || typeSym == defn.ArrayClass) tp.translateParameterized(typeSym, defn.RepeatedParamClass) case _ => this } - /** A mapping between mapping one kind of into annotation to another or - * dropping into annotations. - * @param from the into annotation to map - * @param to either the replacement annotation symbol, or `null` - * in which case the `from` annotations are dropped. 
- */ - def mapIntoAnnot(from: ClassSymbol, to: ClassSymbol | Null)(using Context): Type = this match - case self @ AnnotatedType(tp, annot) => - val tp1 = tp.mapIntoAnnot(from, to) - if annot.symbol == from then - if to == null then tp1 - else AnnotatedType(tp1, Annotation(to, annot.tree.span)) - else self.derivedAnnotatedType(tp1, annot) - case AppliedType(tycon, arg :: Nil) if tycon.typeSymbol == defn.RepeatedParamClass => - val arg1 = arg.mapIntoAnnot(from, to) - if arg1 eq arg then this - else AppliedType(tycon, arg1 :: Nil) - case defn.FunctionOf(argTypes, resType, isContextual) => - val resType1 = resType.mapIntoAnnot(from, to) - if resType1 eq resType then this - else defn.FunctionOf(argTypes, resType1, isContextual) - case RefinedType(parent, rname, mt: MethodOrPoly) => - val mt1 = mt.mapIntoAnnot(from, to) - if mt1 eq mt then this - else RefinedType(parent.mapIntoAnnot(from, to), rname, mt1) - case mt: MethodOrPoly => - mt.derivedLambdaType(resType = mt.resType.mapIntoAnnot(from, to)) - case tp: ExprType => - tp.derivedExprType(tp.resType.mapIntoAnnot(from, to)) - case _ => - this - /** The set of distinct symbols referred to by this type, after all aliases are expanded */ def coveringSet(using Context): Set[Symbol] = (new CoveringSetAccumulator).apply(Set.empty[Symbol], this) @@ -2699,9 +2694,9 @@ object Types extends TypeUtils { */ final def controlled[T](op: => T)(using Context): T = try { ctx.base.underlyingRecursions += 1 - if (ctx.base.underlyingRecursions < Config.LogPendingUnderlyingThreshold) + if ctx.base.underlyingRecursions < Config.LogPendingUnderlyingThreshold then op - else if (ctx.pendingUnderlying contains this) + else if ctx.pendingUnderlying.contains(this) then throw CyclicReference(symbol) else try { @@ -2906,7 +2901,7 @@ object Types extends TypeUtils { */ abstract case class TermRef(override val prefix: Type, private var myDesignator: Designator) - extends NamedType, ImplicitRef, SingletonCaptureRef { + extends NamedType, ImplicitRef, SingletonType, ObjectCapability { type ThisType = TermRef type ThisName = TermName @@ -2935,7 +2930,7 @@ object Types extends TypeUtils { abstract case class TypeRef(override val prefix: Type, private var myDesignator: Designator) - extends NamedType, CaptureRef { + extends NamedType, SetCapability { type ThisType = TypeRef type ThisName = TypeName @@ -3071,7 +3066,7 @@ object Types extends TypeUtils { * do not survive runs whereas typerefs do. 
*/ abstract case class ThisType(tref: TypeRef) - extends CachedProxyType, SingletonCaptureRef { + extends CachedProxyType, SingletonType, ObjectCapability { def cls(using Context): ClassSymbol = tref.stableInRunSymbol match { case cls: ClassSymbol => cls case _ if ctx.mode.is(Mode.Interactive) => defn.AnyClass // was observed to happen in IDE mode @@ -3458,8 +3453,8 @@ object Types extends TypeUtils { val bcs1set = BaseClassSet(bcs1) def recur(bcs2: List[ClassSymbol]): List[ClassSymbol] = bcs2 match { case bc2 :: bcs2rest => - if (bcs1set contains bc2) - if (bc2.is(Trait)) recur(bcs2rest) + if bcs1set.contains(bc2) then + if bc2.is(Trait) then recur(bcs2rest) else bcs1 // common class, therefore rest is the same in both sequences else bc2 :: recur(bcs2rest) case nil => bcs1 @@ -3555,9 +3550,8 @@ object Types extends TypeUtils { val bcs1set = BaseClassSet(bcs1) def recur(bcs2: List[ClassSymbol]): List[ClassSymbol] = bcs2 match { case bc2 :: bcs2rest => - if (bcs1set contains bc2) - if (bc2.is(Trait)) bc2 :: recur(bcs2rest) - else bcs2 + if bcs1set.contains(bc2) then + if bc2.is(Trait) then bc2 :: recur(bcs2rest) else bcs2 else recur(bcs2rest) case nil => bcs2 @@ -3717,7 +3711,8 @@ object Types extends TypeUtils { // is that most poly types are cyclic via poly params, // and therefore two different poly types would never be equal. - trait MethodicType extends TermType + trait MethodicType extends TermType: + def resType: Type /** A by-name parameter type of the form `=> T`, or the type of a method with no parameter list. */ abstract case class ExprType(resType: Type) @@ -3816,9 +3811,51 @@ object Types extends TypeUtils { def integrate(tparams: List[ParamInfo], tp: Type)(using Context): Type = (tparams: @unchecked) match { case LambdaParam(lam, _) :: _ => tp.subst(lam, this) // This is where the precondition is necessary. - case params: List[Symbol @unchecked] => tp.subst(params, paramRefs) + case params: List[Symbol @unchecked] => IntegrateMap(params, paramRefs)(tp) } + /** A map that replaces references to symbols in `params` by the types in + * `paramRefs`. + * + * It is similar to [[Substituters#subst]] but avoids reloading denotations + * of named types by overriding `derivedSelect`. + * + * This is needed because during integration, [[TermParamRef]]s refer to a + * [[LambdaType]] that is not yet fully constructed, in particular for wich + * `paramInfos` is `null`. In that case all [[TermParamRef]]s have + * [[NoType]] as underlying type. Reloading denotions of selections + * involving such [[TermParamRef]]s in [[NamedType#withPrefix]] could then + * result in a [[NoDenotation]], which would make later disambiguation of + * overloads impossible. See `tests/pos/annot-17242.scala` for example. + */ + private class IntegrateMap(from: List[Symbol], to: List[Type])(using Context) extends TypeMap: + override def apply(tp: Type) = + // Same implementation as in `SubstMap`, except the `derivedSelect` in + // the `NamedType` case, and the default case that just calls `mapOver`. 
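`integrate` and the new `IntegrateMap` replace references to parameter *symbols* by positional parameter references of the lambda type under construction. A toy illustration of that idea, leaving out the denotation subtleties the comment above is really about (all names below are made up):

```scala
// Toy model of "integration": symbol references become positional ParamRefs.
sealed trait TT
case class SymRef(name: String) extends TT               // reference to a parameter symbol
case class ParamRef(index: Int) extends TT                // reference into the enclosing lambda
case class Fun(params: List[String], res: TT) extends TT  // a (nested) function type

def integrate(params: List[String], tp: TT): TT = tp match
  case SymRef(n) =>
    val i = params.indexOf(n)
    if i >= 0 then ParamRef(i) else tp   // replace only the symbols being integrated
  case Fun(ps, res) => Fun(ps, integrate(params, res))
  case _: ParamRef  => tp

// integrate(List("x", "y"), Fun(Nil, SymRef("y"))) == Fun(Nil, ParamRef(1))
```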
+ tp match + case tp: NamedType => + val sym = tp.symbol + var fs = from + var ts = to + while (fs.nonEmpty && ts.nonEmpty) { + if (fs.head eq sym) return ts.head + fs = fs.tail + ts = ts.tail + } + if (tp.prefix `eq` NoPrefix) tp + else derivedSelect(tp, apply(tp.prefix)) + case _: BoundType | _: ThisType => tp + case _ => mapOver(tp) + + override final def derivedSelect(tp: NamedType, pre: Type): Type = + if tp.prefix eq pre then tp + else + pre match + case ref: ParamRef if (ref.binder eq self) && tp.symbol.exists && tp.symbol.is(Method) => + NamedType(pre, tp.name, tp.denot.asSeenFrom(pre)) + case _ => + tp.derivedSelect(pre) + final def derivedLambdaType(paramNames: List[ThisName] = this.paramNames, paramInfos: List[PInfo] = this.paramInfos, resType: Type = this.resType)(using Context): This = @@ -3945,11 +3982,11 @@ object Types extends TypeUtils { override def resultType(using Context): Type = if (dependencyStatus == FalseDeps) { // dealias all false dependencies - val dealiasMap = new TypeMap with IdentityCaptRefMap { + object dealiasMap extends TypeMap with IdentityCaptRefMap { def apply(tp: Type) = tp match { case tp @ TypeRef(pre, _) => tp.info match { - case TypeAlias(alias) if depStatus(NoDeps, pre) == TrueDeps => apply(alias) + case TypeAlias(alias) if depStatus(NoDeps, pre, forParams = false) == TrueDeps => apply(alias) case _ => mapOver(tp) } case _ => @@ -3963,7 +4000,7 @@ object Types extends TypeUtils { private var myDependencyStatus: DependencyStatus = Unknown private var myParamDependencyStatus: DependencyStatus = Unknown - private def depStatus(initial: DependencyStatus, tp: Type)(using Context): DependencyStatus = + private def depStatus(initial: DependencyStatus, tp: Type, forParams: Boolean)(using Context): DependencyStatus = class DepAcc extends TypeAccumulator[DependencyStatus]: def apply(status: DependencyStatus, tp: Type) = compute(status, tp, this) def combine(x: DependencyStatus, y: DependencyStatus) = @@ -3992,11 +4029,13 @@ object Types extends TypeUtils { case tp: AnnotatedType => tp match case CapturingType(parent, refs) => - (compute(status, parent, theAcc) /: refs.elems) { + val status1 = (compute(status, parent, theAcc) /: refs.elems): (s, ref) => ref.stripReach match - case tp: TermParamRef if tp.binder eq thisLambdaType => combine(s, CaptureDeps) - case tp => combine(s, compute(status, tp, theAcc)) - } + case tp: TermParamRef if tp.binder eq thisLambdaType => combine(s, TrueDeps) + case tp => combine(s, compute(status, tp.coreType, theAcc)) + if refs.isConst || forParams // We assume capture set variables in parameters don't generate param dependencies + then status1 + else combine(status1, Provisional) case _ => if tp.annot.refersToParamOf(thisLambdaType) then TrueDeps else compute(status, tp.parent, theAcc) @@ -4020,7 +4059,7 @@ object Types extends TypeUtils { private def dependencyStatus(using Context): DependencyStatus = if (myDependencyStatus != Unknown) myDependencyStatus else { - val result = depStatus(NoDeps, resType) + val result = depStatus(NoDeps, resType, forParams = false) if ((result & Provisional) == 0) myDependencyStatus = result (result & StatusMask).toByte } @@ -4033,7 +4072,7 @@ object Types extends TypeUtils { else { val result = if (paramInfos.isEmpty) NoDeps - else paramInfos.tail.foldLeft(NoDeps)(depStatus(_, _)) + else paramInfos.tail.foldLeft(NoDeps)(depStatus(_, _, forParams = true)) if ((result & Provisional) == 0) myParamDependencyStatus = result (result & StatusMask).toByte } @@ -4042,34 +4081,33 @@ object Types extends 
TypeUtils { * which cannot be eliminated by de-aliasing? */ def isResultDependent(using Context): Boolean = - dependencyStatus == TrueDeps || dependencyStatus == CaptureDeps + dependencyStatus == TrueDeps /** Does one of the parameter types contain references to earlier parameters * of this method type which cannot be eliminated by de-aliasing? */ def isParamDependent(using Context): Boolean = - paramDependencyStatus == TrueDeps || paramDependencyStatus == CaptureDeps + paramDependencyStatus == TrueDeps - /** Is there a dependency involving a reference in a capture set, but - * otherwise no true result dependency? - */ - def isCaptureDependent(using Context) = dependencyStatus == CaptureDeps + /** Like isResultDependent, but without attempt to eliminate dependencies with de-aliasing */ + def looksResultDependent(using Context): Boolean = + (dependencyStatus & StatusMask) != NoDeps + + /** Like isParamDependent, but without attempt to eliminate dependencies with de-aliasing */ + def looksParamDependent(using Context): Boolean = + (paramDependencyStatus & StatusMask) != NoDeps def newParamRef(n: Int): TermParamRef = new TermParamRefImpl(this, n) /** The least supertype of `resultType` that does not contain parameter dependencies */ def nonDependentResultApprox(using Context): Type = if isResultDependent then - val dropDependencies = new ApproximatingTypeMap with IdempotentCaptRefMap { + object dropDependencies extends ApproximatingTypeMap { def apply(tp: Type) = tp match { case tp @ TermParamRef(`thisLambdaType`, _) => range(defn.NothingType, atVariance(1)(apply(tp.underlying))) case CapturingType(_, _) => mapOver(tp) - case ReachCapability(tp1) => - apply(tp1) match - case tp1a: CaptureRef if tp1a.isTrackableRef => tp1a.reach - case _ => defn.captureRoot.termRef case AnnotatedType(parent, ann) if ann.refersToParamOf(thisLambdaType) => val parent1 = mapOver(parent) if ann.symbol.isRetainsLike then @@ -4080,9 +4118,19 @@ object Types extends TypeUtils { parent1 case _ => mapOver(tp) } + override def mapCapability(c: Capability, deep: Boolean = false): Capability | (CaptureSet, Boolean) = c match + case Reach(c1) => + apply(c1) match + case tp1a: ObjectCapability if tp1a.isTrackableRef => tp1a.reach + case _ => GlobalCap + case _ => super.mapCapability(c, deep) } dropDependencies(resultType) else resultType + + /** Are all parameter names synthetic? 
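With this change `isResultDependent`/`isParamDependent` mean exactly `TrueDeps`, while the new `looksResultDependent`/`looksParamDependent` also count dependencies that dealiasing could still eliminate (`FalseDeps`). A small user-level illustration of that distinction, under the usual meaning of dependent method types (identifiers below are illustrative only):

```scala
class HasAlias    { type T = Int }  // T is a transparent alias
class HasAbstract { type T }        // T is abstract

// "Looks" result-dependent, but x.T dealiases to Int, so it is a false dependency:
def falseDep(x: HasAlias): x.T = 0

// Truly result-dependent: x.T cannot be eliminated by dealiasing:
def trueDep(x: HasAbstract): x.T = ???
```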
*/ + def allParamNamesSynthetic = paramNames.zipWithIndex.forall: (name, i) => + name == nme.syntheticParamName(i) } abstract case class MethodType(paramNames: List[TermName])( @@ -4112,7 +4160,6 @@ object Types extends TypeUtils { def nonErasedParamCount(using Context): Int = paramInfos.count(p => !p.hasAnnotation(defn.ErasedParamAnnot)) - protected def prefixString: String = companion.prefixString } @@ -4164,27 +4211,25 @@ object Types extends TypeUtils { /** Produce method type from parameter symbols, with special mappings for repeated * and inline parameters: - * - replace @repeated annotations on Seq or Array types by types + * - replace `@repeated` annotations on Seq or Array types by types * - map into annotations to $into annotations - * - add @inlineParam to inline parameters - * - add @erasedParam to erased parameters - * - wrap types of parameters that have an @allowConversions annotation with Into[_] + * - add `@inlineParam` to inline parameters + * - add `@erasedParam` to erased parameters + * - map `T @$into` types to `into[T]` */ def fromSymbols(params: List[Symbol], resultType: Type)(using Context): MethodType = apply(params.map(_.name.asTermName))( tl => params.map(p => tl.integrate(params, adaptParamInfo(p))), tl => tl.integrate(params, resultType)) - /** Adapt info of parameter symbol to be integhrated into corresponding MethodType + /** Adapt info of parameter symbol to be integrated into corresponding MethodType * using the scheme described in `fromSymbols`. */ def adaptParamInfo(param: Symbol, pinfo: Type)(using Context): Type = def addAnnotation(tp: Type, cls: ClassSymbol, param: Symbol): Type = tp match case ExprType(resType) => ExprType(addAnnotation(resType, cls, param)) case _ => AnnotatedType(tp, Annotation(cls, param.span)) - var paramType = pinfo - .annotatedToRepeated - .mapIntoAnnot(defn.IntoAnnot, defn.IntoParamAnnot) + var paramType = TypeOps.revealInto(pinfo).annotatedToRepeated if param.is(Inline) then paramType = addAnnotation(paramType, defn.InlineParamAnnot, param) if param.is(Erased) then @@ -4206,6 +4251,35 @@ object Types extends TypeUtils { } mt } + + /** Not safe to use in general: Check that all references to an enclosing + * TermParamRef name point to that TermParamRef + */ + def checkValid2(mt: MethodType)(using Context): mt.type = { + var t = new TypeTraverser: + val ps = mt.paramNames.zip(mt.paramRefs).toMap + def traverse(t: Type) = + t match + case CapturingType(p, refs) => + def checkRefs(refs: CaptureSet) = + for elem <- refs.elems do + elem match + case elem: TermParamRef => + val elemName = elem.binder.paramNames(elem.paramNum) + //assert(elemName.toString != "f") + ps.get(elemName) match + case Some(elemRef) => assert(elemRef eq elem, i"bad $mt") + case _ => + case ResultCap(binder: MethodType) if binder ne mt => + assert(binder.paramNames.toList != mt.paramNames.toList, i"bad $mt") + case _ => + checkRefs(refs) + traverse(p) + case _ => + traverseChildren(t) + t.traverse(mt.resType) + mt + } } object MethodType extends MethodTypeCompanion("MethodType") { @@ -4461,8 +4535,7 @@ object Types extends TypeUtils { final val Unknown: DependencyStatus = 0 // not yet computed final val NoDeps: DependencyStatus = 1 // no dependent parameters found final val FalseDeps: DependencyStatus = 2 // all dependent parameters are prefixes of non-depended alias types - final val CaptureDeps: DependencyStatus = 3 // dependencies in capture sets under captureChecking, otherwise only false dependencoes - final val TrueDeps: DependencyStatus = 4 // some 
truly dependent parameters exist + final val TrueDeps: DependencyStatus = 3 // some truly dependent parameters exist final val StatusMask: DependencyStatus = 7 // the bits indicating actual dependency status final val Provisional: DependencyStatus = 8 // set if dependency status can still change due to type variable instantiations } @@ -4703,7 +4776,7 @@ object Types extends TypeUtils { override def hashIsStable: Boolean = false } - abstract class ParamRef extends BoundType { + abstract class ParamRef extends BoundType, CoreCapability { type BT <: LambdaType def paramNum: Int def paramName: binder.ThisName = binder.paramNames(paramNum) @@ -4738,7 +4811,7 @@ object Types extends TypeUtils { * refer to `TermParamRef(binder, paramNum)`. */ abstract case class TermParamRef(binder: TermLambda, paramNum: Int) - extends ParamRef, SingletonCaptureRef { + extends ParamRef, SingletonType, ObjectCapability { type BT = TermLambda def kindString: String = "Term" def copyBoundType(bt: BT): Type = bt.paramRefs(paramNum) @@ -4750,7 +4823,7 @@ object Types extends TypeUtils { * refer to `TypeParamRef(binder, paramNum)`. */ abstract case class TypeParamRef(binder: TypeLambda, paramNum: Int) - extends ParamRef, CaptureRef { + extends ParamRef, SetCapability { type BT = TypeLambda def kindString: String = "Type" def copyBoundType(bt: BT): Type = bt.paramRefs(paramNum) @@ -5707,7 +5780,7 @@ object Types extends TypeUtils { // ----- Annotated and Import types ----------------------------------------------- /** An annotated type tpe @ annot */ - abstract case class AnnotatedType(parent: Type, annot: Annotation) extends CachedProxyType, CaptureRef { + abstract case class AnnotatedType(parent: Type, annot: Annotation) extends CachedProxyType, ValueType { override def underlying(using Context): Type = parent @@ -5744,11 +5817,11 @@ object Types extends TypeUtils { parent.hashIsStable override def eql(that: Type): Boolean = that match - case that: AnnotatedType => (parent eq that.parent) && (annot eql that.annot) + case that: AnnotatedType => (parent eq that.parent) && annot.eql(that.annot) case _ => false override def iso(that: Any, bs: BinderPairs): Boolean = that match - case that: AnnotatedType => parent.equals(that.parent, bs) && (annot eql that.annot) + case that: AnnotatedType => parent.equals(that.parent, bs) && annot.eql(that.annot) case _ => false } @@ -6066,33 +6139,38 @@ object Types extends TypeUtils { trait BiTypeMap extends TypeMap: thisMap => + /** Hook to control behavior on capture set variables. + * If true, install the map on capture set variables so that future elements are also mapped. + * If false, just map the elements currently present in the capture set variable. 
+ */ + def mapFutureElements: Boolean = true + /** The inverse of the type map */ def inverse: BiTypeMap - /** A restriction of this map to a function on tracked CaptureRefs */ - def forward(ref: CaptureRef): CaptureRef = - val result = this(ref) - def ensureTrackable(tp: Type): CaptureRef = tp match - /* Issue #22437: handle case when info is not yet available during postProcess in CC setup */ - case tp: (TypeParamRef | TermRef) if tp.underlying == NoType => - tp - case tp: CaptureRef => - if tp.isTrackableRef then tp - else ensureTrackable(tp.underlying) - case tp: TypeAlias => - ensureTrackable(tp.alias) - case _ => - assert(false, i"not a trackable captureRef ref: $result, ${result.underlyingIterator.toList}") - ensureTrackable(result) + /** A restriction of this map to a function on tracked Capabilities */ + override def mapCapability(c: Capability, deep: Boolean): Capability = + super.mapCapability(c, deep) match + case c1: Capability => c1 + case (cs, _) => assert(false, i"bimap $toString should map $c to a capability, but result = $cs") + + /** Fuse with another map */ + def fuse(next: BiTypeMap)(using Context): Option[TypeMap] = None - /** A restriction of the inverse to a function on tracked CaptureRefs */ - def backward(ref: CaptureRef): CaptureRef = inverse(ref) match - /* Ensure bijection for issue #22437 fix in method forward above: */ - case result: (TypeParamRef | TermRef) if result.underlying == NoType => - result - case result: CaptureRef if result.isTrackableRef => result end BiTypeMap + /** A typemap that follows aliases and keeps their transformed results if + * there is a change. + */ + trait FollowAliasesMap(using Context) extends TypeMap: + def mapFollowingAliases(t: Type): Type = + val t1 = t.dealiasKeepAnnots + if t1 ne t then + val t2 = apply(t1) + if t2 ne t1 then t2 + else t + else mapOver(t) + abstract class TypeMap(implicit protected var mapCtx: Context) extends VariantTraversal with (Type => Type) { thisMap => @@ -6171,6 +6249,44 @@ object Types extends TypeUtils { try derivedCapturingType(tp, this(parent), refs.map(this)) finally variance = saved + def toTrackableRef(tp: Type): Capability | Null = tp match + case CapturingType(_) => + null + case tp: CoreCapability => + if tp.isTrackableRef then tp + else toTrackableRef(tp.underlying) + case tp: TypeAlias => + toTrackableRef(tp.alias) + case _ => + null + + def mapCapability(c: Capability, deep: Boolean = false): Capability | (CaptureSet, Boolean) = c match + case c: RootCapability => c + case Reach(c1) => + mapCapability(c1, deep = true) + case ReadOnly(c1) => + assert(!deep) + mapCapability(c1) match + case c2: Capability => c2.readOnly + case (cs: CaptureSet, exact) => (cs.readOnly, exact) + case Maybe(c1) => + assert(!deep) + mapCapability(c1) match + case c2: Capability => c2.maybe + case (cs: CaptureSet, exact) => (cs.maybe, exact) + case ref: CoreCapability => + val tp1 = apply(ref) + val ref1 = toTrackableRef(tp1) + if ref1 != null then + if deep then ref1.reach + else ref1 + else + val isLiteral = tp1.typeSymbol == defn.Caps_CapSet + val cs = + if deep && !isLiteral then CaptureSet.ofTypeDeeply(tp1) + else CaptureSet.ofType(tp1, followResult = false) + (cs, isLiteral) + /** Utility method. Maps the supertype of a type proxy. Returns the * type proxy itself if the mapping leaves the supertype unchanged. * This avoids needless changes in mapped types. 
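The new `mapFutureElements` hook above distinguishes two ways of mapping a capture-set variable: rewrite only the elements it currently holds, or install the map so that elements added later are rewritten as well. A self-contained toy of that difference using plain Scala collections (not the compiler's `CaptureSet`; the class and method names are made up):

```scala
// Toy "growable set" with the two mapping strategies described above.
class GrowableSet[A]:
  private var elems = Set.empty[A]
  private var installed: List[A => A] = Nil

  def add(a: A): Unit =
    elems += installed.foldLeft(a)((x, f) => f(x)) // later additions go through installed maps

  def current: Set[A] = elems

  // Strategy 1 (mapFutureElements = false): map only what is present now.
  def snapshotMapped(f: A => A): Set[A] = elems.map(f)

  // Strategy 2 (mapFutureElements = true): rewrite now and keep mapping later additions.
  def installMap(f: A => A): Unit =
    elems = elems.map(f)
    installed = f :: installed
```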
@@ -6297,14 +6413,7 @@ object Types extends TypeUtils { } } - private def treeTypeMap = new TreeTypeMap( - typeMap = this, - // Using `ConservativeTreeCopier` is needed when copying dependent annoted - // types, where we can refer to a previous parameter represented as - // `TermParamRef` that has no underlying type yet. - // See tests/pos/annot-17242.scala. - cpy = ConservativeTreeCopier() - ) + private def treeTypeMap = new TreeTypeMap(typeMap = this) def mapOver(syms: List[Symbol]): List[Symbol] = mapSymbols(syms, treeTypeMap) @@ -6341,7 +6450,7 @@ object Types extends TypeUtils { tp.derivedClassInfo(prefix1, parents1, tp.decls, selfInfo1) end DeepTypeMap - @sharable object IdentityTypeMap extends TypeMap()(NoContext) { + @sharable object IdentityTypeMap extends TypeMap()(using NoContext) { def apply(tp: Type): Type = tp } @@ -6751,7 +6860,8 @@ object Types extends TypeUtils { foldOver(x2, tp.cases) case CapturingType(parent, refs) => - (this(x, parent) /: refs.elems)(this) + (this(x, parent) /: refs.elems): (x, elem) => + this(x, elem.coreType) case AnnotatedType(underlying, annot) => this(applyToAnnot(x, annot), underlying) @@ -6814,7 +6924,7 @@ object Types extends TypeUtils { def maybeAdd(xs: List[NamedType], tp: NamedType): List[NamedType] = if p(tp) then tp :: xs else xs val seen = util.HashSet[Type]() def apply(xs: List[NamedType], tp: Type): List[NamedType] = - if seen contains tp then xs + if seen.contains(tp) then xs else seen += tp tp match @@ -6993,7 +7103,7 @@ object Types extends TypeUtils { object fieldFilter extends NameFilter { def apply(pre: Type, name: Name)(using Context): Boolean = - name.isTermName && (pre member name).hasAltWith(!_.symbol.is(Method)) + name.isTermName && pre.member(name).hasAltWith(!_.symbol.is(Method)) def isStable = true } @@ -7011,6 +7121,15 @@ object Types extends TypeUtils { def isStable = true } + // ----- Dealias keep flags -------------------------------------------- + + private type Keeps = Int + + private val KeepNothing = 0 + private val KeepAnnots = 1 + private val KeepRefiningAnnots = 2 + private val KeepOpaques = 4 + // ----- Debug --------------------------------------------------------- @sharable var debugTrace: Boolean = false diff --git a/compiler/src/dotty/tools/dotc/core/classfile/AbstractFileReader.scala b/compiler/src/dotty/tools/dotc/core/classfile/AbstractFileReader.scala index 46581d00714e..d4f781e22126 100644 --- a/compiler/src/dotty/tools/dotc/core/classfile/AbstractFileReader.scala +++ b/compiler/src/dotty/tools/dotc/core/classfile/AbstractFileReader.scala @@ -3,8 +3,6 @@ package dotc package core package classfile -import scala.language.unsafeNulls - import java.lang.Float.intBitsToFloat import java.lang.Double.longBitsToDouble import java.io.{ByteArrayInputStream, DataInputStream} diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala index cfbdc854a88f..a1a4d56abb15 100644 --- a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala +++ b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala @@ -369,27 +369,21 @@ class ClassfileParser( * Updates the read pointer of 'in'. 
*/ def parseParents: List[Type] = { val superType = - if (classRoot.symbol == defn.ComparableClass || - classRoot.symbol == defn.JavaCloneableClass || - classRoot.symbol == defn.JavaSerializableClass) { - // Treat these interfaces as universal traits - in.nextChar + val superClass = in.nextChar + // Treat these interfaces as universal traits + if classRoot.symbol == defn.ComparableClass + || classRoot.symbol == defn.JavaCloneableClass + || classRoot.symbol == defn.JavaSerializableClass + then defn.AnyType - } else - pool.getSuperClass(in.nextChar).typeRef + pool.getSuperClass(superClass).typeRef val ifaceCount = in.nextChar - var ifaces = for (i <- (0 until ifaceCount).toList) yield pool.getSuperClass(in.nextChar).typeRef - // Dotty deviation: was - // var ifaces = for (i <- List.range(0, ifaceCount)) ... - // This does not typecheck because the type parameter of List is now lower-bounded by Int | Char. - // Consequently, no best implicit for the "Integral" evidence parameter of "range" - // is found. Previously, this worked because of weak conformance, which has been dropped. - + val ifaces = List.fill(ifaceCount.toInt): + pool.getSuperClass(in.nextChar).typeRef superType :: ifaces } - val result = unpickleOrParseInnerClasses() if (!result.isDefined) { var classInfo: Type = TempClassInfoType(parseParents, instanceScope, classRoot.symbol) @@ -408,8 +402,8 @@ class ClassfileParser( moduleRoot.setPrivateWithin(privateWithin) moduleRoot.sourceModule.setPrivateWithin(privateWithin) - for (i <- 0 until in.nextChar) parseMember(method = false) - for (i <- 0 until in.nextChar) parseMember(method = true) + for (_ <- 0 until in.nextChar) parseMember(method = false) + for (_ <- 0 until in.nextChar) parseMember(method = true) classRoot.registerCompanion(moduleRoot.symbol) moduleRoot.registerCompanion(classRoot.symbol) diff --git a/compiler/src/dotty/tools/dotc/core/tasty/AttributeUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/AttributeUnpickler.scala index 43a2bea27216..c5770d69b002 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/AttributeUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/AttributeUnpickler.scala @@ -1,7 +1,6 @@ package dotty.tools.dotc package core.tasty -import scala.language.unsafeNulls import scala.collection.immutable.BitSet import scala.collection.immutable.TreeMap diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala index 72f6895f122c..df3ac411989b 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala @@ -53,14 +53,14 @@ object TastyPrinter: else if arg.endsWith(".tasty") || (allowBetasty && arg.endsWith(".betasty")) then val path = Paths.get(arg) if Files.exists(path) then - printTasty(arg, Files.readAllBytes(path).nn, arg.endsWith(".betasty")) + printTasty(arg, Files.readAllBytes(path), arg.endsWith(".betasty")) else println("File not found: " + arg) System.exit(1) else if arg.endsWith(".jar") then val jar = JarArchive.open(Path(arg), create = false) try - for file <- jar.iterator() if file.hasTastyExtension do + for file <- jar.iterator if file.hasTastyExtension do printTasty(s"$arg ${file.path}", file.toByteArray, isBestEffortTasty = false) finally jar.close() else @@ -123,7 +123,7 @@ class TastyPrinter(bytes: Array[Byte], isBestEffortTasty: Boolean, val testPickl unpickle0(new PositionSectionUnpickler(sb)) unpickle0(new CommentSectionUnpickler(sb)) unpickle0(new 
AttributesSectionUnpickler(sb)) - sb.result + sb.result() } def unpickle0[R](sec: PrinterSectionUnpickler[R])(using NameRefs): Option[R] = @@ -266,7 +266,7 @@ class TastyPrinter(bytes: Array[Byte], isBestEffortTasty: Boolean, val testPickl val value = nameAtRef(utf8Ref).toString sb.append(nameStr(s" ${utf8Ref.index} [$value]")) sb.append("\n") - sb.result + sb.result() } } @@ -295,7 +295,7 @@ class TastyPrinter(bytes: Array[Byte], isBestEffortTasty: Boolean, val testPickl for ((_, nameRef) <- sources.iterator) { buf += nameRef } - NameRefs(buf.result) + NameRefs(buf.result()) } } diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala index cf9885d16d1f..4e63c7e973fe 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -716,8 +716,13 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { if passesConditionForErroringBestEffortCode(tree.hasType) then pickleType(tree.tpe) else pickleErrorType() case SingletonTypeTree(ref) => - writeByte(SINGLETONtpt) - pickleTree(ref) + val tp = ref.tpe + val tp1 = tp.deskolemized + if tp1 ne tp then + pickleType(tp1) + else + writeByte(SINGLETONtpt) + pickleTree(ref) case RefinedTypeTree(parent, refinements) => if (refinements.isEmpty) pickleTree(parent) else { @@ -852,6 +857,9 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { if (flags.is(ParamAccessor) && sym.isTerm && !sym.isSetter) flags = flags &~ ParamAccessor // we only generate a tag for parameter setters pickleFlags(flags, sym.isTerm) + if flags.is(Into) then + // Temporary measure until we can change TastyFormat to include an INTO tag + pickleAnnotation(sym, mdef, Annotation(defn.SilentIntoAnnot, util.Spans.NoSpan)) val annots = sym.annotations.foreach(pickleAnnotation(sym, mdef, _)) } @@ -958,7 +966,7 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { val it = mp.keysIterator var i = 0 while i < keys.length do - keys(i) = it.next + keys(i) = it.next() i += 1 assert(!it.hasNext) i = 0 diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala index d6f2812dad0d..766f6019faf9 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala @@ -667,7 +667,11 @@ class TreeUnpickler(reader: TastyReader, } val annotOwner = if sym.owner.isClass then newLocalDummy(sym.owner) else sym.owner - val annots = annotFns.map(_(annotOwner)) + var annots = annotFns.map(_(annotOwner)) + if annots.exists(_.symbol == defn.SilentIntoAnnot) then + // Temporary measure until we can change TastyFormat to include an INTO tag + sym.setFlag(Into) + annots = annots.filterNot(_.symbol == defn.SilentIntoAnnot) sym.annotations = annots if sym.isOpaqueAlias then sym.setFlag(Deferred) val isScala2MacroDefinedInScala3 = flags.is(Macro, butNot = Inline) && flags.is(Erased) @@ -933,7 +937,7 @@ class TreeUnpickler(reader: TastyReader, DefDef(paramDefss, tpt) case VALDEF => val tpt = readTpt()(using localCtx) - sym.info = tpt.tpe + sym.info = tpt.tpe.suppressIntoIfParam(sym) ValDef(tpt) case TYPEDEF | TYPEPARAM => if (sym.isClass) { @@ -978,7 +982,7 @@ class TreeUnpickler(reader: TastyReader, case PARAM => val tpt = readTpt()(using localCtx) assert(nothingButMods(end)) - sym.info = tpt.tpe + sym.info = tpt.tpe.suppressIntoIfParam(sym) ValDef(tpt) } goto(end) diff --git 
a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala index 25245f5ca1b6..e97e73dc5760 100644 --- a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala @@ -63,9 +63,15 @@ object Scala2Unpickler { denot.info = PolyType.fromParams(denot.owner.typeParams, denot.info) } - def ensureConstructor(cls: ClassSymbol, clsDenot: ClassDenotation, scope: Scope)(using Context): Unit = { - if (scope.lookup(nme.CONSTRUCTOR) == NoSymbol) { - val constr = newDefaultConstructor(cls) + def ensureConstructor(cls: ClassSymbol, clsDenot: ClassDenotation, scope: Scope)(using Context): Unit = + doEnsureConstructor(cls, clsDenot, scope, fromScala2 = true) + + private def doEnsureConstructor(cls: ClassSymbol, clsDenot: ClassDenotation, scope: Scope, fromScala2: Boolean) + (using Context): Unit = + if scope.lookup(nme.CONSTRUCTOR) == NoSymbol then + val constr = + if fromScala2 || cls.isAllOf(Trait | JavaDefined) then newDefaultConstructor(cls) + else newConstructor(cls, Private, paramNames = Nil, paramTypes = Nil) // Scala 2 traits have a constructor iff they have initialization code // In dotc we represent that as !StableRealizable, which is also owner.is(NoInits) if clsDenot.flagsUNSAFE.is(Trait) then @@ -73,8 +79,6 @@ object Scala2Unpickler { clsDenot.setFlag(NoInits) addConstructorTypeParams(constr) cls.enter(constr, scope) - } - } def setClassInfo(denot: ClassDenotation, info: Type, fromScala2: Boolean, selfInfo: Type = NoType)(using Context): Unit = { val cls = denot.classSymbol @@ -108,7 +112,7 @@ object Scala2Unpickler { if (tsym.exists) tsym.setFlag(TypeParam) else denot.enter(tparam, decls) } - if (!denot.flagsUNSAFE.isAllOf(JavaModule)) ensureConstructor(cls, denot, decls) + if (!denot.flagsUNSAFE.isAllOf(JavaModule)) doEnsureConstructor(cls, denot, decls, fromScala2) val scalacCompanion = denot.classSymbol.scalacLinkedClass diff --git a/compiler/src/dotty/tools/dotc/coverage/Coverage.scala b/compiler/src/dotty/tools/dotc/coverage/Coverage.scala index 3061bfa4ee5c..7df2e503e3f4 100644 --- a/compiler/src/dotty/tools/dotc/coverage/Coverage.scala +++ b/compiler/src/dotty/tools/dotc/coverage/Coverage.scala @@ -19,7 +19,7 @@ class Coverage: def addStatement(stmt: Statement): Unit = statementsById(stmt.id) = stmt - def removeStatementsFromFile(sourcePath: Path) = + def removeStatementsFromFile(sourcePath: Path | Null) = val removedIds = statements.filter(_.location.sourcePath == sourcePath).map(_.id.toLong) removedIds.foreach(statementsById.remove) diff --git a/compiler/src/dotty/tools/dotc/coverage/Location.scala b/compiler/src/dotty/tools/dotc/coverage/Location.scala index de1a3db710a3..de8eb7a607db 100644 --- a/compiler/src/dotty/tools/dotc/coverage/Location.scala +++ b/compiler/src/dotty/tools/dotc/coverage/Location.scala @@ -46,5 +46,5 @@ object Location: s"$packageName.$className", classType, methodName, - source.file.absolute.jpath + source.file.absolute.jpath.nn ) diff --git a/compiler/src/dotty/tools/dotc/coverage/Serializer.scala b/compiler/src/dotty/tools/dotc/coverage/Serializer.scala index 26efa8934e00..de9c29965ded 100644 --- a/compiler/src/dotty/tools/dotc/coverage/Serializer.scala +++ b/compiler/src/dotty/tools/dotc/coverage/Serializer.scala @@ -3,7 +3,6 @@ package coverage import java.nio.file.{Path, Paths, Files} import java.io.Writer -import scala.language.unsafeNulls import 
scala.collection.mutable.StringBuilder /** diff --git a/compiler/src/dotty/tools/dotc/inlines/Inliner.scala b/compiler/src/dotty/tools/dotc/inlines/Inliner.scala index 92cae663352a..047ab80e6b0f 100644 --- a/compiler/src/dotty/tools/dotc/inlines/Inliner.scala +++ b/compiler/src/dotty/tools/dotc/inlines/Inliner.scala @@ -96,6 +96,15 @@ object Inliner: } end isElideableExpr + // InlineCopier is a more fault-tolerant copier that does not cause errors when + // function types in applications are undefined. This is necessary since we copy at + // the same time as establishing the proper context in which the copied tree should + // be evaluated. This matters for opaque types, see neg/i14653.scala. + private class InlineCopier() extends TypedTreeCopier: + override def Apply(tree: Tree)(fun: Tree, args: List[Tree])(using Context): Apply = + if fun.tpe.widen.exists then super.Apply(tree)(fun, args) + else untpd.cpy.Apply(tree)(fun, args).withTypeUnchecked(tree.tpe) + // InlinerMap is a TreeTypeMap with special treatment for inlined arguments: // They are generally left alone (not mapped further, and if they wrap a type // the type Inlined wrapper gets dropped. @@ -108,13 +117,7 @@ object Inliner: substFrom: List[Symbol], substTo: List[Symbol])(using Context) extends TreeTypeMap( - typeMap, treeMap, oldOwners, newOwners, substFrom, substTo, - // It is necessary to use the `ConservativeTreeCopier` since we copy at - // the same time as establishing the proper context in which the copied - // tree should be evaluated. This matters for opaque types, see - // neg/i14653.scala. - ConservativeTreeCopier() - ): + typeMap, treeMap, oldOwners, newOwners, substFrom, substTo, InlineCopier()): override def transform(tree: Tree)(using Context): Tree = tree match @@ -162,28 +165,9 @@ object Inliner: else Nil case _ => Nil val refinements = openOpaqueAliases(cls.givenSelfType) - - // Map references in the refinements from the proxied termRef - // to the recursive type of the refined type - // e.g.: Obj.type{type A = Obj.B; type B = Int} -> Obj.type{type A = .B; type B = Int} - def mapRecTermRefReferences(recType: RecType, refinedType: Type) = - new TypeMap { - def apply(tp: Type) = tp match - case RefinedType(a: RefinedType, b, info) => RefinedType(apply(a), b, apply(info)) - case RefinedType(a, b, info) => RefinedType(a, b, apply(info)) - case TypeRef(prefix, des) => TypeRef(apply(prefix), des) - case termRef: TermRef if termRef == ref => recType.recThis - case _ => mapOver(tp) - }.apply(refinedType) - val refinedType = refinements.foldLeft(ref: Type): (parent, refinement) => RefinedType(parent, refinement._1, TypeAlias(refinement._2)) - - val recType = RecType.closeOver ( recType => - mapRecTermRefReferences(recType, refinedType) - ) - - val refiningSym = newSym(InlineBinderName.fresh(), Synthetic, recType, span) + val refiningSym = newSym(InlineBinderName.fresh(), Synthetic, refinedType, span) refiningSym.termRef def unapply(refiningRef: TermRef)(using Context): Option[TermRef] = @@ -402,9 +386,6 @@ class Inliner(val call: tpd.Tree)(using Context): */ private val opaqueProxies = new mutable.ListBuffer[(TermRef, TermRef)] - /** TermRefs for which we already started synthesising proxies */ - private val visitedTermRefs = new mutable.HashSet[TermRef] - protected def hasOpaqueProxies = opaqueProxies.nonEmpty /** Map first halves of opaqueProxies pairs to second halves, using =:= as equality */ @@ -432,15 +413,12 @@ class Inliner(val call: tpd.Tree)(using Context): for cls <- ref.widen.baseClasses do if 
cls.containsOpaques && (forThisProxy || inlinedMethod.isContainedIn(cls)) - && !visitedTermRefs.contains(ref) + && mapRef(ref).isEmpty then - visitedTermRefs += ref val refiningRef = OpaqueProxy(ref, cls, call.span) val refiningSym = refiningRef.symbol.asTerm val refinedType = refiningRef.info - val refiningDef = addProxiesForRecurrentOpaques( - ValDef(refiningSym, tpd.ref(ref).cast(refinedType), inferred = true).withSpan(span) - ) + val refiningDef = ValDef(refiningSym, tpd.ref(ref).cast(refinedType), inferred = true).withSpan(span) inlining.println(i"add opaque alias proxy $refiningDef for $ref in $tp") bindingsBuf += refiningDef opaqueProxies += ((ref, refiningSym.termRef)) @@ -460,27 +438,6 @@ class Inliner(val call: tpd.Tree)(using Context): } ) - /** Transforms proxies that reference other opaque types, like for: - * object Obj1 { opaque type A = Int } - * object Obj2 { opaque type B = A } - * and proxy$1 of type Obj2.type{type B = Obj1.A} - * creates proxy$2 of type Obj1.type{type A = Int} - * and transforms proxy$1 into Obj2.type{type B = proxy$2.A} - */ - private def addProxiesForRecurrentOpaques(binding: ValDef)(using Context): ValDef = - def fixRefinedTypes(ref: Type): Unit = - ref match - case recType: RecType => fixRefinedTypes(recType.underlying) - case RefinedType(parent, name, info) => - addOpaqueProxies(info.widen, binding.span, true) - fixRefinedTypes(parent) - case _ => - fixRefinedTypes(binding.symbol.info) - binding.symbol.info = mapOpaques.typeMap(binding.symbol.info) - mapOpaques.transform(binding).asInstanceOf[ValDef] - .showing(i"transformed this binding exposing opaque aliases: $result", inlining) - end addProxiesForRecurrentOpaques - /** If `binding` contains TermRefs that refer to objects with opaque * type aliases, add proxy definitions that expose these aliases * and substitute such TermRefs with theproxies. Example from pos/opaque-inline1.scala: @@ -726,7 +683,7 @@ class Inliner(val call: tpd.Tree)(using Context): // call. This way, a defensively written rewrite method can always // report bad inputs at the point of call instead of revealing its internals. 
val callToReport = if (enclosingInlineds.nonEmpty) enclosingInlineds.last else call - val ctxToReport = ctx.outersIterator.dropWhile(enclosingInlineds(using _).nonEmpty).next + val ctxToReport = ctx.outersIterator.dropWhile(enclosingInlineds(using _).nonEmpty).next() // The context in which we report should still use the existing context reporter val ctxOrigReporter = ctxToReport.fresh.setReporter(ctx.reporter) inContext(ctxOrigReporter) { diff --git a/compiler/src/dotty/tools/dotc/inlines/Inlines.scala b/compiler/src/dotty/tools/dotc/inlines/Inlines.scala index 85bea871b955..a7269c83bccb 100644 --- a/compiler/src/dotty/tools/dotc/inlines/Inlines.scala +++ b/compiler/src/dotty/tools/dotc/inlines/Inlines.scala @@ -395,6 +395,11 @@ object Inlines: case ConstantType(Constant(code: String)) => val unitName = "tasty-reflect" val source2 = SourceFile.virtual(unitName, code) + def compilationUnits(untpdTree: untpd.Tree, tpdTree: Tree): List[CompilationUnit] = + val compilationUnit = CompilationUnit(unitName, code) + compilationUnit.tpdTree = tpdTree + compilationUnit.untpdTree = untpdTree + List(compilationUnit) // We need a dummy owner, as the actual one does not have a computed denotation yet, // but might be inspected in a transform phase, leading to cyclic errors val dummyOwner = newSymbol(ctx.owner, "$dummySymbol$".toTermName, Private, defn.AnyType, NoSymbol) @@ -407,31 +412,30 @@ object Inlines: .withOwner(dummyOwner) inContext(newContext) { - val tree2 = new Parser(source2).block() - if ctx.reporter.allErrors.nonEmpty then + def noErrors = ctx.reporter.allErrors.isEmpty + val untpdTree = new Parser(source2).block() + if !noErrors then ctx.reporter.allErrors.map((ErrorKind.Parser, _)) else - val tree3 = ctx.typer.typed(tree2) + val tpdTree1 = ctx.typer.typed(untpdTree) ctx.base.postTyperPhase match - case postTyper: PostTyper if ctx.reporter.allErrors.isEmpty => - val tree4 = atPhase(postTyper) { postTyper.newTransformer.transform(tree3) } + case postTyper: PostTyper if noErrors => + val tpdTree2 = + atPhase(postTyper) { postTyper.runOn(compilationUnits(untpdTree, tpdTree1)).head.tpdTree } ctx.base.setRootTreePhase match - case setRootTree => - val tree5 = - val compilationUnit = CompilationUnit(unitName, code) - compilationUnit.tpdTree = tree4 - compilationUnit.untpdTree = tree2 - var units = List(compilationUnit) - atPhase(setRootTree)(setRootTree.runOn(units).head.tpdTree) + case setRootTree if noErrors => // might be noPhase, if -Yretain-trees is not used + val tpdTree3 = + atPhase(setRootTree)(setRootTree.runOn(compilationUnits(untpdTree, tpdTree2)).head.tpdTree) ctx.base.inliningPhase match - case inlining: Inlining if ctx.reporter.allErrors.isEmpty => - val tree6 = atPhase(inlining) { inlining.newTransformer.transform(tree5) } - if ctx.reporter.allErrors.isEmpty && reconstructedTransformPhases.nonEmpty then - var transformTree = tree6 + case inlining: Inlining if noErrors => + val tpdTree4 = atPhase(inlining) { inlining.newTransformer.transform(tpdTree3) } + if noErrors && reconstructedTransformPhases.nonEmpty then + var transformTree = tpdTree4 for phase <- reconstructedTransformPhases do - if ctx.reporter.allErrors.isEmpty then + if noErrors then transformTree = atPhase(phase.end + 1)(phase.transformUnit(transformTree)) case _ => + case _ => case _ => ctx.reporter.allErrors.map((ErrorKind.Typer, _)) } diff --git a/compiler/src/dotty/tools/dotc/interactive/Completion.scala b/compiler/src/dotty/tools/dotc/interactive/Completion.scala index e59a8e0b882d..b2dab9606a9d 100644 --- 
a/compiler/src/dotty/tools/dotc/interactive/Completion.scala +++ b/compiler/src/dotty/tools/dotc/interactive/Completion.scala @@ -48,6 +48,16 @@ case class Completion(label: String, description: String, symbols: List[Symbol]) object Completion: + def scopeContext(pos: SourcePosition)(using Context): CompletionResult = + val tpdPath = Interactive.pathTo(ctx.compilationUnit.tpdTree, pos.span) + val completionContext = Interactive.contextOfPath(tpdPath).withPhase(Phases.typerPhase) + inContext(completionContext): + val untpdPath = Interactive.resolveTypedOrUntypedPath(tpdPath, pos) + val mode = completionMode(untpdPath, pos, forSymbolSearch = true) + val rawPrefix = completionPrefix(untpdPath, pos) + val completer = new Completer(mode, pos, untpdPath, _ => true) + completer.scopeCompletions + /** Get possible completions from tree at `pos` * * @return offset and list of symbols for possible completions @@ -60,7 +70,6 @@ object Completion: val mode = completionMode(untpdPath, pos) val rawPrefix = completionPrefix(untpdPath, pos) val completions = rawCompletions(pos, mode, rawPrefix, tpdPath, untpdPath) - postProcessCompletions(untpdPath, completions, rawPrefix) /** Get possible completions from tree at `pos` @@ -89,7 +98,7 @@ object Completion: * * Otherwise, provide no completion suggestion. */ - def completionMode(path: List[untpd.Tree], pos: SourcePosition): Mode = path match + def completionMode(path: List[untpd.Tree], pos: SourcePosition, forSymbolSearch: Boolean = false): Mode = path match // Ignore `package foo@@` and `package foo.bar@@` case ((_: tpd.Select) | (_: tpd.Ident)):: (_ : tpd.PackageDef) :: _ => Mode.None case GenericImportSelector(sel) => @@ -97,14 +106,19 @@ object Completion: else if sel.isGiven && sel.bound.span.contains(pos.span) then Mode.ImportOrExport else Mode.None // import scala.{util => u@@} case GenericImportOrExport(_) => Mode.ImportOrExport | Mode.Scope // import TrieMa@@ + case untpd.InterpolatedString(_, untpd.Literal(Constants.Constant(_: String)) :: _) :: _ => + Mode.Term | Mode.Scope case untpd.Literal(Constants.Constant(_: String)) :: _ => Mode.Term | Mode.Scope // literal completions case (ref: untpd.RefTree) :: _ => val maybeSelectMembers = if ref.isInstanceOf[untpd.Select] then Mode.Member else Mode.Scope - - if (ref.name.isTermName) Mode.Term | maybeSelectMembers + if (forSymbolSearch) then Mode.Term | Mode.Type | maybeSelectMembers + else if (ref.name.isTermName) Mode.Term | maybeSelectMembers else if (ref.name.isTypeName) Mode.Type | maybeSelectMembers else Mode.None + case (_: tpd.TypeTree | _: tpd.MemberDef) :: _ if forSymbolSearch => Mode.Type | Mode.Term + case (_: tpd.CaseDef) :: _ if forSymbolSearch => Mode.Type | Mode.Term + case Nil if forSymbolSearch => Mode.Type | Mode.Term case _ => Mode.None /** When dealing with in varios palces we check to see if they are @@ -171,6 +185,14 @@ object Completion: case (importOrExport: untpd.ImportOrExport) :: _ => Some(importOrExport) case _ => None + private object StringContextApplication: + def unapply(path: List[tpd.Tree]): Option[tpd.Apply] = + path match + case tpd.Select(qual @ tpd.Apply(tpd.Select(tpd.Select(_, StdNames.nme.StringContext), _), _), _) :: _ => + Some(qual) + case _ => None + + /** Inspect `path` to determine the offset where the completion result should be inserted. 
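The added `InterpolatedString` case above makes the completer treat the interior of a string interpolation as ordinary term position, so scope completions are offered there. A tiny user-level illustration (the cursor position is only marked in the comment; the value names are made up):

```scala
val world = "World"
// With the cursor placed right after `$wor`, term and scope completions are
// expected, offering `world` from the enclosing scope:
val greeting = s"Hello $world"
```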
*/ def completionOffset(untpdPath: List[untpd.Tree]): Int = untpdPath match @@ -220,11 +242,12 @@ object Completion: val result = adjustedPath match // Ignore synthetic select from `This` because in code it was `Ident` // See example in dotty.tools.languageserver.CompletionTest.syntheticThis - case tpd.Select(qual @ tpd.This(_), _) :: _ if qual.span.isSynthetic => completer.scopeCompletions - case tpd.Select(qual, _) :: _ if qual.typeOpt.hasSimpleKind => completer.selectionCompletions(qual) - case tpd.Select(qual, _) :: _ => Map.empty + case tpd.Select(qual @ tpd.This(_), _) :: _ if qual.span.isSynthetic => completer.scopeCompletions.names + case StringContextApplication(qual) => + completer.scopeCompletions.names ++ completer.selectionCompletions(qual) + case tpd.Select(qual, _) :: _ => completer.selectionCompletions(qual) case (tree: tpd.ImportOrExport) :: _ => completer.directMemberCompletions(tree.expr) - case _ => completer.scopeCompletions + case _ => completer.scopeCompletions.names interactiv.println(i"""completion info with pos = $pos, | term = ${completer.mode.is(Mode.Term)}, @@ -325,6 +348,7 @@ object Completion: (completionMode.is(Mode.Term) && (sym.isTerm || sym.is(ModuleClass)) || (completionMode.is(Mode.Type) && (sym.isType || sym.isStableMember))) ) + end isValidCompletionSymbol given ScopeOrdering(using Context): Ordering[Seq[SingleDenotation]] with val order = @@ -344,7 +368,7 @@ object Completion: * For the results of all `xyzCompletions` methods term names and type names are always treated as different keys in the same map * and they never conflict with each other. */ - class Completer(val mode: Mode, pos: SourcePosition, untpdPath: List[untpd.Tree], matches: Name => Boolean): + class Completer(val mode: Mode, pos: SourcePosition, untpdPath: List[untpd.Tree], matches: Name => Boolean)(using Context): /** Completions for terms and types that are currently in scope: * the members of the current class, local definitions and the symbols that have been imported, * recursively adding completions from outer scopes. 
@@ -358,7 +382,7 @@ object Completion: * (even if the import follows it syntactically) * - a more deeply nested import shadowing a member or a local definition causes an ambiguity */ - def scopeCompletions(using context: Context): CompletionMap = + lazy val scopeCompletions: CompletionResult = /** Temporary data structure representing denotations with the same name introduced in a given scope * as a member of a type, by a local definition or by an import clause @@ -369,14 +393,19 @@ object Completion: ScopedDenotations(denots.filter(includeFn), ctx) val mappings = collection.mutable.Map.empty[Name, List[ScopedDenotations]].withDefaultValue(List.empty) + val renames = collection.mutable.Map.empty[Symbol, Name] def addMapping(name: Name, denots: ScopedDenotations) = mappings(name) = mappings(name) :+ denots ctx.outersIterator.foreach { case ctx @ given Context => if ctx.isImportContext then - importedCompletions.foreach { (name, denots) => + val imported = importedCompletions + imported.names.foreach { (name, denots) => addMapping(name, ScopedDenotations(denots, ctx, include(_, name))) } + imported.renames.foreach { (name, newName) => + renames(name) = newName + } else if ctx.owner.isClass then accessibleMembers(ctx.owner.thisType) .groupByName.foreach { (name, denots) => @@ -420,7 +449,6 @@ object Completion: // most deeply nested member or local definition if not shadowed by an import case Some(local) if local.ctx.scope == first.ctx.scope => resultMappings += name -> local.denots - case None if isSingleImport || isImportedInDifferentScope || isSameSymbolImportedDouble => resultMappings += name -> first.denots case None if notConflictingWithDefaults => @@ -430,7 +458,7 @@ object Completion: } } - resultMappings + CompletionResult(resultMappings, renames.toMap) end scopeCompletions /** Widen only those types which are applied or are exactly nothing @@ -448,17 +476,16 @@ object Completion: def selectionCompletions(qual: tpd.Tree)(using Context): CompletionMap = val adjustedQual = widenQualifier(qual) - val implicitConversionMembers = implicitConversionMemberCompletions(adjustedQual) - val extensionMembers = extensionCompletions(adjustedQual) - val directMembers = directMemberCompletions(adjustedQual) - val namedTupleMembers = namedTupleCompletions(adjustedQual) + if qual.symbol.is(Package) then + directMemberCompletions(adjustedQual) + else if qual.typeOpt.hasSimpleKind then + implicitConversionMemberCompletions(adjustedQual) ++ + extensionCompletions(adjustedQual) ++ + directMemberCompletions(adjustedQual) ++ + namedTupleCompletions(adjustedQual) + else + Map.empty - List( - implicitConversionMembers, - extensionMembers, - directMembers, - namedTupleMembers - ).reduce(_ ++ _) /** Completions for members of `qual`'s type. * These include inherited definitions but not members added by extensions or implicit conversions @@ -472,15 +499,20 @@ object Completion: /** Completions introduced by imports directly in this context. * Completions from outer contexts are not included. 
*/ - private def importedCompletions(using Context): CompletionMap = + private def importedCompletions(using Context): CompletionResult = val imp = ctx.importInfo + val renames = collection.mutable.Map.empty[Symbol, Name] if imp == null then - Map.empty + CompletionResult(Map.empty, Map.empty) else def fromImport(name: Name, nameInScope: Name): Seq[(Name, SingleDenotation)] = imp.site.member(name).alternatives - .collect { case denot if include(denot, nameInScope) => nameInScope -> denot } + .collect { case denot if include(denot, nameInScope) => + if name != nameInScope then + renames(denot.symbol) = nameInScope + nameInScope -> denot + } val givenImports = imp.importedImplicits .map { ref => (ref.implicitName: Name, ref.underlyingRef.denot.asSingleDenotation) } @@ -506,7 +538,8 @@ object Completion: fromImport(original.toTypeName, nameInScope.toTypeName) }.toSeq.groupByName - givenImports ++ wildcardMembers ++ explicitMembers + val results = givenImports ++ wildcardMembers ++ explicitMembers + CompletionResult(results, renames.toMap) end importedCompletions /** Completions from implicit conversions including old style extensions using implicit classes */ @@ -583,8 +616,7 @@ object Completion: // There are four possible ways for an extension method to be applicable // 1. The extension method is visible under a simple name, by being defined or inherited or imported in a scope enclosing the reference. - val termCompleter = new Completer(Mode.Term, pos, untpdPath, matches) - val extMethodsInScope = termCompleter.scopeCompletions.toList.flatMap: + val extMethodsInScope = scopeCompletions.names.toList.flatMap: case (name, denots) => denots.collect: case d: SymDenotation if d.isTerm && d.termRef.symbol.is(Extension) => (d.termRef, name.asTermName) @@ -686,6 +718,7 @@ object Completion: private type CompletionMap = Map[Name, Seq[SingleDenotation]] + case class CompletionResult(names: Map[Name, Seq[SingleDenotation]], renames: Map[Symbol, Name]) /** * The completion mode: defines what kinds of symbols should be included in the completion * results. 
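The new `scopeContext` entry point returns a `CompletionResult` carrying both the visible names and the renames introduced by renaming imports. A minimal sketch of a caller, assuming a compiler `Context` and a `SourcePosition` are already in scope; the helper itself is illustrative and not part of this change:

```scala
import dotty.tools.dotc.core.Contexts.Context
import dotty.tools.dotc.interactive.Completion
import dotty.tools.dotc.util.SourcePosition

// Report symbols that are in scope under a different name than their own,
// e.g. after `import scala.collection.immutable.{List => IList}`.
def renamedInScope(pos: SourcePosition)(using Context): List[String] =
  val result = Completion.scopeContext(pos) // CompletionResult(names, renames)
  result.renames.toList.map { (sym, nameInScope) =>
    s"${sym.name.show} is visible as ${nameInScope.show}"
  }
```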
diff --git a/compiler/src/dotty/tools/dotc/parsing/JavaScanners.scala b/compiler/src/dotty/tools/dotc/parsing/JavaScanners.scala index 3f993195e4f3..64f586ca0410 100644 --- a/compiler/src/dotty/tools/dotc/parsing/JavaScanners.scala +++ b/compiler/src/dotty/tools/dotc/parsing/JavaScanners.scala @@ -666,7 +666,7 @@ object JavaScanners { val limit: Double = if (token == DOUBLELIT) Double.MaxValue else Float.MaxValue try { - val value: Double = java.lang.Double.valueOf(strVal.toString).nn.doubleValue() + val value: Double = java.lang.Double.valueOf(strVal.toString).doubleValue() if (value > limit) error(em"floating point number too large") if (negated) -value else value diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index f87f0d957325..c607d0377d83 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -68,8 +68,8 @@ object Parsers { this == Given || this == ExtensionFollow def acceptsVariance = this == Class || this == CaseClass || this == Hk - def acceptsCtxBounds = - !(this == Type || this == Hk) + def acceptsCtxBounds(using Context) = + !(this == Type || this == Hk) || (sourceVersion.enablesNewGivens && this == Type) def acceptsWildcard = this == Type || this == Hk @@ -78,9 +78,6 @@ object Parsers { enum ParseKind: case Expr, Type, Pattern - enum IntoOK: - case Yes, No, Nested - type StageKind = Int object StageKind { val None = 0 @@ -227,6 +224,7 @@ object Parsers { def isNumericLit = numericLitTokens contains in.token def isTemplateIntro = templateIntroTokens contains in.token def isDclIntro = dclIntroTokens contains in.token + def isDclIntroNext = dclIntroTokens contains in.lookahead.token def isStatSeqEnd = in.isNestedEnd || in.token == EOF || in.token == RPAREN def mustStartStat = mustStartStatTokens contains in.token @@ -720,7 +718,7 @@ object Parsers { def checkNextNotIndented(): Unit = if in.isNewLine then val nextIndentWidth = in.indentWidth(in.next.offset) - if in.currentRegion.indentWidth < nextIndentWidth then + if in.currentRegion.indentWidth < nextIndentWidth && in.currentRegion.closedBy == OUTDENT then warning(em"Line is indented too far to the right, or a `{` or `:` is missing", in.next.offset) /* -------- REWRITES ----------------------------------------------------------- */ @@ -732,16 +730,17 @@ object Parsers { def testChar(idx: Int, p: Char => Boolean): Boolean = { val txt = source.content - idx < txt.length && p(txt(idx)) + idx >= 0 && idx < txt.length && p(txt(idx)) } def testChar(idx: Int, c: Char): Boolean = { val txt = source.content - idx < txt.length && txt(idx) == c + idx >= 0 && idx < txt.length && txt(idx) == c } def testChars(from: Int, str: String): Boolean = - str.isEmpty || + str.isEmpty + || testChar(from, str.head) && testChars(from + 1, str.tail) def skipBlanks(idx: Int, step: Int = 1): Int = @@ -1330,16 +1329,6 @@ object Parsers { */ def qualId(): Tree = dotSelectors(termIdent()) - /** Singleton ::= SimpleRef - * | SimpleLiteral - * | Singleton ‘.’ id - * -- not yet | Singleton ‘(’ Singletons ‘)’ - * -- not yet | Singleton ‘[’ Types ‘]’ - */ - def singleton(): Tree = - if isSimpleLiteral then simpleLiteral() - else dotSelectors(simpleRef()) - /** SimpleLiteral ::= [‘-’] integerLiteral * | [‘-’] floatingPointLiteral * | booleanLiteral @@ -1590,8 +1579,8 @@ object Parsers { /** Same as [[typ]], but if this results in a wildcard it emits a syntax error and * returns a tree for type `Any` instead. 
*/ - def toplevelTyp(intoOK: IntoOK = IntoOK.No, inContextBound: Boolean = false): Tree = - rejectWildcardType(typ(intoOK, inContextBound)) + def toplevelTyp(inContextBound: Boolean = false): Tree = + rejectWildcardType(typ(inContextBound)) private def getFunction(tree: Tree): Option[Function] = tree match { case Parens(tree1) => getFunction(tree1) @@ -1600,31 +1589,39 @@ object Parsers { case _ => None } - /** CaptureRef ::= { SimpleRef `.` } SimpleRef [`*`] - * | [ { SimpleRef `.` } SimpleRef `.` ] id `^` + /** CaptureRef ::= { SimpleRef `.` } SimpleRef [`*`] [`.` `rd`] -- under captureChecking */ def captureRef(): Tree = - val ref = dotSelectors(simpleRef()) - if isIdent(nme.raw.STAR) then - in.nextToken() - atSpan(startOffset(ref)): - PostfixOp(ref, Ident(nme.CC_REACH)) - else if isIdent(nme.UPARROW) then + + def derived(ref: Tree, name: TermName) = in.nextToken() - atSpan(startOffset(ref)): - convertToTypeId(ref) match - case ref: RefTree => makeCapsOf(ref) - case ref => ref - else ref + atSpan(startOffset(ref)) { PostfixOp(ref, Ident(name)) } + + def recur(ref: Tree): Tree = + if in.token == DOT then + in.nextToken() + if in.isIdent(nme.rd) then derived(ref, nme.CC_READONLY) + else recur(selector(ref)) + else if in.isIdent(nme.raw.STAR) then + val reachRef = derived(ref, nme.CC_REACH) + if in.token == DOT && in.lookahead.isIdent(nme.rd) then + in.nextToken() + derived(reachRef, nme.CC_READONLY) + else reachRef + else ref + + recur(simpleRef()) + end captureRef /** CaptureSet ::= `{` CaptureRef {`,` CaptureRef} `}` -- under captureChecking */ - def captureSet(): List[Tree] = inBraces { - if in.token == RBRACE then Nil else commaSeparated(captureRef) - } + def captureSet(): List[Tree] = + inBraces { + if in.token == RBRACE then Nil else commaSeparated(captureRef) + } def capturesAndResult(core: () => Tree): Tree = - if Feature.ccEnabled && in.token == LBRACE && in.offset == in.lastOffset + if Feature.ccEnabled && in.token == LBRACE && canStartCaptureSetContentsTokens.contains(in.lookahead.token) then CapturesAndResult(captureSet(), core()) else core() @@ -1635,28 +1632,19 @@ object Parsers { * | InfixType * FunType ::= (MonoFunType | PolyFunType) * MonoFunType ::= FunTypeArgs (‘=>’ | ‘?=>’) Type - * | (‘->’ | ‘?->’ ) [CaptureSet] Type -- under pureFunctions + * | (‘->’ | ‘?->’ ) [CaptureSet] Type -- under pureFunctions and captureChecking * PolyFunType ::= TypTypeParamClause '=>' Type - * | TypTypeParamClause ‘->’ [CaptureSet] Type -- under pureFunctions + * | TypTypeParamClause ‘->’ [CaptureSet] Type -- under pureFunctions and captureChecking * FunTypeArgs ::= InfixType * | `(' [ FunArgType {`,' FunArgType } ] `)' * | '(' [ TypedFunParam {',' TypedFunParam } ')' * MatchType ::= InfixType `match` <<< TypeCaseClauses >>> - * IntoType ::= [‘into’] IntoTargetType - * | ‘( IntoType ‘)’ - * IntoTargetType ::= Type - * | FunTypeArgs (‘=>’ | ‘?=>’) IntoType */ - def typ(intoOK: IntoOK = IntoOK.No, inContextBound: Boolean = false): Tree = + def typ(inContextBound: Boolean = false): Tree = val start = in.offset var imods = Modifiers() val erasedArgs: ListBuffer[Boolean] = ListBuffer() - def nestedIntoOK(token: Int) = - if token == TLARROW then IntoOK.No - else if intoOK == IntoOK.Nested then IntoOK.Yes - else intoOK - def functionRest(params: List[Tree]): Tree = val paramSpan = Span(start, in.lastOffset) atSpan(start, in.offset) { @@ -1685,9 +1673,8 @@ object Parsers { else accept(ARROW) - def resType() = typ(nestedIntoOK(token)) val resultType = - if isPure then capturesAndResult(resType) else 
resType() + if isPure then capturesAndResult(() => typ()) else typ() if token == TLARROW then for case ValDef(_, tpt, _) <- params do if isByNameType(tpt) then @@ -1722,12 +1709,6 @@ object Parsers { syntaxError(ErasedTypesCanOnlyBeFunctionTypes(), implicitKwPos(start)) t - def isIntoPrefix: Boolean = - intoOK == IntoOK.Yes - && in.isIdent(nme.into) - && in.featureEnabled(Feature.into) - && canStartTypeTokens.contains(in.lookahead.token) - def convertToElem(t: Tree): Tree = t match case ByNameTypeTree(t1) => syntaxError(ByNameParameterNotSupported(t), t.span) @@ -1764,32 +1745,6 @@ object Parsers { funArgType() commaSeparatedRest(t, funArg) accept(RPAREN) - - val intoAllowed = - intoOK == IntoOK.Yes - && args.lengthCompare(1) == 0 - && (!canFollowSimpleTypeTokens.contains(in.token) || followingIsVararg()) - val byNameAllowed = in.isArrow || isPureArrow - - def sanitize(arg: Tree): Tree = arg match - case ByNameTypeTree(t) if !byNameAllowed => - syntaxError(ByNameParameterNotSupported(t), t.span) - t - case PrefixOp(id @ Ident(tpnme.into), t) if !intoAllowed => - syntaxError(em"no `into` modifier allowed here", id.span) - t - case Parens(t) => - cpy.Parens(arg)(sanitize(t)) - case arg: FunctionWithMods => - val body1 = sanitize(arg.body) - if body1 eq arg.body then arg - else FunctionWithMods(arg.args, body1, arg.mods, arg.erasedParams).withSpan(arg.span) - case Function(args, res) if !intoAllowed => - cpy.Function(arg)(args, sanitize(res)) - case arg => - arg - val args1 = args.mapConserve(sanitize) - if in.isArrow || isPureArrow || erasedArgs.contains(true) then functionRest(args) else @@ -1805,23 +1760,29 @@ object Parsers { val start = in.offset val tparams = typeParamClause(ParamOwner.Type) if in.token == TLARROW then + // Filter illegal context bounds and report syntax error atSpan(start, in.skipToken()): - LambdaTypeTree(tparams, toplevelTyp()) + LambdaTypeTree(tparams.mapConserve(stripContextBounds("type lambdas")), toplevelTyp()) else if in.token == ARROW || isPureArrow(nme.PUREARROW) then val arrowOffset = in.skipToken() - val body = toplevelTyp(nestedIntoOK(in.token)) + val body = toplevelTyp() makePolyFunction(tparams, body, "type", Ident(nme.ERROR.toTypeName), start, arrowOffset) else accept(TLARROW) typ() else if in.token == INDENT then enclosed(INDENT, typ()) - else if isIntoPrefix then - PrefixOp(typeIdent(), typ(IntoOK.Nested)) else typeRest(infixType(inContextBound)) end typ + /** Removes context bounds from TypeDefs and returns a syntax error. 
*/ + private def stripContextBounds(in: String)(tparam: TypeDef) = tparam match + case TypeDef(name, rhs: ContextBounds) => + syntaxError(em"context bounds are not allowed in $in", rhs.span) + TypeDef(name, rhs.bounds) + case other => other + private def makeKindProjectorTypeDef(name: TypeName): TypeDef = { val isVarianceAnnotated = name.startsWith("+") || name.startsWith("-") // We remove the variance marker from the name without passing along the specified variance at all @@ -1872,7 +1833,7 @@ object Parsers { if in.token == LPAREN then funParamClause() :: funParamClauses() else Nil /** InfixType ::= RefinedType {id [nl] RefinedType} - * | RefinedType `^` // under capture checking + * | RefinedType `^` -- under captureChecking */ def infixType(inContextBound: Boolean = false): Tree = infixTypeRest(inContextBound)(refinedType()) @@ -1903,6 +1864,12 @@ object Parsers { || !canStartInfixTypeTokens.contains(ahead.token) || ahead.lineOffset > 0 + inline def gobbleHat(): Boolean = + if Feature.ccEnabled && isIdent(nme.UPARROW) then + in.nextToken() + true + else false + def refinedTypeRest(t: Tree): Tree = { argumentStart() if in.isNestedStart then @@ -2028,7 +1995,7 @@ object Parsers { /** SimpleType ::= SimpleLiteral * | ‘?’ TypeBounds * | SimpleType1 - * | SimpleType ‘(’ Singletons ‘)’ -- under language.experimental.dependent, checked in Typer + * | SimpleType ‘(’ Singletons ‘)’ * Singletons ::= Singleton {‘,’ Singleton} */ def simpleType(): Tree = @@ -2060,11 +2027,11 @@ object Parsers { val start = in.skipToken() typeBounds().withSpan(Span(start, in.lastOffset, start)) else - def singletonArgs(t: Tree): Tree = - if in.token == LPAREN && in.featureEnabled(Feature.dependent) - then singletonArgs(AppliedTypeTree(t, inParensWithCommas(commaSeparated(singleton)))) - else t - singletonArgs(simpleType1()) + val tpt = simpleType1() + if in.featureEnabled(Feature.modularity) && in.token == LPAREN then + parArgumentExprss(wrapNew(tpt)) + else + tpt /** SimpleType1 ::= id * | Singleton `.' id @@ -2159,35 +2126,45 @@ object Parsers { atSpan(startOffset(t), startOffset(id)) { Select(t, id.name) } } - /** ArgTypes ::= Type {`,' Type} - * | NamedTypeArg {`,' NamedTypeArg} - * NamedTypeArg ::= id `=' Type + /** ArgTypes ::= TypeArg {‘,’ TypeArg} + * | NamedTypeArg {‘,’ NamedTypeArg} + * TypeArg ::= Type + * | CaptureSet -- under captureChecking + * NamedTypeArg ::= id ‘=’ TypeArg * NamesAndTypes ::= NameAndType {‘,’ NameAndType} - * NameAndType ::= id ':' Type + * NameAndType ::= id ‘:’ Type */ def argTypes(namedOK: Boolean, wildOK: Boolean, tupleOK: Boolean): List[Tree] = - def argType() = - val t = typ() + def wildCardCheck(gen: Tree): Tree = + val t = gen if wildOK then t else rejectWildcardType(t) - def namedArgType() = + def argType() = wildCardCheck(typ()) + + def typeArg() = wildCardCheck: + if Feature.ccEnabled && in.token == LBRACE && !isDclIntroNext then // is this a capture set and not a refinement type? + // This case is ambiguous w.r.t. an Object literal {}. 
But since CC is enabled, we probably expect it to designate the empty set + concreteCapsType(captureSet()) + else typ() + + def namedTypeArg() = atSpan(in.offset): val name = ident() accept(EQUALS) - NamedArg(name.toTypeName, argType()) + NamedArg(name.toTypeName, typeArg()) - def namedElem() = + def nameAndType() = atSpan(in.offset): val name = ident() acceptColon() NamedArg(name, argType()) - if namedOK && isIdent && in.lookahead.token == EQUALS then - commaSeparated(() => namedArgType()) + if namedOK && (isIdent && in.lookahead.token == EQUALS) then + commaSeparated(() => namedTypeArg()) else if tupleOK && isIdent && in.lookahead.isColon && sourceVersion.enablesNamedTuples then - commaSeparated(() => namedElem()) + commaSeparated(() => nameAndType()) else - commaSeparated(() => argType()) + commaSeparated(() => typeArg()) end argTypes def paramTypeOf(core: () => Tree): Tree = @@ -2204,9 +2181,7 @@ object Parsers { * | `=>' Type * | `->' [CaptureSet] Type */ - val funArgType: () => Tree = - () => paramTypeOf(() => typ(IntoOK.Yes)) - // We allow intoOK and filter out afterwards in typ() + val funArgType: () => Tree = () => paramTypeOf(() => typ()) /** ParamType ::= ParamValueType * | `=>' ParamValueType @@ -2215,23 +2190,16 @@ object Parsers { def paramType(): Tree = paramTypeOf(paramValueType) /** ParamValueType ::= Type [`*'] - * | IntoType - * | ‘(’ IntoType ‘)’ `*' */ def paramValueType(): Tree = - val t = toplevelTyp(IntoOK.Yes) + val t = toplevelTyp() if isIdent(nme.raw.STAR) then - if !t.isInstanceOf[Parens] && isInto(t) then - syntaxError( - em"""`*` cannot directly follow `into` parameter - |the `into` parameter needs to be put in parentheses""", - in.offset) in.nextToken() atSpan(startOffset(t)): PostfixOp(t, Ident(tpnme.raw.STAR)) else t - /** TypeArgs ::= `[' Type {`,' Type} `]' + /** TypeArgs ::= `[' TypeArg {`,' TypeArg} `]' * NamedTypeArgs ::= `[' NamedTypeArg {`,' NamedTypeArg} `]' */ def typeArgs(namedOK: Boolean, wildOK: Boolean): List[Tree] = @@ -2245,21 +2213,28 @@ object Parsers { else inBraces(refineStatSeq()) - /** TypeBounds ::= [`>:' Type] [`<:' Type] - * | `^` -- under captureChecking + /** TypeBounds ::= [`>:' TypeBound ] [`<:' TypeBound ] + * TypeBound ::= Type + * | CaptureSet -- under captureChecking */ def typeBounds(): TypeBoundsTree = atSpan(in.offset): - if in.isIdent(nme.UPARROW) && Feature.ccEnabled then - in.nextToken() - makeCapsBound() - else - TypeBoundsTree(bound(SUPERTYPE), bound(SUBTYPE)) + TypeBoundsTree(bound(SUPERTYPE), bound(SUBTYPE)) private def bound(tok: Int): Tree = - if (in.token == tok) { in.nextToken(); toplevelTyp() } + if in.token == tok then + in.nextToken() + if Feature.ccEnabled && in.token == LBRACE && !isDclIntroNext then + capsBound(captureSet(), isLowerBound = tok == SUPERTYPE) + else toplevelTyp() else EmptyTree + private def capsBound(refs: List[Tree], isLowerBound: Boolean = false): Tree = + if isLowerBound && refs.isEmpty then // lower bounds with empty capture sets become a pure CapSet + Select(scalaDot(nme.caps), tpnme.CapSet) + else + concreteCapsType(refs) + /** TypeAndCtxBounds ::= TypeBounds [`:` ContextBounds] */ def typeAndCtxBounds(pname: TypeName): Tree = { @@ -2956,7 +2931,7 @@ object Parsers { /** Enumerators ::= Generator {semi Enumerator | Guard} */ def enumerators(): List[Tree] = - if sourceVersion.isAtLeast(`3.7`) then + if sourceVersion.enablesBetterFors then aliasesUntilGenerator() ++ enumeratorsRest() else generator() :: enumeratorsRest() @@ -3164,14 +3139,18 @@ object Parsers { def pattern1(location: 
Location = Location.InPattern): Tree = val p = pattern2(location) if in.isColon then - val isVariableOrNumber = isVarPattern(p) || p.isInstanceOf[Number] + val isVariable = unsplice(p) match { + case x: Ident => x.name.isVarPattern + case _ => false + } + val isVariableOrNumber = isVariable || p.isInstanceOf[Number] if !isVariableOrNumber then report.errorOrMigrationWarning( em"""Type ascriptions after patterns other than: | * variable pattern, e.g. `case x: String =>` | * number literal pattern, e.g. `case 10.5: Double =>` |are no longer supported. Remove the type ascription or move it to a separate variable pattern.""", - in.sourcePos(), + p.sourcePos, MigrationVersion.AscriptionAfterPattern) in.nextToken() ascription(p, location) @@ -3304,13 +3283,15 @@ object Parsers { case SEALED => Mod.Sealed() case IDENTIFIER => name match { - case nme.erased if in.erasedEnabled => Mod.Erased() case nme.inline => Mod.Inline() + case nme.into => Mod.Into() case nme.opaque => Mod.Opaque() case nme.open => Mod.Open() case nme.transparent => Mod.Transparent() case nme.infix => Mod.Infix() case nme.tracked => Mod.Tracked() + case nme.erased if in.erasedEnabled => Mod.Erased() + case nme.mut if Feature.ccEnabled => Mod.Mut() } } @@ -3378,7 +3359,8 @@ object Parsers { * | override * | opaque * LocalModifier ::= abstract | final | sealed | open | implicit | lazy | erased | - * inline | transparent | infix + * inline | transparent | infix | + * mut -- under captureChecking */ def modifiers(allowed: BitSet = modifierTokens, start: Modifiers = Modifiers()): Modifiers = { @tailrec @@ -3467,22 +3449,25 @@ object Parsers { recur(numLeadParams, firstClause = true, prevIsTypeClause = false) end typeOrTermParamClauses - /** ClsTypeParamClause::= ‘[’ ClsTypeParam {‘,’ ClsTypeParam} ‘]’ - * ClsTypeParam ::= {Annotation} [‘+’ | ‘-’] - * id [HkTypeParamClause] TypeAndCtxBounds + * ClsTypeParam ::= {Annotation} [‘+’ | ‘-’] + * id [HkTypeParamClause] TypeAndCtxBounds + * | {Annotation} [‘+’ | ‘-’] id `^` TypeAndCtxBounds -- under captureChecking * * DefTypeParamClause::= ‘[’ DefTypeParam {‘,’ DefTypeParam} ‘]’ - * DefTypeParam ::= {Annotation} - * id [HkTypeParamClause] TypeAndCtxBounds + * DefTypeParam ::= {Annotation} + * id [HkTypeParamClause] TypeAndCtxBounds + * | {Annotation} id `^` TypeAndCtxBounds -- under captureChecking * * TypTypeParamClause::= ‘[’ TypTypeParam {‘,’ TypTypeParam} ‘]’ - * TypTypeParam ::= {Annotation} - * (id | ‘_’) [HkTypeParamClause] TypeAndCtxBounds + * TypTypeParam ::= {Annotation} + * (id | ‘_’) [HkTypeParamClause] TypeAndCtxBounds + * | {Annotation} (id | ‘_’) `^` TypeAndCtxBounds -- under captureChecking * * HkTypeParamClause ::= ‘[’ HkTypeParam {‘,’ HkTypeParam} ‘]’ - * HkTypeParam ::= {Annotation} [‘+’ | ‘-’] - * (id | ‘_’) [HkTypePamClause] TypeBounds + * HkTypeParam ::= {Annotation} [‘+’ | ‘-’] + * (id | ‘_’) [HkTypePamClause] TypeBounds + * | {Annotation} [‘+’ | ‘-’] (id | ‘_’) `^` TypeBounds -- under captureChecking */ def typeParamClause(paramOwner: ParamOwner): List[TypeDef] = inBracketsWithCommas { @@ -3507,12 +3492,20 @@ object Parsers { in.nextToken() WildcardParamName.fresh().toTypeName else ident().toTypeName + val isCap = gobbleHat() val hkparams = typeParamClauseOpt(ParamOwner.Hk) - val bounds = - if paramOwner.acceptsCtxBounds then typeAndCtxBounds(name) - else if sourceVersion.enablesNewGivens && paramOwner == ParamOwner.Type then typeAndCtxBounds(name) - else typeBounds() - TypeDef(name, lambdaAbstract(hkparams, bounds)).withMods(mods) + val bounds = 
typeAndCtxBounds(name) match + case bounds: TypeBoundsTree => bounds + case bounds: ContextBounds if paramOwner.acceptsCtxBounds => bounds + case ContextBounds(bounds, cxBounds) => + for cbound <- cxBounds do report.error(IllegalContextBounds(), cbound.srcPos) + bounds + val res = TypeDef(name, lambdaAbstract(hkparams, bounds)).withMods(mods) + if isCap then + res.pushAttachment(CaptureVar, ()) + // putting the attachment here as well makes post-processing in the typer easier + bounds.pushAttachment(CaptureVar, ()) + res } } commaSeparated(() => typeParam()) @@ -3539,7 +3532,7 @@ object Parsers { * ClsParams ::= ClsParam {‘,’ ClsParam} * ClsParam ::= {Annotation} * [{Modifier} (‘val’ | ‘var’)] Param - * TypelessClause ::= DefTermParamClause + * ConstrParamClause ::= DefTermParamClause * | UsingParamClause * * DefTermParamClause::= [nl] ‘(’ [DefTermParams] ‘)’ @@ -3627,7 +3620,10 @@ object Parsers { // begin termParamClause inParensWithCommas { if in.token == RPAREN && paramOwner != ParamOwner.ExtensionPrefix && !impliedMods.is(Given) - then Nil + then + if paramOwner.takesOnlyUsingClauses then + syntaxError(em"`using` expected") + Nil else val clause = if paramOwner == ParamOwner.ExtensionPrefix @@ -3665,7 +3661,7 @@ object Parsers { } /** ClsTermParamClauses ::= {ClsTermParamClause} [[nl] ‘(’ [‘implicit’] ClsParams ‘)’] - * TypelessClauses ::= TypelessClause {TypelessClause} + * ConstrParamClauses ::= ConstrParamClause {ConstrParamClause} * * @return The parameter definitions */ @@ -3939,7 +3935,7 @@ object Parsers { } /** DefDef ::= DefSig [‘:’ Type] [‘=’ Expr] - * | this TypelessClauses [DefImplicitClause] `=' ConstrExpr + * | this ConstrParamClauses [DefImplicitClause] `=' ConstrExpr * DefSig ::= id [DefParamClauses] [DefImplicitClause] */ def defDefOrDcl(start: Offset, mods: Modifiers, numLeadParams: Int = 0): DefDef = atSpan(start, nameStart) { @@ -4033,12 +4029,22 @@ object Parsers { argumentExprss(mkApply(Ident(nme.CONSTRUCTOR), argumentExprs())) } - /** TypeDef ::= id [HkTypeParamClause] {FunParamClause} TypeAndCtxBounds [‘=’ Type] + /** TypeDef ::= id [HkTypeParamClause] {FunParamClause} TypeAndCtxBounds [‘=’ TypeDefRHS ] + * | id `^` TypeAndCtxBounds [‘=’ TypeDefRHS ] -- under captureChecking + * TypeDefRHS ::= Type + * | CaptureSet -- under captureChecking */ def typeDefOrDcl(start: Offset, mods: Modifiers): Tree = { + + def typeDefRHS(): Tree = + if Feature.ccEnabled && in.token == LBRACE && !isDclIntroNext then + concreteCapsType(captureSet()) + else toplevelTyp() + newLinesOpt() atSpan(start, nameStart) { val nameIdent = typeIdent() + val isCapDef = gobbleHat() val tname = nameIdent.name.asTypeName val tparams = typeParamClauseOpt(ParamOwner.Hk) val vparamss = funParamClauses() @@ -4046,20 +4052,24 @@ object Parsers { def makeTypeDef(rhs: Tree): Tree = { val rhs1 = lambdaAbstractAll(tparams :: vparamss, rhs) val tdef = TypeDef(nameIdent.name.toTypeName, rhs1) - if (nameIdent.isBackquoted) + if nameIdent.isBackquoted then tdef.pushAttachment(Backquoted, ()) + if isCapDef then + tdef.pushAttachment(CaptureVar, ()) + // putting the attachment here as well makes post-processing in the typer easier + rhs.pushAttachment(CaptureVar, ()) finalizeDef(tdef, mods, start) } in.token match { case EQUALS => in.nextToken() - makeTypeDef(toplevelTyp()) + makeTypeDef(typeDefRHS()) case SUBTYPE | SUPERTYPE => typeAndCtxBounds(tname) match case bounds: TypeBoundsTree if in.token == EQUALS => val eqOffset = in.skipToken() - var rhs = toplevelTyp() + var rhs = typeDefRHS() rhs match { case mtt: 
MatchTypeTree => bounds match { @@ -4088,6 +4098,9 @@ object Parsers { } } + private def concreteCapsType(refs: List[Tree]): Tree = + makeRetaining(Select(scalaDot(nme.caps), tpnme.CapSet), refs, tpnme.retains) + /** TmplDef ::= ([‘case’] ‘class’ | ‘trait’) ClassDef * | [‘case’] ‘object’ ObjectDef * | ‘enum’ EnumDef @@ -4405,7 +4418,10 @@ object Parsers { leadParamss += extParams isUsingClause(extParams) do () - leadParamss ++= termParamClauses(ParamOwner.ExtensionFollow, numLeadParams) + // Empty parameter clauses are filtered out. They are already reported as syntax errors and are not + // allowed here. + val extFollowParams = termParamClauses(ParamOwner.ExtensionFollow, numLeadParams).filterNot(_.isEmpty) + leadParamss ++= extFollowParams if in.isColon then syntaxError(em"no `:` expected here") in.nextToken() @@ -4691,7 +4707,8 @@ object Parsers { syntaxError(msg, tree.span) Nil tree match - case tree: MemberDef if !(tree.mods.flags & (ModifierFlags &~ Mutable)).isEmpty => + case tree: MemberDef + if !(tree.mods.flags & ModifierFlags).isEmpty && !tree.mods.isMutableVar => // vars are OK, mut defs are not fail(em"refinement cannot be ${(tree.mods.flags & ModifierFlags).flagStrings().mkString("`", "`, `", "`")}") case tree: DefDef if tree.termParamss.nestedExists(!_.rhs.isEmpty) => fail(em"refinement cannot have default arguments") diff --git a/compiler/src/dotty/tools/dotc/parsing/Scanners.scala b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala index ed20c189796b..2764715a3209 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Scanners.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala @@ -603,6 +603,20 @@ object Scanners { lastWidth = r.knownWidth newlineIsSeparating = r.isInstanceOf[InBraces] + // can emit OUTDENT if line is not non-empty blank line at EOF + inline def isTrailingBlankLine: Boolean = + token == EOF && { + val end = buf.length - 1 // take terminal NL as empty last line + val prev = buf.lastIndexWhere(!isWhitespace(_), end = end) + prev < 0 || end - prev > 0 && isLineBreakChar(buf(prev)) + } + + inline def canDedent: Boolean = + lastToken != INDENT + && !isLeadingInfixOperator(nextWidth) + && !statCtdTokens.contains(lastToken) + && !isTrailingBlankLine + if newlineIsSeparating && canEndStatTokens.contains(lastToken) && canStartStatTokens.contains(token) @@ -615,9 +629,8 @@ object Scanners { || nextWidth == lastWidth && (indentPrefix == MATCH || indentPrefix == CATCH) && token != CASE then if currentRegion.isOutermost then if nextWidth < lastWidth then currentRegion = topLevelRegion(nextWidth) - else if !isLeadingInfixOperator(nextWidth) && !statCtdTokens.contains(lastToken) && lastToken != INDENT then + else if canDedent then currentRegion match - case _ if token == EOF => // no OUTDENT at EOF case r: Indented => insert(OUTDENT, offset) handleNewIndentWidth(r.enclosing, ir => @@ -671,13 +684,16 @@ object Scanners { reset() if atEOL then token = COLONeol - // consume => and insert if applicable + // consume => and insert if applicable. 
Used to detect colon arrow: x => def observeArrowIndented(): Unit = if isArrow && indentSyntax then peekAhead() - val atEOL = isAfterLineEnd || token == EOF + val atEOL = isAfterLineEnd + val atEOF = token == EOF reset() - if atEOL then + if atEOF then + token = EOF + else if atEOL then val nextWidth = indentWidth(next.offset) val lastWidth = currentRegion.indentWidth if lastWidth < nextWidth then @@ -1132,7 +1148,7 @@ object Scanners { val lookahead = LookaheadScanner() while lookahead.nextToken() - lookahead.isNewLine || lookahead.isSoftModifier + lookahead.token == NEWLINE || lookahead.isSoftModifier do () modifierFollowers.contains(lookahead.token) } @@ -1209,7 +1225,10 @@ object Scanners { def isSoftModifier: Boolean = token == IDENTIFIER - && (softModifierNames.contains(name) || name == nme.erased && erasedEnabled || name == nme.tracked && trackedEnabled) + && (softModifierNames.contains(name) + || name == nme.erased && erasedEnabled + || name == nme.tracked && trackedEnabled + || name == nme.mut && Feature.ccEnabled) def isSoftModifierInModifierPosition: Boolean = isSoftModifier && inModifierPosition() diff --git a/compiler/src/dotty/tools/dotc/parsing/Tokens.scala b/compiler/src/dotty/tools/dotc/parsing/Tokens.scala index c78a336ecdf5..5b9a62fcb7da 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Tokens.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Tokens.scala @@ -295,9 +295,11 @@ object Tokens extends TokensCommon { final val colonEOLPredecessors = BitSet(RPAREN, RBRACKET, BACKQUOTED_IDENT, THIS, SUPER, NEW) + final val canStartCaptureSetContentsTokens = BitSet(IDENTIFIER, BACKQUOTED_IDENT, THIS, RBRACE) + final val closingParens = BitSet(RPAREN, RBRACKET, RBRACE) - final val softModifierNames = Set(nme.inline, nme.opaque, nme.open, nme.transparent, nme.infix) + final val softModifierNames = Set(nme.inline, nme.into, nme.opaque, nme.open, nme.transparent, nme.infix) def showTokenDetailed(token: Int): String = debugString(token) diff --git a/compiler/src/dotty/tools/dotc/plugins/Plugins.scala b/compiler/src/dotty/tools/dotc/plugins/Plugins.scala index a6672d475129..f3a58cde1a0c 100644 --- a/compiler/src/dotty/tools/dotc/plugins/Plugins.scala +++ b/compiler/src/dotty/tools/dotc/plugins/Plugins.scala @@ -1,8 +1,6 @@ package dotty.tools.dotc package plugins -import scala.language.unsafeNulls - import core.* import Contexts.* import Decorators.em @@ -40,20 +38,19 @@ trait Plugins { // Explicit parameterization of recover to avoid -Xlint warning about inferred Any errors foreach (_.recover[Any] { // legacy behavior ignores altogether, so at least warn devs - case e: MissingPluginException => report.warning(e.getMessage.nn) - case e: Exception => report.inform(e.getMessage.nn) + case e: MissingPluginException => report.warning(e.getMessage) + case e: Exception => report.inform(e.getMessage) }) goods map (_.get) } - private var _roughPluginsList: List[Plugin] = uninitialized + private var _roughPluginsList: List[Plugin] | Null = null protected def roughPluginsList(using Context): List[Plugin] = if (_roughPluginsList == null) { _roughPluginsList = loadRoughPluginsList - _roughPluginsList } - else _roughPluginsList + _roughPluginsList.nn /** Load all available plugins. 
Skips plugins that * either have the same name as another one, or which @@ -99,13 +96,12 @@ trait Plugins { plugs } - private var _plugins: List[Plugin] = uninitialized + private var _plugins: List[Plugin] | Null = null def plugins(using Context): List[Plugin] = if (_plugins == null) { _plugins = loadPlugins - _plugins } - else _plugins + _plugins.nn /** A description of all the plugins that are loaded */ def pluginDescriptions(using Context): String = diff --git a/compiler/src/dotty/tools/dotc/printing/Formatting.scala b/compiler/src/dotty/tools/dotc/printing/Formatting.scala index ccd7b4e4e282..70d305c2e372 100644 --- a/compiler/src/dotty/tools/dotc/printing/Formatting.scala +++ b/compiler/src/dotty/tools/dotc/printing/Formatting.scala @@ -8,11 +8,22 @@ import core.* import Texts.*, Types.*, Flags.*, Symbols.*, Contexts.* import Decorators.* import reporting.Message -import util.DiffUtil +import util.{DiffUtil, SimpleIdentitySet} import Highlighting.* object Formatting { + /** Essentially, a function Context => T, which can be created with `delay` */ + abstract class Delay[T]: + def apply(c: Context): T + + /** Delay a Context => T computation so that it is generated from the embedded + * context of a string formatter instead of the enclosing context. This is needed + * to make disambiguation work for such embedded computatons. + */ + def delay[T](fn: Context ?=> T): Delay[T] = new Delay[T]: + def apply(c: Context) = fn(using c) + object ShownDef: /** Represents a value that has been "shown" and can be consumed by StringFormatter. * Not just a string because it may be a Seq that StringFormatter will intersperse with the trailing separator. @@ -76,6 +87,9 @@ object Formatting { given [X: Show]: Show[Seq[X]] with def show(x: Seq[X]) = CtxShow(x.map(toStr)) + given [X: Show]: Show[Delay[X]] = new Show: + def show(x: Delay[X]) = CtxShow(c ?=> x(c)) + given Show[Seq[Nothing]] with def show(x: Seq[Nothing]) = CtxShow(x) @@ -87,6 +101,9 @@ object Formatting { def show(x: H *: T) = CtxShow(toStr(x.head) *: toShown(x.tail).asInstanceOf[Tuple]) + given [X <: AnyRef: Show]: Show[SimpleIdentitySet[X]] with + def show(x: SimpleIdentitySet[X]) = summon[Show[List[X]]].show(x.toList) + given Show[FlagSet] with def show(x: FlagSet) = x.flagsString @@ -127,6 +144,7 @@ object Formatting { given Show[Class[?]] = ShowAny given Show[Throwable] = ShowAny given Show[StringBuffer] = ShowAny + given Show[StringBuilder] = ShowAny given Show[CompilationUnit] = ShowAny given Show[Phases.Phase] = ShowAny given Show[TyperState] = ShowAny diff --git a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala index e90aeb217362..750e4b646e0d 100644 --- a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala @@ -15,7 +15,9 @@ import util.SourcePosition import scala.util.control.NonFatal import scala.annotation.switch import config.{Config, Feature} +import ast.tpd import cc.* +import Capabilities.* class PlainPrinter(_ctx: Context) extends Printer { @@ -27,6 +29,14 @@ class PlainPrinter(_ctx: Context) extends Printer { protected def printDebug = ctx.settings.YprintDebug.value + /** Print Fresh instances as */ + protected def ccVerbose = ctx.settings.YccVerbose.value + + /** Elide redundant ^ and ^{cap.rd} when printing instances of Capability + * classes. Gets set when singletons are printed as `(x: T)` to reduce verbosity. 
+ */ + private var elideCapabilityCaps = false + private var openRecs: List[RecType] = Nil protected def maxToTextRecursions: Int = 100 @@ -153,35 +163,66 @@ class PlainPrinter(_ctx: Context) extends Printer { + defn.FromJavaObjectSymbol def toTextCaptureSet(cs: CaptureSet): Text = - if printDebug && ctx.settings.YccDebug.value && !cs.isConst then cs.toString + if printDebug && ctx.settings.YccDebug.value + && !cs.isConst && !cs.isInstanceOf[CaptureSet.HiddenSet] //HiddenSets can be cyclic + then cs.toString else if cs == CaptureSet.Fluid then "" else val core: Text = if !cs.isConst && cs.elems.isEmpty then "?" - else "{" ~ Text(cs.elems.toList.map(toTextCaptureRef), ", ") ~ "}" + else "{" ~ Text(cs.processElems(_.toList.map(toTextCapability)), ", ") ~ "}" // ~ Str("?").provided(!cs.isConst) core ~ cs.optionalInfo private def toTextRetainedElem[T <: Untyped](ref: Tree[T]): Text = ref match - case ref: RefTree[?] if ref.typeOpt.exists => - toTextCaptureRef(ref.typeOpt) + case ref: RefTree[?] => + ref.typeOpt match + case c: Capability => toTextCapability(c) + case _ => toText(ref) case TypeApply(fn, arg :: Nil) if fn.symbol == defn.Caps_capsOf => toTextRetainedElem(arg) - case _ => - toText(ref) + case ReachCapabilityApply(ref1) => toTextRetainedElem(ref1) ~ "*" + case ReadOnlyCapabilityApply(ref1) => toTextRetainedElem(ref1) ~ ".rd" + case _ => toText(ref) private def toTextRetainedElems[T <: Untyped](refs: List[Tree[T]]): Text = "{" ~ Text(refs.map(ref => toTextRetainedElem(ref)), ", ") ~ "}" + type GeneralCaptureSet = CaptureSet | List[tpd.Tree] + + protected def isUniversalCaptureSet(refs: GeneralCaptureSet): Boolean = refs match + case refs: CaptureSet => + // The set if universal if it consists only of caps.cap or + // only of an existential Fresh that is bound to the immediately enclosing method. + val isUniversal = + refs.elems.size == 1 + && (refs.isUniversal + || !printDebug && !ccVerbose && !showUniqueIds && refs.elems.nth(0).match + case ResultCap(binder) => + CCState.openExistentialScopes match + case b :: _ => binder eq b + case _ => false + case _ => + false + ) + isUniversal + || !refs.elems.isEmpty && refs.elems.forall(_.isCapOrFresh) && !ccVerbose + case (ref: tpd.Tree) :: Nil => ref.symbol == defn.captureRoot + case _ => false + + protected def toTextGeneralCaptureSet(refs: GeneralCaptureSet): Text = refs match + case refs: CaptureSet => toTextCaptureSet(refs) + case refs: List[tpd.Tree] => toTextRetainedElems(refs) + /** Print capturing type, overridden in RefinedPrinter to account for * capturing function types. 
*/ - protected def toTextCapturing(parent: Type, refsText: Text, boxText: Text): Text = + protected def toTextCapturing(parent: Type, refs: GeneralCaptureSet, boxText: Text): Text = changePrec(InfixPrec): - boxText ~ toTextLocal(parent) ~ "^" - ~ (refsText provided refsText != rootSetText) - - final protected def rootSetText = Str("{cap}") // TODO Use disambiguation + boxText + ~ toTextLocal(parent) + ~ "^" + ~ toTextGeneralCaptureSet(refs).provided(!isUniversalCaptureSet(refs) || ccVerbose) def toText(tp: Type): Text = controlled { homogenize(tp) match { @@ -190,7 +231,6 @@ class PlainPrinter(_ctx: Context) extends Printer { case tp: TermRef if !tp.denotationIsCurrent && !homogenizedView // always print underlying when testing picklers - && !tp.isRootCapability || tp.symbol.is(Module) || tp.symbol.name == nme.IMPORT => toTextRef(tp) ~ ".type" @@ -202,14 +242,14 @@ class PlainPrinter(_ctx: Context) extends Printer { else toTextPrefixOf(tp) ~ selectionString(tp) case tp: TermParamRef => - ParamRefNameString(tp) ~ lambdaHash(tp.binder) ~ ".type" + ParamRefNameString(tp) ~ hashStr(tp.binder) ~ ".type" case tp: TypeParamRef => val suffix = if showNestingLevel then val tvar = ctx.typerState.constraint.typeVarOfParam(tp) if tvar.exists then s"#${tvar.asInstanceOf[TypeVar].nestingLevel.toString}" else "" else "" - ParamRefNameString(tp) ~ lambdaHash(tp.binder) ~ suffix + ParamRefNameString(tp) ~ hashStr(tp.binder) ~ suffix case tp: SingletonType => toTextSingleton(tp) case AppliedType(tycon, args) => @@ -242,15 +282,15 @@ class PlainPrinter(_ctx: Context) extends Printer { }.close case tp @ CapturingType(parent, refs) => val boxText: Text = Str("box ") provided tp.isBoxed //&& ctx.settings.YccDebug.value - val showAsCap = refs.isUniversal && (refs.elems.size == 1 || !printDebug) - val refsText = if showAsCap then rootSetText else toTextCaptureSet(refs) - toTextCapturing(parent, refsText, boxText) + if elideCapabilityCaps + && parent.derivesFrom(defn.Caps_Capability) + && refs.containsTerminalCapability + && refs.isReadOnly + then toText(parent) + else toTextCapturing(parent, refs, boxText) case tp @ RetainingType(parent, refs) => if Feature.ccEnabledSomewhere then - val refsText = refs match - case ref :: Nil if ref.symbol == defn.captureRoot => rootSetText - case _ => toTextRetainedElems(refs) - toTextCapturing(parent, refsText, "") ~ Str("R").provided(printDebug) + toTextCapturing(parent, refs, "") ~ Str("R").provided(printDebug) else toText(parent) case tp: PreviousErrorType if ctx.settings.XprintTypes.value => "" // do not print previously reported error message because they may try to print this error type again recursively @@ -270,33 +310,30 @@ class PlainPrinter(_ctx: Context) extends Printer { ~ paramsText(tp) ~ ")" ~ (Str(": ") provided !tp.resultType.isInstanceOf[MethodOrPoly]) - ~ toText(tp.resultType) + ~ CCState.inNewExistentialScope(tp)(toText(tp.resultType)) } case ExprType(restp) => def arrowText: Text = restp match case AnnotatedType(parent, ann) if ann.symbol == defn.RetainsByNameAnnot => - val refs = ann.tree.retainedElems - if refs.exists(_.symbol == defn.captureRoot) then Str("=>") - else Str("->") ~ toTextRetainedElems(refs) + ann.tree.retainedElems match + case ref :: Nil if ref.symbol == defn.captureRoot => Str("=>") + case refs => Str("->") ~ toTextRetainedElems(refs) case _ => if Feature.pureFunsEnabled then "->" else "=>" changePrec(GlobalPrec)(arrowText ~ " " ~ toText(restp)) case tp: HKTypeLambda => changePrec(GlobalPrec) { - "[" ~ paramsText(tp) ~ "]" ~ 
lambdaHash(tp) ~ Str(" =>> ") ~ toTextGlobal(tp.resultType) + "[" ~ paramsText(tp) ~ "]" ~ hashStr(tp) ~ Str(" =>> ") ~ toTextGlobal(tp.resultType) } case tp: PolyType => changePrec(GlobalPrec) { - "[" ~ paramsText(tp) ~ "]" ~ lambdaHash(tp) ~ + "[" ~ paramsText(tp) ~ "]" ~ hashStr(tp) ~ (Str(": ") provided !tp.resultType.isInstanceOf[MethodOrPoly]) ~ toTextGlobal(tp.resultType) } case AnnotatedType(tpe, annot) => - if annot.symbol == defn.InlineParamAnnot || annot.symbol == defn.ErasedParamAnnot - then toText(tpe) - else if (annot.symbol == defn.IntoAnnot || annot.symbol == defn.IntoParamAnnot) - && !printDebug - then atPrec(GlobalPrec)( Str("into ") ~ toText(tpe) ) + if defn.SilentAnnots.contains(annot.symbol) && !printDebug then + toText(tpe) else if annot.isInstanceOf[CaptureAnnotation] then toTextLocal(tpe) ~ "^" ~ toText(annot) else @@ -333,12 +370,16 @@ class PlainPrinter(_ctx: Context) extends Printer { }.close def toTextSingleton(tp: SingletonType): Text = - "(" ~ toTextRef(tp) ~ " : " ~ toTextGlobal(tp.underlying) ~ ")" + val saved = elideCapabilityCaps + elideCapabilityCaps = !ccVerbose && !ctx.settings.explain.value + // don't elide capability capture sets under -Ycc-verbose or -explain + try "(" ~ toTextRef(tp) ~ " : " ~ toTextGlobal(tp.underlying) ~ ")" + finally elideCapabilityCaps = saved protected def paramsText(lam: LambdaType): Text = { def paramText(ref: ParamRef) = val erased = ref.underlying.hasAnnotation(defn.ErasedParamAnnot) - keywordText("erased ").provided(erased) ~ ParamRefNameString(ref) ~ lambdaHash(lam) ~ toTextRHS(ref.underlying, isParameter = true) + keywordText("erased ").provided(erased) ~ ParamRefNameString(ref) ~ hashStr(lam) ~ toTextRHS(ref.underlying, isParameter = true) Text(lam.paramRefs.map(paramText), ", ") } @@ -350,11 +391,11 @@ class PlainPrinter(_ctx: Context) extends Printer { /** The name of the symbol without a unique id. */ protected def simpleNameString(sym: Symbol): String = nameString(sym.name) - /** If -uniqid is set, the hashcode of the lambda type, after a # */ - protected def lambdaHash(pt: LambdaType): Text = - if (showUniqueIds) - try "#" + pt.hashCode - catch { case ex: NullPointerException => "" } + /** If -uniqid is set, the hashcode of the type, after a # */ + protected def hashStr(tp: Type): String = + if showUniqueIds then + try "#" + tp.hashCode + catch case ex: NullPointerException => "" else "" /** A string to append to a symbol composed of: @@ -397,13 +438,13 @@ class PlainPrinter(_ctx: Context) extends Printer { case tp: ThisType => nameString(tp.cls) + ".this" case SuperType(thistpe: SingletonType, _) => - toTextRef(thistpe).map(_.replaceAll("""\bthis$""", "super").nn) + toTextRef(thistpe).map(_.replaceAll("""\bthis$""", "super")) case SuperType(thistpe, _) => "Super(" ~ toTextGlobal(thistpe) ~ ")" case tp @ ConstantType(value) => toText(value) case pref: TermParamRef => - ParamRefNameString(pref) ~ lambdaHash(pref.binder) + ParamRefNameString(pref) ~ hashStr(pref.binder) case tp: RecThis => val idx = openRecs.reverse.indexOf(tp.binder) if (idx >= 0) selfRecName(idx + 1) @@ -415,14 +456,27 @@ class PlainPrinter(_ctx: Context) extends Printer { } } - def toTextCaptureRef(tp: Type): Text = - homogenize(tp) match - case tp: TermRef if tp.symbol == defn.captureRoot => Str("cap") - case tp: SingletonType => toTextRef(tp) - case tp: (TypeRef | TypeParamRef) => toText(tp) ~ "^" - case ReachCapability(tp1) => toTextCaptureRef(tp1) ~ "*" - case MaybeCapability(tp1) => toTextCaptureRef(tp1) ~ "?" 
- case tp => toText(tp) + def toTextCapability(c: Capability): Text = c match + case ReadOnly(c1) => toTextCapability(c1) ~ ".rd" + case Reach(c1) => toTextCapability(c1) ~ "*" + case Maybe(c1) => toTextCapability(c1) ~ "?" + case GlobalCap => "cap" + case c: ResultCap => + def idStr = s"##${c.rootId}" + // TODO: Better printing? USe a mode where we print more detailed + val vbleText: Text = CCState.openExistentialScopes.indexOf(c.binder) match + case -1 => + "" + case n => "outer_" * n ++ (if ccVerbose then "localcap" else "cap") + vbleText ~ Str(hashStr(c.binder)).provided(printDebug) ~ Str(idStr).provided(showUniqueIds) + case c: FreshCap => + val idStr = if showUniqueIds then s"#${c.rootId}" else "" + if ccVerbose then s"" + else "cap" + case tp: TypeProxy => + homogenize(tp) match + case tp: SingletonType => toTextRef(tp) + case tp => toText(tp) protected def isOmittablePrefix(sym: Symbol): Boolean = defn.unqualifiedOwnerTypes.exists(_.symbol == sym) || isEmptyPrefix(sym) @@ -536,7 +590,7 @@ class PlainPrinter(_ctx: Context) extends Printer { else if sym.is(Param) then "parameter" else if sym.is(Given) then "given instance" else if (flags.is(Lazy)) "lazy value" - else if (flags.is(Mutable)) "variable" + else if (sym.isMutableVar) "variable" else if (sym.isClassConstructor && sym.isPrimaryConstructor) "primary constructor" else if (sym.isClassConstructor) "constructor" else if (sym.is(Method)) "method" @@ -552,7 +606,7 @@ class PlainPrinter(_ctx: Context) extends Printer { else if (flags.is(Module)) "object" else if (sym.isClass) "class" else if (sym.isType) "type" - else if (flags.is(Mutable)) "var" + else if (sym.isMutableVarOrAccessor) "var" else if (flags.is(Package)) "package" else if (sym.is(Method)) "def" else if (sym.isTerm && !flags.is(Param)) "val" @@ -631,7 +685,7 @@ class PlainPrinter(_ctx: Context) extends Printer { case '"' => "\\\"" case '\'' => "\\\'" case '\\' => "\\\\" - case _ => if ch.isControl then f"${"\\"}u${ch.toInt}%04x" else String.valueOf(ch).nn + case _ => if ch.isControl then f"${"\\"}u${ch.toInt}%04x" else String.valueOf(ch) } def toText(const: Constant): Text = const.tag match { @@ -641,7 +695,7 @@ class PlainPrinter(_ctx: Context) extends Printer { case LongTag => literalText(const.longValue.toString + "L") case DoubleTag => literalText(const.doubleValue.toString + "d") case FloatTag => literalText(const.floatValue.toString + "f") - case _ => literalText(String.valueOf(const.value).nn) + case _ => literalText(String.valueOf(const.value)) } /** Usual target for `Annotation#toText`, overridden in RefinedPrinter */ diff --git a/compiler/src/dotty/tools/dotc/printing/Printer.scala b/compiler/src/dotty/tools/dotc/printing/Printer.scala index 9f485ee84cda..9b37589585f0 100644 --- a/compiler/src/dotty/tools/dotc/printing/Printer.scala +++ b/compiler/src/dotty/tools/dotc/printing/Printer.scala @@ -11,6 +11,7 @@ import typer.Implicits.* import util.SourcePosition import typer.ImportInfo import cc.CaptureSet +import cc.Capabilities.Capability import scala.annotation.internal.sharable @@ -108,7 +109,7 @@ abstract class Printer { def toTextRefinement(rt: RefinedType): Text /** Textual representation of a reference in a capture set */ - def toTextCaptureRef(tp: Type): Text + def toTextCapability(ref: Capability): Text /** Textual representation of a reference in a capture set */ def toTextCaptureSet(cs: CaptureSet): Text diff --git a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala index 
32115e6bc087..324d4f0c1d23 100644 --- a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -32,6 +32,7 @@ import dotty.tools.dotc.util.SourcePosition import dotty.tools.dotc.ast.untpd.{MemberDef, Modifiers, PackageDef, RefTree, Template, TypeDef, ValOrDefDef} import cc.* import dotty.tools.dotc.parsing.JavaParsers +import dotty.tools.dotc.transform.TreeExtractors.BinaryOp class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { @@ -155,54 +156,70 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { else simpleNameString(tsym) } - private def arrow(isGiven: Boolean, isPure: Boolean): String = + protected def arrow(isGiven: Boolean, isPure: Boolean): String = (if isGiven then "?" else "") + (if isPure then "->" else "=>") - private def toTextFunction(tp: AppliedType, refs: Text = Str("")): Text = + private def toTextFunction(tp: AppliedType, refs: GeneralCaptureSet | Null): Text = val AppliedType(tycon, args) = (tp: @unchecked) val tsym = tycon.typeSymbol - val isGiven = tsym.name.isContextFunction - val capturesRoot = refs == rootSetText - val isPure = - Feature.pureFunsEnabled && !tsym.name.isImpureFunction && !capturesRoot - changePrec(GlobalPrec) { - val argStr: Text = - if args.length == 2 - && !defn.isDirectTupleNType(args.head) - && !isGiven - then - atPrec(InfixPrec) { argText(args.head) } - else + toTextFunction(args.init, args.last, tp, refs, + isContextual = tsym.name.isContextFunction, + isPure = Feature.pureFunsEnabled && !tsym.name.isImpureFunction) + + protected def funMiddleText(isContextual: Boolean, isPure: Boolean, refs: GeneralCaptureSet | Null): Text = + val (printPure, refsText) = + if refs == null then (isPure, Str("")) + else if isUniversalCaptureSet(refs) then (false, Str("")) + else (isPure, toTextGeneralCaptureSet(refs)) + arrow(isContextual, printPure) ~ refsText + + private def toTextFunction(args: List[Type], res: Type, fn: MethodType | AppliedType, + refs: GeneralCaptureSet | Null, isContextual: Boolean, isPure: Boolean): Text = + changePrec(GlobalPrec): + val argStr: Text = args match + case arg :: Nil if !defn.isDirectTupleNType(arg) && !isContextual => + atPrec(InfixPrec): + argText(arg) + case _=> "(" - ~ argsText(args.init) + ~ argsText(args) ~ ")" - argStr - ~ " " ~ arrow(isGiven, isPure) - ~ (refs provided !capturesRoot) - ~ " " ~ argText(args.last) - } - - protected def toTextMethodAsFunction(info: Type, isPure: Boolean, refs: Text = Str("")): Text = info match - case info: MethodType => - val capturesRoot = refs == rootSetText - changePrec(GlobalPrec) { - "(" - ~ paramsText(info) - ~ ") " - ~ arrow(info.isImplicitMethod, isPure && !capturesRoot) - ~ (refs provided !capturesRoot) - ~ " " - ~ toTextMethodAsFunction(info.resultType, isPure) - } - case info: PolyType => - changePrec(GlobalPrec) { - "[" - ~ paramsText(info) - ~ "] => " - ~ toTextMethodAsFunction(info.resultType, isPure) - } - case _ => - toText(info) + argStr ~ " " + ~ funMiddleText(isContextual, isPure, refs) ~ " " + ~ fn.match + case fn: MethodType => CCState.inNewExistentialScope(fn)(argText(res)) + case _ => argText(res) + + protected def toTextMethodAsFunction(info: Type, isPure: Boolean, refs: GeneralCaptureSet | Null): Text = + def recur(tp: Type, enclInfo: MethodType | Null): Text = tp match + case tp: MethodType => + val isContextual = tp.isImplicitMethod + if cc.isCaptureCheckingOrSetup + && tp.allParamNamesSynthetic + && !tp.looksResultDependent && !tp.looksParamDependent + && 
!showUniqueIds && !printDebug && !ccVerbose + then + // cc.Setup converts all functions to dependent functions. Undo that when printing. + toTextFunction(tp.paramInfos, tp.resType, tp, refs, isContextual, isPure) + else + changePrec(GlobalPrec): + "(" + ~ paramsText(tp) + ~ ") " + ~ funMiddleText(isContextual, isPure, refs) + ~ " " + ~ recur(tp.resultType, tp) + case tp: PolyType => + changePrec(GlobalPrec) { + "[" + ~ paramsText(tp) + ~ "] => " + ~ recur(tp.resultType, enclInfo) + } + case _ => + if enclInfo != null then CCState.inNewExistentialScope(enclInfo)(toText(tp)) + else toText(tp) + recur(info, null) override def toText(tp: Type): Text = controlled { def toTextTuple(args: List[Type]): Text = @@ -261,7 +278,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { case _ => val tsym = tycon.typeSymbol if tycon.isRepeatedParam then toTextLocal(args.head) ~ "*" - else if defn.isFunctionSymbol(tsym) then toTextFunction(tp) + else if defn.isFunctionSymbol(tsym) then toTextFunction(tp, null) else if isInfixType(tp) then val l :: r :: Nil = args: @unchecked val opName = tyconName(tycon) @@ -286,15 +303,12 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { if !printDebug && appliedText(tp.asInstanceOf[HKLambda].resType).isEmpty => // don't eta contract if the application would be printed specially toText(tycon) - case Existential(boundVar, unpacked) - if !printDebug && !ctx.settings.YccDebug.value && !unpacked.existsPart(_ == boundVar) => - toText(unpacked) case tp: RefinedType if defn.isFunctionType(tp) && !printDebug => toTextMethodAsFunction(tp.refinedInfo, isPure = Feature.pureFunsEnabled && !tp.typeSymbol.name.isImpureFunction, refs = tp.parent match - case CapturingType(_, cs) => toTextCaptureSet(cs) - case _ => "") + case CapturingType(_, cs) => cs + case _ => null) case tp: TypeRef => if (tp.symbol.isAnonymousClass && !showUniqueIds) toText(tp.info) @@ -310,7 +324,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { case tp: ClassInfo => if tp.cls.derivesFrom(defn.PolyFunctionClass) then tp.member(nme.apply).info match - case info: PolyType => toTextMethodAsFunction(info, isPure = false) + case info: PolyType => toTextMethodAsFunction(info, isPure = false, refs = null) case _ => toTextParents(tp.parents) ~~ "{...}" else toTextParents(tp.parents) ~~ "{...}" case JavaArrayType(elemtp) => @@ -337,8 +351,6 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { "?" ~ (("(ignored: " ~ toText(ignored) ~ ")") provided printDebug) case tp @ PolyProto(targs, resType) => "[applied to [" ~ toTextGlobal(targs, ", ") ~ "] returning " ~ toText(resType) - case ReachCapability(_) | MaybeCapability(_) => - toTextCaptureRef(tp) case _ => super.toText(tp) } @@ -379,6 +391,10 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { def optDotPrefix(tree: This) = optText(tree.qual)(_ ~ ".") provided !isLocalThis(tree) + /** Should a binary operation with this operator be printed infix? 
*/ + def isInfix(op: Symbol) = + op.exists && (op.isDeclaredInfix || op.name.isOperatorName) + def caseBlockText(tree: Tree): Text = tree match { case Block(stats, expr) => toText(stats :+ expr, "\n") case expr => toText(expr) @@ -470,6 +486,13 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { optDotPrefix(tree) ~ keywordStr("this") ~ idText(tree) case Super(qual: This, mix) => optDotPrefix(qual) ~ keywordStr("super") ~ optText(mix)("[" ~ _ ~ "]") + case BinaryOp(l, op, r) if isInfix(op) => + val isRightAssoc = op.name.endsWith(":") + val opPrec = parsing.precedence(op.name) + val leftPrec = if isRightAssoc then opPrec + 1 else opPrec + val rightPrec = if !isRightAssoc then opPrec + 1 else opPrec + changePrec(opPrec): + atPrec(leftPrec)(toText(l)) ~ " " ~ toText(op.name) ~ " " ~ atPrec(rightPrec)(toText(r)) case app @ Apply(fun, args) => if (fun.hasType && fun.symbol == defn.throwMethod) changePrec (GlobalPrec) { @@ -496,7 +519,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { } } case Typed(expr, tpt) => - changePrec(InfixPrec) { + changePrec(DotPrec) { if isWildcardStarArg(tree) then expr match case Ident(nme.WILDCARD_STAR) => @@ -679,9 +702,6 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { && Feature.ccEnabled && !printDebug && Phases.checkCapturesPhase.exists // might be missing on -Ytest-pickler then toTextRetainsAnnot - else if annot.symbol.enclosingClass == defn.IntoAnnot && !printDebug then - atPrec(GlobalPrec): - Str("into ") ~ toText(arg) else toTextAnnot case EmptyTree => "" @@ -744,6 +764,8 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { case PostfixOp(l, op) => if op.name == nme.CC_REACH then changePrec(DotPrec) { toText(l) ~ "*" } + else if op.name == nme.CC_READONLY then + changePrec(DotPrec) { toText(l) ~ ".rd" } else changePrec(InfixPrec) { toText(l) ~ " " ~ toText(op) } case PrefixOp(op, r) => @@ -826,13 +848,13 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { } } - override protected def toTextCapturing(tp: Type, refsText: Text, boxText: Text): Text = tp match + override protected def toTextCapturing(tp: Type, refs: GeneralCaptureSet, boxText: Text): Text = tp match case tp: AppliedType if defn.isFunctionSymbol(tp.typeSymbol) && !printDebug => - boxText ~ toTextFunction(tp, refsText) + boxText ~ toTextFunction(tp, refs) case tp: RefinedType if defn.isFunctionType(tp) && !printDebug => - boxText ~ toTextMethodAsFunction(tp.refinedInfo, isPure = !tp.typeSymbol.name.isImpureFunction, refsText) + boxText ~ toTextMethodAsFunction(tp.refinedInfo, isPure = !tp.typeSymbol.name.isImpureFunction, refs) case _ => - super.toTextCapturing(tp, refsText, boxText) + super.toTextCapturing(tp, refs, boxText) override def toText[T <: Untyped](tree: Tree[T]): Text = controlled { import untpd.* @@ -938,7 +960,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { tree.hasType && tree.symbol.exists && ctx.settings.YprintSyms.value protected def nameIdText[T <: Untyped](tree: NameTree[T]): Text = - if (tree.hasType && tree.symbol.exists) { + if (tree.hasType && tree.symbol.exists && tree.symbol.isType == tree.name.isTypeName) { val str = nameString(tree.symbol) tree match { case tree: RefTree => withPos(str, tree.sourcePos) diff --git a/compiler/src/dotty/tools/dotc/printing/Showable.scala b/compiler/src/dotty/tools/dotc/printing/Showable.scala index 4480aa9c76a4..490778a7413a 100644 --- a/compiler/src/dotty/tools/dotc/printing/Showable.scala +++ 
b/compiler/src/dotty/tools/dotc/printing/Showable.scala @@ -25,7 +25,7 @@ trait Showable extends Any { /** The string representation with each line after the first one indented * by the given given margin (in spaces). */ - def showIndented(margin: Int)(using Context): String = show.replace("\n", "\n" + " " * margin).nn + def showIndented(margin: Int)(using Context): String = show.replace("\n", "\n" + " " * margin) /** The summarized string representation of this showable element. * Recursion depth is limited to some smallish value. Default is diff --git a/compiler/src/dotty/tools/dotc/printing/Texts.scala b/compiler/src/dotty/tools/dotc/printing/Texts.scala index 475e2c6900d5..203e3fd58155 100644 --- a/compiler/src/dotty/tools/dotc/printing/Texts.scala +++ b/compiler/src/dotty/tools/dotc/printing/Texts.scala @@ -5,7 +5,7 @@ import scala.annotation.internal.sharable object Texts { @sharable - private val ansi = java.util.regex.Pattern.compile("\u001b\\[\\d+m").nn + private val ansi = java.util.regex.Pattern.compile("\u001b\\[\\d+m") sealed abstract class Text { @@ -74,7 +74,7 @@ object Texts { else appendIndented(that)(width) private def lengthWithoutAnsi(str: String): Int = - ansi.matcher(str).nn.replaceAll("").nn.length + ansi.matcher(str).replaceAll("").length def layout(width: Int): Text = this match { case Str(s, _) => @@ -137,7 +137,7 @@ object Texts { case _ => relems.foldLeft(-1)((acc, relem) => acc max relem.maxLine) } - def mkString(width: Int, withLineNumbers: Boolean): String = { + def mkString(width: Int = Int.MaxValue, withLineNumbers: Boolean = false): String = { val sb = new StringBuilder val numberWidth = if (withLineNumbers) (2 * maxLine.toString.length) + 2 else 0 layout(width - numberWidth).print(sb, numberWidth) diff --git a/compiler/src/dotty/tools/dotc/quoted/Interpreter.scala b/compiler/src/dotty/tools/dotc/quoted/Interpreter.scala index e34d35065476..dbc2694dc891 100644 --- a/compiler/src/dotty/tools/dotc/quoted/Interpreter.scala +++ b/compiler/src/dotty/tools/dotc/quoted/Interpreter.scala @@ -1,8 +1,6 @@ package dotty.tools.dotc package quoted -import scala.language.unsafeNulls - import scala.collection.mutable import scala.reflect.ClassTag diff --git a/compiler/src/dotty/tools/dotc/report.scala b/compiler/src/dotty/tools/dotc/report.scala index 7f1eeb8e22eb..61bcc9c8d780 100644 --- a/compiler/src/dotty/tools/dotc/report.scala +++ b/compiler/src/dotty/tools/dotc/report.scala @@ -21,7 +21,7 @@ object report: ctx.reporter.report(warning) def deprecationWarning(msg: Message, pos: SrcPos, origin: String = "")(using Context): Unit = - issueWarning(new DeprecationWarning(msg, pos.sourcePos, origin)) + issueWarning(DeprecationWarning(msg, addInlineds(pos), origin)) def migrationWarning(msg: Message, pos: SrcPos)(using Context): Unit = issueWarning(new MigrationWarning(msg, pos.sourcePos)) @@ -84,8 +84,8 @@ object report: def bestEffortError(ex: Throwable, msg: String)(using Context): Unit = val stackTrace = Option(ex.getStackTrace()).map { st => - if st.nn.isEmpty then "" - else s"Stack trace: \n ${st.nn.mkString("\n ")}".stripMargin + if st.isEmpty then "" + else s"Stack trace: \n ${st.mkString("\n ")}".stripMargin }.getOrElse("") // Build tools and dotty's test framework may check precisely for // "Unsuccessful best-effort compilation." error text. 
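A minimal sketch of the parenthesization that the BinaryOp infix printing added to RefinedPrinter.scala above is aiming for; plain user-level expressions with illustrative values only. Giving the non-associating operand a precedence one notch above the operator means flat chains can come back without parentheses, while nesting on the other side keeps them:
  // Left-associative operator such as `-`: only right-nesting needs parentheses.
  val flat   = 1 - 2 - 3     // same tree as (1 - 2) - 3, can reprint without parentheses
  val nested = 1 - (2 - 3)   // the parentheses must be preserved when reprinting
  // Right-associative operator (name ending in ':') such as `::`: the sides swap.
  val xss: List[List[Int]] = (1 :: Nil) :: Nil   // here the left operand is the one needing parentheses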
@@ -138,7 +138,9 @@ object report: private def addInlineds(pos: SrcPos)(using Context): SourcePosition = def recur(pos: SourcePosition, inlineds: List[Trees.Tree[?]]): SourcePosition = inlineds match - case inlined :: inlineds1 => pos.withOuter(recur(inlined.sourcePos, inlineds1)) + case inlined :: inlineds => + val outer = recur(inlined.sourcePos, inlineds) + pos.withOuter(outer) case Nil => pos recur(pos.sourcePos, tpd.enclosingInlineds) diff --git a/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala b/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala index 8f8f4676f43b..035f1fa1ab48 100644 --- a/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala +++ b/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala @@ -222,6 +222,12 @@ enum ErrorMessageID(val isActive: Boolean = true) extends java.lang.Enum[ErrorMe case EnumMayNotBeValueClassesID // errorNumber: 206 case IllegalUnrollPlacementID // errorNumber: 207 case ExtensionHasDefaultID // errorNumber: 208 + case FormatInterpolationErrorID // errorNumber: 209 + case ValueClassCannotExtendAliasOfAnyValID // errorNumber: 210 + case MatchIsNotPartialFunctionID // errorNumber: 211 + case OnlyFullyDependentAppliedConstructorTypeID // errorNumber: 212 + case PointlessAppliedConstructorTypeID // errorNumber: 213 + case IllegalContextBoundsID // errorNumber: 214 def errorNumber = ordinal - 1 diff --git a/compiler/src/dotty/tools/dotc/reporting/Message.scala b/compiler/src/dotty/tools/dotc/reporting/Message.scala index 1ac5c6ecf407..1e313ca749d3 100644 --- a/compiler/src/dotty/tools/dotc/reporting/Message.scala +++ b/compiler/src/dotty/tools/dotc/reporting/Message.scala @@ -8,6 +8,8 @@ import printing.{RefinedPrinter, MessageLimiter, ErrorMessageLimiter} import printing.Texts.Text import printing.Formatting.hl import config.SourceVersion +import cc.CaptureSet +import cc.Capabilities.* import scala.language.unsafeNulls import scala.annotation.threadUnsafe @@ -40,7 +42,18 @@ object Message: i"\n$what can be rewritten automatically under -rewrite $optionStr." else "" - private type Recorded = Symbol | ParamRef | SkolemType + enum Disambiguation: + case All + case AllExcept(strs: List[String]) + case None + + def recordOK(str: String): Boolean = this match + case All => true + case AllExcept(strs) => !strs.contains(str) + case None => false + end Disambiguation + + private type Recorded = Symbol | ParamRef | SkolemType | RootCapability private case class SeenKey(str: String, isType: Boolean) @@ -48,7 +61,7 @@ object Message: * adds superscripts for disambiguations, and can explain recorded symbols * in ` where` clause */ - private class Seen(disambiguate: Boolean): + private class Seen(disambiguate: Disambiguation): /** The set of lambdas that were opened at some point during printing. */ private val openedLambdas = new collection.mutable.HashSet[LambdaType] @@ -62,12 +75,14 @@ object Message: var nonSensical = false /** If false, stop all recordings */ - private var recordOK = disambiguate + private var disambi = disambiguate + + def isActive = disambi != Disambiguation.None /** Clear all entries and stop further entries to be added */ def disable() = seen.clear() - recordOK = false + disambi = Disambiguation.None /** Record an entry `entry` with given String representation `str` and a * type/term namespace identified by `isType`. @@ -76,63 +91,65 @@ object Message: * and following recordings get consecutive superscripts starting with 2. * @return The possibly superscripted version of `str`. 
*/ - def record(str: String, isType: Boolean, entry: Recorded)(using Context): String = if !recordOK then str else - //println(s"recording $str, $isType, $entry") - - /** If `e1` is an alias of another class of the same name, return the other - * class symbol instead. This normalization avoids recording e.g. scala.List - * and scala.collection.immutable.List as two different types - */ - def followAlias(e1: Recorded): Recorded = e1 match { - case e1: Symbol if e1.isAliasType => - val underlying = e1.typeRef.underlyingClassRef(refinementOK = false).typeSymbol - if (underlying.name == e1.name) underlying else e1.namedType.dealias.typeSymbol - case _ => e1 - } - val key = SeenKey(str, isType) - val existing = seen(key) - lazy val dealiased = followAlias(entry) - - /** All lambda parameters with the same name are given the same superscript as - * long as their corresponding binder has been printed. - * See tests/neg/lambda-rename.scala for test cases. - */ - def sameSuperscript(cur: Recorded, existing: Recorded) = - (cur eq existing) || - (cur, existing).match - case (cur: ParamRef, existing: ParamRef) => - (cur.paramName eq existing.paramName) && - openedLambdas.contains(cur.binder) && - openedLambdas.contains(existing.binder) - case _ => - false - - // The length of alts corresponds to the number of superscripts we need to print. - var alts = existing.dropWhile(alt => !sameSuperscript(dealiased, followAlias(alt))) - if alts.isEmpty then - alts = entry :: existing - seen(key) = alts - - val suffix = alts.length match { - case 1 => "" - case n => n.toString.toCharArray.map { - case '0' => '⁰' - case '1' => '¹' - case '2' => '²' - case '3' => '³' - case '4' => '⁴' - case '5' => '⁵' - case '6' => '⁶' - case '7' => '⁷' - case '8' => '⁸' - case '9' => '⁹' - }.mkString - } - str + suffix + def record(str: String, isType: Boolean, entry: Recorded)(using Context): String = + if disambi.recordOK(str) then + //println(s"recording $str, $isType, $entry") + + /** If `e1` is an alias of another class of the same name, return the other + * class symbol instead. This normalization avoids recording e.g. scala.List + * and scala.collection.immutable.List as two different types + */ + def followAlias(e1: Recorded): Recorded = e1 match { + case e1: Symbol if e1.isAliasType => + val underlying = e1.typeRef.underlyingClassRef(refinementOK = false).typeSymbol + if (underlying.name == e1.name) underlying else e1.namedType.dealias.typeSymbol + case _ => e1 + } + val key = SeenKey(str, isType) + val existing = seen(key) + lazy val dealiased = followAlias(entry) + + /** All lambda parameters with the same name are given the same superscript as + * long as their corresponding binder has been printed. + * See tests/neg/lambda-rename.scala for test cases. + */ + def sameSuperscript(cur: Recorded, existing: Recorded) = + (cur eq existing) || + (cur, existing).match + case (cur: ParamRef, existing: ParamRef) => + (cur.paramName eq existing.paramName) && + openedLambdas.contains(cur.binder) && + openedLambdas.contains(existing.binder) + case _ => + false + + // The length of alts corresponds to the number of superscripts we need to print. 
+ var alts = existing.dropWhile(alt => !sameSuperscript(dealiased, followAlias(alt))) + if alts.isEmpty then + alts = entry :: existing + seen(key) = alts + + val suffix = alts.length match { + case 1 => "" + case n => n.toString.toCharArray.map { + case '0' => '⁰' + case '1' => '¹' + case '2' => '²' + case '3' => '³' + case '4' => '⁴' + case '5' => '⁵' + case '6' => '⁶' + case '7' => '⁷' + case '8' => '⁸' + case '9' => '⁹' + }.mkString + } + str + suffix + else str end record /** Create explanation for single `Recorded` type or symbol */ - private def explanation(entry: AnyRef)(using Context): String = + private def explanation(entry: AnyRef, key: String)(using Context): String = def boundStr(bound: Type, default: ClassSymbol, cmp: String) = if (bound.isRef(default)) "" else i"$cmp $bound" @@ -152,7 +169,7 @@ object Message: "" } - entry match { + entry match case param: TypeParamRef => s"is a type variable${addendum("constraint", TypeComparer.bounds(param))}" case param: TermParamRef => @@ -166,7 +183,11 @@ object Message: s"is a ${ctx.printer.kindString(sym)}${sym.showExtendedLocation}${addendum("bounds", info)}" case tp: SkolemType => s"is an unknown value of type ${tp.widen.show}" - } + case ref: RootCapability => + val relation = + if List("^", "=>", "?=>").exists(key.startsWith) then "refers to" + else "is" + s"$relation ${ref.descr}" end explanation /** Produce a where clause with explanations for recorded iterms. @@ -177,6 +198,7 @@ object Message: case param: ParamRef => false case skolem: SkolemType => true case sym: Symbol => ctx.gadt.contains(sym) && ctx.gadt.fullBounds(sym) != TypeBounds.empty + case ref: Capability => ref.isTerminalCapability } val toExplain: List[(String, Recorded)] = seen.toList.flatMap { kvs => @@ -201,7 +223,7 @@ object Message: } } - val explainParts = toExplain.map { case (str, entry) => (str, explanation(entry)) } + val explainParts = toExplain.map { case (str, entry) => (str, explanation(entry, str)) } val explainLines = columnar(explainParts) if (explainLines.isEmpty) "" else i"where: $explainLines%\n %\n" end explanations @@ -225,7 +247,27 @@ object Message: case tp: SkolemType => seen.record(tp.repr.toString, isType = true, tp) case _ => super.toTextRef(tp) - override def toTextMethodAsFunction(info: Type, isPure: Boolean, refs: Text): Text = + override def toTextCapability(c: Capability): Text = c match + case c: RootCapability if seen.isActive => seen.record("cap", isType = false, c) + case _ => super.toTextCapability(c) + + override def toTextCapturing(parent: Type, refs: GeneralCaptureSet, boxText: Text) = refs match + case refs: CaptureSet + if isUniversalCaptureSet(refs) && !defn.isFunctionType(parent) && !printDebug && seen.isActive => + boxText + ~ toTextLocal(parent) + ~ seen.record("^", isType = true, refs.elems.nth(0).asInstanceOf[RootCapability]) + case _ => + super.toTextCapturing(parent, refs, boxText) + + override def funMiddleText(isContextual: Boolean, isPure: Boolean, refs: GeneralCaptureSet | Null): Text = + refs match + case refs: CaptureSet if isUniversalCaptureSet(refs) && seen.isActive => + seen.record(arrow(isContextual, isPure = false), isType = true, refs.elems.nth(0).asInstanceOf[RootCapability]) + case _ => + super.funMiddleText(isContextual, isPure, refs) + + override def toTextMethodAsFunction(info: Type, isPure: Boolean, refs: GeneralCaptureSet): Text = info match case info: LambdaType => seen.openLambda(info) @@ -348,13 +390,15 @@ abstract class Message(val errorId: ErrorMessageID)(using Context) { self => */ def 
isNonSensical: Boolean = { message; myIsNonSensical } - private var disambiguate: Boolean = true + private var disambiguate: Disambiguation = Disambiguation.All - def withoutDisambiguation(): this.type = - disambiguate = false + def withDisambiguation(disambi: Disambiguation): this.type = + disambiguate = disambi this - private def inMessageContext(disambiguate: Boolean)(op: Context ?=> String): String = + def withoutDisambiguation(): this.type = withDisambiguation(Disambiguation.None) + + private def inMessageContext(disambiguate: Disambiguation)(op: Context ?=> String): String = if ctx eq NoContext then op else val msgContext = ctx.printer match @@ -373,7 +417,7 @@ abstract class Message(val errorId: ErrorMessageID)(using Context) { self => /** The explanation to report. tags are filtered out */ @threadUnsafe lazy val explanation: String = - inMessageContext(disambiguate = false)(explain) + inMessageContext(disambiguate = Disambiguation.None)(explain) /** The implicit `Context` in messages is a large thing that we don't want * persisted. This method gets around that by duplicating the message, @@ -424,12 +468,17 @@ trait NoDisambiguation extends Message: withoutDisambiguation() /** The fallback `Message` containing no explanation and having no `kind` */ -final class NoExplanation(msgFn: Context ?=> String)(using Context) extends Message(ErrorMessageID.NoExplanationID) { +final class NoExplanation(msgFn: Context ?=> String, actions: List[CodeAction] = List.empty)(using Context) extends Message(ErrorMessageID.NoExplanationID) { def msg(using Context): String = msgFn def explain(using Context): String = "" val kind: MessageKind = MessageKind.NoKind + override def actions(using Context): List[CodeAction] = actions + override def toString(): String = msg + + def withActions(actions: CodeAction*): NoExplanation = + new NoExplanation(msgFn, actions.toList) } /** The extractor for `NoExplanation` can be used to check whether any error diff --git a/compiler/src/dotty/tools/dotc/reporting/MessageKind.scala b/compiler/src/dotty/tools/dotc/reporting/MessageKind.scala index bb02a08d2e46..e09dd1d6e69e 100644 --- a/compiler/src/dotty/tools/dotc/reporting/MessageKind.scala +++ b/compiler/src/dotty/tools/dotc/reporting/MessageKind.scala @@ -23,6 +23,7 @@ enum MessageKind: case PotentialIssue case UnusedSymbol case Staging + case Interpolation /** Human readable message that will end up being shown to the user. 
* NOTE: This is only used in the situation where you have multiple words diff --git a/compiler/src/dotty/tools/dotc/reporting/messages.scala b/compiler/src/dotty/tools/dotc/reporting/messages.scala index b5d67f0808b2..7e9cff590b69 100644 --- a/compiler/src/dotty/tools/dotc/reporting/messages.scala +++ b/compiler/src/dotty/tools/dotc/reporting/messages.scala @@ -23,7 +23,7 @@ import typer.Implicits.* import typer.Inferencing import scala.util.control.NonFatal import StdNames.nme -import printing.Formatting.hl +import Formatting.{hl, delay} import ast.Trees.* import ast.untpd import ast.tpd @@ -37,6 +37,7 @@ import scala.jdk.CollectionConverters.* import dotty.tools.dotc.util.SourceFile import dotty.tools.dotc.config.SourceVersion import DidYouMean.* +import Message.Disambiguation /** Messages * ======== @@ -61,7 +62,7 @@ trait ShowMatchTrace(tps: Type*)(using Context) extends Message: override def msgPostscript(using Context): String = super.msgPostscript ++ matchReductionAddendum(tps*) -abstract class TypeMismatchMsg(found: Type, expected: Type)(errorId: ErrorMessageID)(using Context) +abstract class TypeMismatchMsg(found: Type, val expected: Type)(errorId: ErrorMessageID)(using Context) extends Message(errorId), ShowMatchTrace(found, expected): def kind = MessageKind.TypeMismatch def explain(using Context) = err.whyNoMatchStr(found, expected) @@ -92,7 +93,7 @@ abstract class CyclicMsg(errorId: ErrorMessageID)(using Context) extends Message protected def debugInfo = if ctx.settings.YdebugCyclic.value then - "\n\nStacktrace:" ++ ex.getStackTrace().nn.mkString("\n ", "\n ", "") + "\n\nStacktrace:" ++ ex.getStackTrace().mkString("\n ", "\n ", "") else "\n\n Run with both -explain-cyclic and -Ydebug-cyclic to see full stack trace." protected def context: String = ex.optTrace match @@ -1172,6 +1173,7 @@ class OverrideError( member: Symbol, other: Symbol, memberTp: Type, otherTp: Type)(using Context) extends DeclarationMsg(OverrideErrorID), NoDisambiguation: + withDisambiguation(Disambiguation.AllExcept(List(member.name.toString))) def msg(using Context) = val isConcreteOverAbstract = (other.owner isSubClass member.owner) && other.is(Deferred) && !member.is(Deferred) @@ -1181,8 +1183,8 @@ extends DeclarationMsg(OverrideErrorID), NoDisambiguation: |(Note that ${err.infoStringWithLocation(other, base)} is abstract, |and is therefore overridden by concrete ${err.infoStringWithLocation(member, base)})""" else "" - i"""error overriding ${err.infoStringWithLocation(other, base)}; - | ${err.infoString(member, base, showLocation = member.owner != base.typeSymbol)} $core$addendum""" + i"""error overriding ${delay(err.infoStringWithLocation(other, base))}; + | ${delay(err.infoString(member, base, showLocation = member.owner != base.typeSymbol))} $core$addendum""" override def canExplain = memberTp.exists && otherTp.exists def explain(using Context) = @@ -1694,7 +1696,7 @@ class OnlyClassesCanHaveDeclaredButUndefinedMembers(sym: Symbol)( def msg(using Context) = i"""Declaration of $sym not allowed here: only classes can have declared but undefined members""" def explain(using Context) = - if sym.is(Mutable) then "Note that variables need to be initialized to be defined." + if sym.isMutableVarOrAccessor then "Note that variables need to be initialized to be defined." 
else "" } @@ -1813,6 +1815,12 @@ class ValueClassParameterMayNotBeCallByName(valueClass: Symbol, param: Symbol)(u def explain(using Context) = "" } +class ValueClassCannotExtendAliasOfAnyVal(valueClass: Symbol, alias: Symbol)(using Context) + extends SyntaxMsg(ValueClassCannotExtendAliasOfAnyValID) { + def msg(using Context) = i"""A value class cannot extend a type alias ($alias) of ${hl("AnyVal")}""" + def explain(using Context) = "" +} + class SuperCallsNotAllowedInlineable(symbol: Symbol)(using Context) extends SyntaxMsg(SuperCallsNotAllowedInlineableID) { def msg(using Context) = i"Super call not allowed in inlineable $symbol" @@ -2421,13 +2429,15 @@ class ClassCannotExtendEnum(cls: Symbol, parent: Symbol)(using Context) extends } class NotAnExtractor(tree: untpd.Tree)(using Context) extends PatternMatchMsg(NotAnExtractorID) { - def msg(using Context) = i"$tree cannot be used as an extractor in a pattern because it lacks an unapply or unapplySeq method" + def msg(using Context) = i"$tree cannot be used as an extractor in a pattern because it lacks an ${hl("unapply")} or ${hl("unapplySeq")} method with the appropriate signature" def explain(using Context) = - i"""|An ${hl("unapply")} method should be defined in an ${hl("object")} as follow: + i"""|An ${hl("unapply")} method should be in an ${hl("object")}, take a single explicit term parameter, and: | - If it is just a test, return a ${hl("Boolean")}. For example ${hl("case even()")} | - If it returns a single sub-value of type T, return an ${hl("Option[T]")} | - If it returns several sub-values T1,...,Tn, group them in an optional tuple ${hl("Option[(T1,...,Tn)]")} | + |Additionally, ${hl("unapply")} or ${hl("unapplySeq")} methods cannot take type parameters after their explicit term parameter. + | |Sometimes, the number of sub-values isn't fixed and we would like to return a sequence. |For this reason, you can also define patterns through ${hl("unapplySeq")} which returns ${hl("Option[Seq[T]]")}. 
|This mechanism is used for instance in pattern ${hl("case List(x1, ..., xn)")}""" @@ -3132,7 +3142,7 @@ extends ReferenceMsg(CannotBeAccessedID): case _ => i"none of the overloaded alternatives named $name can" val where = if (ctx.owner.exists) i" from ${ctx.owner.enclosingClass}" else "" - val whyNot = new StringBuffer + val whyNot = new StringBuilder for alt <- alts do val cls = alt.owner.enclosingSubClass val owner = if cls.exists then cls else alt.owner @@ -3140,10 +3150,10 @@ extends ReferenceMsg(CannotBeAccessedID): if alt.is(Protected) then if alt.privateWithin.exists && alt.privateWithin != owner then if owner.is(Final) then alt.privateWithin.showLocated - else alt.privateWithin.showLocated + ", or " + owner.showLocated + " or one of its subclasses" + else s"${alt.privateWithin.showLocated}, or ${owner.showLocated} or one of its subclasses" else if owner.is(Final) then owner.showLocated - else owner.showLocated + " or one of its subclasses" + else s"${owner.showLocated} or one of its subclasses" else alt.privateWithin.orElse(owner).showLocated val accessMod = if alt.is(Protected) then "protected" else "private" @@ -3302,23 +3312,29 @@ extends TypeMsg(ConstructorProxyNotValueID): |are not values themselves, they can only be referred to in selections.""" class UnusedSymbol(errorText: String, val actions: List[CodeAction] = Nil)(using Context) -extends Message(UnusedSymbolID) { +extends Message(UnusedSymbolID): def kind = MessageKind.UnusedSymbol override def msg(using Context) = errorText override def explain(using Context) = "" override def actions(using Context) = this.actions -} object UnusedSymbol: def imports(actions: List[CodeAction])(using Context): UnusedSymbol = UnusedSymbol(i"unused import", actions) def localDefs(using Context): UnusedSymbol = UnusedSymbol(i"unused local definition") - def explicitParams(using Context): UnusedSymbol = UnusedSymbol(i"unused explicit parameter") - def implicitParams(using Context): UnusedSymbol = UnusedSymbol(i"unused implicit parameter") + def explicitParams(sym: Symbol)(using Context): UnusedSymbol = + UnusedSymbol(i"unused explicit parameter${paramAddendum(sym)}") + def implicitParams(sym: Symbol)(using Context): UnusedSymbol = + UnusedSymbol(i"unused implicit parameter${paramAddendum(sym)}") def privateMembers(using Context): UnusedSymbol = UnusedSymbol(i"unused private member") def patVars(using Context): UnusedSymbol = UnusedSymbol(i"unused pattern variable") - def unsetLocals(using Context): UnusedSymbol = UnusedSymbol(i"unset local variable, consider using an immutable val instead") - def unsetPrivates(using Context): UnusedSymbol = UnusedSymbol(i"unset private variable, consider using an immutable val instead") + def unsetLocals(using Context): UnusedSymbol = + UnusedSymbol(i"unset local variable, consider using an immutable val instead") + def unsetPrivates(using Context): UnusedSymbol = + UnusedSymbol(i"unset private variable, consider using an immutable val instead") + private def paramAddendum(sym: Symbol)(using Context): String = + if sym.denot.owner.is(ExtensionMethod) then i" in extension ${sym.denot.owner}" + else "" class NonNamedArgumentInJavaAnnotation(using Context) extends SyntaxMsg(NonNamedArgumentInJavaAnnotationID): @@ -3430,12 +3446,12 @@ extends DeclarationMsg(IllegalUnrollPlacementID): val isCtor = method.isConstructor def what = if isCtor then i"a ${if method.owner.is(Trait) then "trait" else "class"} constructor" else i"method ${method.name}" val prefix = s"Cannot unroll parameters of $what" - if 
method.is(Deferred) then - i"$prefix: it must not be abstract" + if method.isLocal then + i"$prefix because it is a local method" + else if !method.isEffectivelyFinal then + i"$prefix because it can be overridden" else if isCtor && method.owner.is(Trait) then i"implementation restriction: $prefix" - else if !(isCtor || method.is(Final) || method.owner.is(ModuleClass)) then - i"$prefix: it is not final" else if method.owner.companionClass.is(CaseClass) then i"$prefix of a case class companion object: please annotate the class constructor instead" else @@ -3444,3 +3460,67 @@ extends DeclarationMsg(IllegalUnrollPlacementID): def explain(using Context) = "" end IllegalUnrollPlacement + +class BadFormatInterpolation(errorText: String)(using Context) extends Message(FormatInterpolationErrorID): + def kind = MessageKind.Interpolation + protected def msg(using Context) = errorText + protected def explain(using Context) = "" + +class MatchIsNotPartialFunction(using Context) extends SyntaxMsg(MatchIsNotPartialFunctionID): + protected def msg(using Context) = + "match expression in result of block will not be used to synthesize partial function" + protected def explain(using Context) = + i"""A `PartialFunction` can be synthesized from a function literal if its body is just a pattern match. + | + |For example, `collect` takes a `PartialFunction`. + | (1 to 10).collect(i => i match { case n if n % 2 == 0 => n }) + |is equivalent to using a "pattern-matching anonymous function" directly: + | (1 to 10).collect { case n if n % 2 == 0 => n } + |Compare an operation that requires a `Function1` instead: + | (1 to 10).map { case n if n % 2 == 0 => n case n => n + 1 } + | + |As a convenience, the "selector expression" of the match can be an arbitrary expression: + | List("1", "two", "3").collect(x => Try(x.toInt) match { case Success(i) => i }) + |In this example, `isDefinedAt` evaluates the selector expression and any guard expressions + |in the pattern match in order to report whether an input is in the domain of the function. + | + |However, blocks of statements are not supported by this idiom: + | List("1", "two", "3").collect: x => + | val maybe = Try(x.toInt) // statements preceding the match + | maybe match + | case Success(i) if i % 2 == 0 => i // throws MatchError on cases not covered + | + |This restriction is enforced to simplify the evaluation semantics of the partial function. + |Otherwise, it might not be clear what is computed by `isDefinedAt`. + | + |Efficient operations will use `applyOrElse` to avoid computing the match twice, + |but the `apply` body would be executed "per element" in the example.""" + +final class PointlessAppliedConstructorType(tpt: untpd.Tree, args: List[untpd.Tree], tpe: Type)(using Context) extends TypeMsg(PointlessAppliedConstructorTypeID): + override protected def msg(using Context): String = + val act = i"$tpt(${args.map(_.show).mkString(", ")})" + i"""|Applied constructor type $act has no effect. + |The resulting type of $act is the same as its base type, namely: $tpe""".stripMargin + + override protected def explain(using Context): String = + i"""|Applied constructor types are used to ascribe specialized types of constructor applications. + |To benefit from this feature, the constructor in question has to have a more specific type than the class itself. + | + |If you want to track a precise type of any of the class parameters, make sure to mark the parameter as `tracked`. + |Otherwise, you can safely remove the argument list from the type. 
+ |""" + +final class OnlyFullyDependentAppliedConstructorType()(using Context) + extends TypeMsg(OnlyFullyDependentAppliedConstructorTypeID): + override protected def msg(using Context): String = + i"Applied constructor type can only be used with classes where all parameters in the first parameter list are tracked" + + override protected def explain(using Context): String = "" + +final class IllegalContextBounds(using Context) extends SyntaxMsg(IllegalContextBoundsID): + override protected def msg(using Context): String = + i"Context bounds are not allowed in this position" + + override protected def explain(using Context): String = "" + +end IllegalContextBounds \ No newline at end of file diff --git a/compiler/src/dotty/tools/dotc/sbt/APIUtils.scala b/compiler/src/dotty/tools/dotc/sbt/APIUtils.scala index 07fa2027fbe6..4ee1fd0f6b68 100644 --- a/compiler/src/dotty/tools/dotc/sbt/APIUtils.scala +++ b/compiler/src/dotty/tools/dotc/sbt/APIUtils.scala @@ -1,7 +1,6 @@ package dotty.tools.dotc package sbt -import scala.language.unsafeNulls import core.* import Contexts.* diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala index c303c40485ce..4d915b57df1b 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala @@ -418,7 +418,7 @@ private class ExtractAPICollector(nonLocalClassSymbols: mutable.HashSet[Symbol]) apiClass(sym.asClass) } else if (sym.isType) { apiTypeMember(sym.asType) - } else if (sym.is(Mutable, butNot = Accessor)) { + } else if (sym.isMutableVar) { api.Var.of(sym.name.toString, apiAccess(sym), apiModifiers(sym), apiAnnotations(sym, inlineOrigin).toArray, apiType(sym.info)) } else if (sym.isStableMember && !sym.isRealMethod) { diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala index 154d50f8ebc2..8b3b217cb0fc 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala @@ -577,7 +577,7 @@ class DependencyRecorder { clazz } - private var _responsibleForImports: Symbol = uninitialized + private[dotc] var _responsibleForImports: Symbol | Null = null /** Top level import dependencies are registered as coming from a first top level * class/trait/object declared in the compilation unit. If none exists, issue a warning and return NoSymbol. diff --git a/compiler/src/dotty/tools/dotc/sbt/package.scala b/compiler/src/dotty/tools/dotc/sbt/package.scala index 8efa25569325..e835a4b95fee 100644 --- a/compiler/src/dotty/tools/dotc/sbt/package.scala +++ b/compiler/src/dotty/tools/dotc/sbt/package.scala @@ -39,6 +39,6 @@ extension (sym: Symbol) // names in the global chars array. But we would need to restructure // ExtractDependencies caches to avoid expensive `toString` on // each member reference. 
- termName(sym.owner.fullName.mangledString.replace(".", ";").nn ++ ";init;") + termName(sym.owner.fullName.mangledString.replace(".", ";") ++ ";init;") else sym.name.stripModuleClassSuffix diff --git a/compiler/src/dotty/tools/dotc/semanticdb/Scala3.scala b/compiler/src/dotty/tools/dotc/semanticdb/Scala3.scala index f49b00089712..a20a0d181a55 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/Scala3.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/Scala3.scala @@ -420,7 +420,7 @@ object Scala3: else Descriptor.None def unescapeUnicode = - unicodeEscape.replaceAllIn(symbol, m => String.valueOf(Integer.parseInt(m.group(1), 16).toChar).nn) + unicodeEscape.replaceAllIn(symbol, m => String.valueOf(Integer.parseInt(m.group(1), 16).toChar)) def isJavaIdent = symbol.nonEmpty && isJavaIdentifierStart(symbol.head) && symbol.tail.forall(isJavaIdentifierPart) diff --git a/compiler/src/dotty/tools/dotc/semanticdb/TypeOps.scala b/compiler/src/dotty/tools/dotc/semanticdb/TypeOps.scala index 2d98535657a2..11467e216aba 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/TypeOps.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/TypeOps.scala @@ -96,7 +96,11 @@ class TypeOps: // We try to find the "actual" binder of : `inner`, // and register them to the symbol table with `(, inner) -> ` // instead of `("y", outer) -> ` - if lam.paramNames.contains(sym.name) then + // We must also check for parameter shadowing such as def shadowParam(x: Int) = {val x = true} + // We skip param symbol check if owner is not a LambdaType for proper MatchType paramRef entry in the paramRefSymtab + // for more information: https://github.com/scala/scala3/pull/23161#discussion_r2097755983 + + if (sym.is(Flags.Param) || !sym.owner.info.isInstanceOf[LambdaType]) && lam.paramNames.contains(sym.name) then paramRefSymtab((lam, sym.name)) = sym else enterParamRef(lam.resType) diff --git a/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala b/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala index c1725cbd0255..7263bce0478c 100644 --- a/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala +++ b/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala @@ -120,7 +120,7 @@ object CapturedVars: def traverse(tree: Tree)(using Context) = tree match case id: Ident => val sym = id.symbol - if sym.is(Mutable, butNot = Method) && sym.owner.isTerm then + if sym.isMutableVar && sym.owner.isTerm then val enclMeth = ctx.owner.enclosingMethod if sym.enclosingMethod != enclMeth then report.log(i"capturing $sym in ${sym.enclosingMethod}, referenced from $enclMeth") diff --git a/compiler/src/dotty/tools/dotc/transform/CheckReentrant.scala b/compiler/src/dotty/tools/dotc/transform/CheckReentrant.scala index e8a402068bfc..5f52ac82879a 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckReentrant.scala +++ b/compiler/src/dotty/tools/dotc/transform/CheckReentrant.scala @@ -65,7 +65,7 @@ class CheckReentrant extends MiniPhase { scanning(cls) { for (sym <- cls.classInfo.decls) if (sym.isTerm && !sym.isSetter && !isIgnored(sym)) - if (sym.is(Mutable)) { + if (sym.isMutableVarOrAccessor) { report.error( em"""possible data race involving globally reachable ${sym.showLocated}: ${sym.info} | use -Ylog:checkReentrant+ to find out more about why the variable is reachable.""") diff --git a/compiler/src/dotty/tools/dotc/transform/CheckShadowing.scala b/compiler/src/dotty/tools/dotc/transform/CheckShadowing.scala index 3adb3ab0ce7d..e6fe64fe7b62 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckShadowing.scala +++ 
b/compiler/src/dotty/tools/dotc/transform/CheckShadowing.scala @@ -18,7 +18,6 @@ import dotty.tools.dotc.core.Types.NoType import dotty.tools.dotc.core.Types.Type import dotty.tools.dotc.core.Types import dotty.tools.dotc.semanticdb.TypeOps -import dotty.tools.dotc.cc.boxedCaptureSet import dotty.tools.dotc.core.Symbols.{NoSymbol, isParamOrAccessor} import scala.collection.mutable import dotty.tools.dotc.core.Scopes.Scope diff --git a/compiler/src/dotty/tools/dotc/transform/CheckStatic.scala b/compiler/src/dotty/tools/dotc/transform/CheckStatic.scala index 6c74f302b65d..957fd78e9c2c 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckStatic.scala +++ b/compiler/src/dotty/tools/dotc/transform/CheckStatic.scala @@ -52,7 +52,7 @@ class CheckStatic extends MiniPhase { report.error(MissingCompanionForStatic(defn.symbol), defn.srcPos) else if (clashes.exists) report.error(MemberWithSameNameAsStatic(), defn.srcPos) - else if (defn.symbol.is(Flags.Mutable) && companion.is(Flags.Trait)) + else if (defn.symbol.isMutableVarOrAccessor && companion.is(Flags.Trait)) report.error(TraitCompanionWithMutableStatic(), defn.srcPos) else if (defn.symbol.is(Flags.Lazy)) report.error(LazyStaticField(), defn.srcPos) diff --git a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala index 24763abc21b4..ababe3f94479 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala +++ b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala @@ -52,6 +52,7 @@ class CheckUnused private (phaseMode: PhaseMode, suffix: String) extends MiniPha tree override def transformIdent(tree: Ident)(using Context): tree.type = + refInfos.isAssignment = tree.hasAttachment(AssignmentTarget) if tree.symbol.exists then // if in an inline expansion, resolve at summonInline (synthetic pos) or in an enclosing call site val resolving = @@ -68,10 +69,12 @@ class CheckUnused private (phaseMode: PhaseMode, suffix: String) extends MiniPha resolveUsage(tree.symbol, tree.name, tree.typeOpt.importPrefix.skipPackageObject) else if tree.hasType then resolveUsage(tree.tpe.classSymbol, tree.name, tree.tpe.importPrefix.skipPackageObject) + refInfos.isAssignment = false tree // import x.y; y may be rewritten x.y, also import x.z as y override def transformSelect(tree: Select)(using Context): tree.type = + refInfos.isAssignment = tree.hasAttachment(AssignmentTarget) val name = tree.removeAttachment(OriginalName).getOrElse(nme.NO_NAME) inline def isImportable = tree.qualifier.srcPos.isSynthetic && tree.qualifier.tpe.match @@ -92,6 +95,7 @@ class CheckUnused private (phaseMode: PhaseMode, suffix: String) extends MiniPha resolveUsage(tree.symbol, name, tree.qualifier.tpe) else if !ignoreTree(tree) then refUsage(tree.symbol) + refInfos.isAssignment = false tree override def transformLiteral(tree: Literal)(using Context): tree.type = @@ -113,13 +117,10 @@ class CheckUnused private (phaseMode: PhaseMode, suffix: String) extends MiniPha ctx override def prepareForAssign(tree: Assign)(using Context): Context = - tree.lhs.putAttachment(Ignore, ()) // don't take LHS reference as a read + tree.lhs.putAttachment(AssignmentTarget, ()) // don't take LHS reference as a read ctx override def transformAssign(tree: Assign)(using Context): tree.type = - tree.lhs.removeAttachment(Ignore) - val sym = tree.lhs.symbol - if sym.exists then - refInfos.asss.addOne(sym) + tree.lhs.removeAttachment(AssignmentTarget) tree override def prepareForMatch(tree: Match)(using Context): Context = @@ -145,14 
+146,28 @@ class CheckUnused private (phaseMode: PhaseMode, suffix: String) extends MiniPha refInfos.inlined.push(tree.call.srcPos) ctx override def transformInlined(tree: Inlined)(using Context): tree.type = - transformAllDeep(tree.expansion) // traverse expansion with nonempty inlined stack to avoid registering defs + //transformAllDeep(tree.expansion) // traverse expansion with nonempty inlined stack to avoid registering defs val _ = refInfos.inlined.pop() - transformAllDeep(tree.call) + if !tree.call.isEmpty && phaseMode.eq(PhaseMode.Aggregate) then + transformAllDeep(tree.call) tree override def prepareForBind(tree: Bind)(using Context): Context = refInfos.register(tree) ctx + /* cf QuotePattern + override def transformBind(tree: Bind)(using Context): tree.type = + tree.symbol.info match + case TypeBounds(lo, hi) => + def resolve(tpe: Type): Unit = + val sym = tpe.typeSymbol + if sym.exists then + resolveUsage(sym, sym.name, NoPrefix) + resolve(lo) + resolve(hi) + case _ => + tree + */ override def prepareForValDef(tree: ValDef)(using Context): Context = if !tree.symbol.is(Deferred) && tree.rhs.symbol != defn.Predef_undefined then @@ -201,15 +216,6 @@ class CheckUnused private (phaseMode: PhaseMode, suffix: String) extends MiniPha refInfos.register(tree) tree - override def prepareForTemplate(tree: Template)(using Context): Context = - ctx.fresh.setProperty(resolvedKey, Resolved()) - - override def prepareForPackageDef(tree: PackageDef)(using Context): Context = - ctx.fresh.setProperty(resolvedKey, Resolved()) - - override def prepareForStats(trees: List[Tree])(using Context): Context = - ctx.fresh.setProperty(resolvedKey, Resolved()) - override def transformOther(tree: Tree)(using Context): tree.type = tree match case imp: Import => @@ -221,6 +227,8 @@ class CheckUnused private (phaseMode: PhaseMode, suffix: String) extends MiniPha selector.bound match case untpd.TypedSplice(bound) => transformAllDeep(bound) case _ => + case exp: Export => + transformAllDeep(exp.expr) case AppliedTypeTree(tpt, args) => transformAllDeep(tpt) args.foreach(transformAllDeep) @@ -249,7 +257,17 @@ class CheckUnused private (phaseMode: PhaseMode, suffix: String) extends MiniPha case Splice(expr) => transformAllDeep(expr) case QuotePattern(bindings, body, quotes) => - bindings.foreach(transformAllDeep) + bindings.foreach: + case b @ Bind(_, _) => + b.symbol.info match + case TypeBounds(lo, hi) => + def resolve(tpe: Type): Unit = + val sym = tpe.typeSymbol + if sym.exists then + resolveUsage(sym, sym.name, NoPrefix) + resolve(lo) + resolve(hi) + case _ => transformAllDeep(body) transformAllDeep(quotes) case SplicePattern(body, typeargs, args) => @@ -263,7 +281,6 @@ class CheckUnused private (phaseMode: PhaseMode, suffix: String) extends MiniPha case ByNameTypeTree(result) => transformAllDeep(result) //case _: InferredTypeTree => // do nothing - //case _: Export => // nothing to do //case _ if tree.isType => case _ => tree @@ -275,7 +292,7 @@ class CheckUnused private (phaseMode: PhaseMode, suffix: String) extends MiniPha // if sym is not an enclosing element, record the reference def refUsage(sym: Symbol)(using Context): Unit = if !ctx.outersIterator.exists(cur => cur.owner eq sym) then - refInfos.refs.addOne(sym) + refInfos.addRef(sym) /** Look up a reference in enclosing contexts to determine whether it was introduced by a definition or import. * The binding of highest precedence must then be correct. 
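The AssignmentTarget attachment above lets addRef distinguish a read of a var from a write to it, which feeds the UnusedSymbol diagnostics defined in messages.scala. A rough sketch of the user-code shapes this is about, assuming -Wunused:locals is enabled (names are illustrative; the exact wording is the messages shown earlier):
  def demo(): Int =
    var a = 0   // only ever written to below, never read: the "unused local definition" shape
    a = 1
    var b = 0   // read but never reassigned: the "consider using an immutable val instead" shape
    b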
@@ -291,7 +308,9 @@ class CheckUnused private (phaseMode: PhaseMode, suffix: String) extends MiniPha def matchingSelector(info: ImportInfo): ImportSelector | Null = val qtpe = info.site - def hasAltMember(nm: Name) = qtpe.member(nm).hasAltWith(_.symbol == sym) + def hasAltMember(nm: Name) = qtpe.member(nm).hasAltWith: alt => + alt.symbol == sym + || nm.isTypeName && alt.symbol.isAliasType && alt.info.dealias.typeSymbol == sym def loop(sels: List[ImportSelector]): ImportSelector | Null = sels match case sel :: sels => val matches = @@ -322,56 +341,28 @@ class CheckUnused private (phaseMode: PhaseMode, suffix: String) extends MiniPha && ctxsym.thisType.baseClasses.contains(sym.owner) && ctxsym.thisType.member(sym.name).hasAltWith(d => d.containsSym(sym) && !name.exists(_ != d.name)) - // Attempt to cache a result at the given context. Not all contexts bear a cache, including NoContext. - // If there is already any result for the name and prefix, do nothing. - def addCached(where: Context, result: Precedence): Unit = - if where.moreProperties ne null then - where.property(resolvedKey) match - case Some(resolved) => - resolved.record(sym, name, prefix, result) - case none => - // Avoid spurious NoSymbol and also primary ctors which are never warned about. - // Selections C.this.toString should be already excluded, but backtopped here for eq, etc. + // Selections C.this.toString should be already excluded, but backstopped here for eq, etc. if !sym.exists || sym.isPrimaryConstructor || sym.isEffectiveRoot || defn.topClasses(sym.owner) then return // Find the innermost, highest precedence. Contexts have no nesting levels but assume correctness. // If the sym is an enclosing definition (the owner of a context), it does not count toward usages. val isLocal = sym.isLocalToBlock var candidate: Context = NoContext - var cachePoint: Context = NoContext // last context with Resolved cache var importer: ImportSelector | Null = null // non-null for import context var precedence = NoPrecedence // of current resolution + var enclosed = false // true if sym is owner of an enclosing context var done = false - var cached = false val ctxs = ctx.outersIterator while !done && ctxs.hasNext do val cur = ctxs.next() - if cur.owner eq sym then - addCached(cachePoint, Definition) - return // found enclosing definition - else if isLocal then + if cur.owner.userSymbol == sym && !sym.is(Package) then + enclosed = true // found enclosing definition, don't register the reference + if isLocal then if cur.owner eq sym.owner then done = true // for local def, just checking that it is not enclosing else - val cachedPrecedence = - cur.property(resolvedKey) match - case Some(resolved) => - // conservative, cache must be nested below the result context - if precedence.isNone then - cachePoint = cur // no result yet, and future result could be cached here - resolved.hasRecord(sym, name, prefix) - case none => NoPrecedence - cached = !cachedPrecedence.isNone - if cached then - // if prefer cached precedence, then discard previous result - if precedence.weakerThan(cachedPrecedence) then - candidate = NoContext - importer = null - cachePoint = cur // actual cache context - precedence = cachedPrecedence // actual cached precedence - done = true - else if cur.isImportContext then + if cur.isImportContext then val sel = matchingSelector(cur.importInfo.nn) if sel != null then if cur.importInfo.nn.isRootImport then @@ -391,7 +382,7 @@ class CheckUnused private (phaseMode: PhaseMode, suffix: String) extends MiniPha candidate = cur importer 
= sel else if checkMember(cur.owner) then - if sym.srcPos.sourcePos.source == ctx.source then + if sym.is(Package) || sym.srcPos.sourcePos.source == ctx.source then precedence = Definition candidate = cur importer = null // ignore import in same scope; we can't check nesting level @@ -401,16 +392,10 @@ class CheckUnused private (phaseMode: PhaseMode, suffix: String) extends MiniPha candidate = cur end while // record usage and possibly an import - refInfos.refs.addOne(sym) + if !enclosed then + refInfos.addRef(sym) if candidate != NoContext && candidate.isImportContext && importer != null then refInfos.sels.put(importer, ()) - // possibly record that we have performed this look-up - // if no result was found, take it as Definition (local or rooted head of fully qualified path) - val adjusted = if precedence.isNone then Definition else precedence - if !cached && (cachePoint ne NoContext) then - addCached(cachePoint, adjusted) - if cachePoint ne ctx then - addCached(ctx, adjusted) // at this ctx, since cachePoint may be far up the outer chain end resolveUsage end CheckUnused @@ -422,15 +407,8 @@ object CheckUnused: val refInfosKey = Property.StickyKey[RefInfos] - val resolvedKey = Property.Key[Resolved] - inline def refInfos(using Context): RefInfos = ctx.property(refInfosKey).get - inline def resolved(using Context): Resolved = - ctx.property(resolvedKey) match - case Some(res) => res - case _ => throw new MatchError("no Resolved for context") - /** Attachment holding the name of an Ident as written by the user. */ val OriginalName = Property.StickyKey[Name] @@ -440,6 +418,9 @@ object CheckUnused: /** Ignore reference. */ val Ignore = Property.StickyKey[Unit] + /** Tree is LHS of Assign. */ + val AssignmentTarget = Property.StickyKey[Unit] + class PostTyper extends CheckUnused(PhaseMode.Aggregate, "PostTyper") class PostInlining extends CheckUnused(PhaseMode.Report, "PostInlining") @@ -459,7 +440,7 @@ object CheckUnused: if inliners == 0 && languageImport(imp.expr).isEmpty && !imp.isGeneratedByEnum - && !ctx.outer.owner.name.isReplWrapperName + && !ctx.owner.name.isReplWrapperName then imps.put(imp, ()) case tree: Bind => @@ -484,25 +465,15 @@ object CheckUnused: val inlined = Stack.empty[SrcPos] // enclosing call.srcPos of inlined code (expansions) var inliners = 0 // depth of inline def (not inlined yet) - end RefInfos - // Symbols already resolved in the given Context (with name and prefix of lookup). - class Resolved: - import PrecedenceLevels.* - private val seen = mutable.Map.empty[Symbol, List[(Name, Type, Precedence)]].withDefaultValue(Nil) - // if a result has been recorded, return it; otherwise, NoPrecedence. - def hasRecord(symbol: Symbol, name: Name, prefix: Type)(using Context): Precedence = - seen(symbol).find((n, p, _) => n == name && p =:= prefix) match - case Some((_, _, r)) => r - case none => NoPrecedence - // "record" the look-up result, if there is not already a result for the name and prefix. 
- def record(symbol: Symbol, name: Name, prefix: Type, result: Precedence)(using Context): Unit = - require(NoPrecedence.weakerThan(result)) - seen.updateWith(symbol): - case svs @ Some(vs) => - if vs.exists((n, p, _) => n == name && p =:= prefix) then svs - else Some((name, prefix, result) :: vs) - case none => Some((name, prefix, result) :: Nil) + // instead of refs.addOne, use addRef to distinguish a read from a write to var + var isAssignment = false + def addRef(sym: Symbol): Unit = + if isAssignment then + asss.addOne(sym) + else + refs.addOne(sym) + end RefInfos // Names are resolved by definitions and imports, which have four precedence levels: object PrecedenceLevels: @@ -529,6 +500,8 @@ object CheckUnused: val warnings = ArrayBuilder.make[MessageInfo] def warnAt(pos: SrcPos)(msg: UnusedSymbol, origin: String = ""): Unit = warnings.addOne((msg, pos, origin)) val infos = refInfos + //println(infos.defs.mkString("DEFS\n", "\n", "\n---")) + //println(infos.refs.mkString("REFS\n", "\n", "\n---")) def checkUnassigned(sym: Symbol, pos: SrcPos) = if sym.isLocalToBlock then @@ -569,7 +542,7 @@ object CheckUnused: if aliasSym.isAllOf(PrivateParamAccessor, butNot = CaseAccessor) && !infos.refs(alias.symbol) then if aliasSym.is(Local) then if ctx.settings.WunusedHas.explicits then - warnAt(pos)(UnusedSymbol.explicitParams) + warnAt(pos)(UnusedSymbol.explicitParams(aliasSym)) else if ctx.settings.WunusedHas.privates then warnAt(pos)(UnusedSymbol.privateMembers) @@ -583,11 +556,11 @@ object CheckUnused: && !sym.name.isInstanceOf[DerivedName] && !ctx.platform.isMainMethod(m) then - warnAt(pos)(UnusedSymbol.explicitParams) + warnAt(pos)(UnusedSymbol.explicitParams(sym)) end checkExplicit // begin if !infos.skip(m) - && !m.nextOverriddenSymbol.exists + && !m.isEffectivelyOverride && !allowed then checkExplicit() @@ -599,18 +572,20 @@ object CheckUnused: val dd = defn m.isDeprecated || m.is(Synthetic) - || sym.name.is(ContextFunctionParamName) // a ubiquitous parameter - || sym.name.is(ContextBoundParamName) && sym.info.typeSymbol.isMarkerTrait // a ubiquitous parameter || m.hasAnnotation(dd.UnusedAnnot) // param of unused method - || sym.info.typeSymbol.match // more ubiquity + || sym.name.is(ContextFunctionParamName) // a ubiquitous parameter + || sym.isCanEqual + || sym.info.dealias.typeSymbol.match // more ubiquity case dd.DummyImplicitClass | dd.SubTypeClass | dd.SameTypeClass => true - case _ => false + case tps => + tps.isMarkerTrait // no members to use; was only if sym.name.is(ContextBoundParamName) + || // but consider NotGiven + tps.hasAnnotation(dd.LanguageFeatureMetaAnnot) || sym.info.isSingleton // DSL friendly - || sym.isCanEqual - || sym.info.typeSymbol.hasAnnotation(dd.LanguageFeatureMetaAnnot) - || sym.info.isInstanceOf[RefinedType] // can't be expressed as a context bound + || sym.info.dealias.isInstanceOf[RefinedType] // can't be expressed as a context bound if ctx.settings.WunusedHas.implicits && !infos.skip(m) + && !m.isEffectivelyOverride && !allowed then if m.isPrimaryConstructor then @@ -621,9 +596,9 @@ object CheckUnused: aliasSym.isAllOf(PrivateParamAccessor, butNot = CaseAccessor) || aliasSym.isAllOf(Protected | ParamAccessor, butNot = CaseAccessor) && m.owner.is(Given) if checking && !infos.refs(alias.symbol) then - warnAt(pos)(UnusedSymbol.implicitParams) + warnAt(pos)(UnusedSymbol.implicitParams(aliasSym)) else - warnAt(pos)(UnusedSymbol.implicitParams) + warnAt(pos)(UnusedSymbol.implicitParams(sym)) def checkLocal(sym: Symbol, pos: SrcPos) = if 
ctx.settings.WunusedHas.locals @@ -672,6 +647,7 @@ object CheckUnused: else // If the rest of the line is blank, include it in the final edit position. (Delete trailing whitespace.) // If for deletion, and the prefix of the line is also blank, then include that, too. (Del blank line.) + // If deleting a blank line and surrounded by blank lines, remove an adjoining blank line. def editPosAt(srcPos: SrcPos, forDeletion: Boolean): SrcPos = val start = srcPos.span.start val end = srcPos.span.end @@ -684,7 +660,21 @@ object CheckUnused: val bump = if (deleteLine) 1 else 0 // todo improve to include offset of next line, endline + 1 val p0 = srcPos.span val p1 = if (next >= 0 && emptyRight) p0.withEnd(next + bump) else p0 - val p2 = if (deleteLine) p1.withStart(prev + 1) else p1 + val p2 = + if deleteLine then + var newStart = prev + 1 + if srcPos.line > 1 then + val source = srcPos.sourcePos.source + import source.{lineToOffset, lineToOffsetOpt, offsetToLine} + val startLine = offsetToLine(start) + val endLine = offsetToLine(end) + val preceding = lineToOffset(startLine - 1) + lineToOffsetOpt(endLine + 2) match + case Some(succeeding) if lineToOffset(startLine) - preceding == 1 && succeeding - end == 2 => + newStart = preceding + case _ => + p1.withStart(newStart) + else p1 srcPos.sourcePos.withSpan(p2) def actionsOf(actions: (SrcPos, String)*): List[CodeAction] = val patches = actions.map((srcPos, replacement) => ActionPatch(srcPos.sourcePos, replacement)).toList @@ -852,7 +842,7 @@ object CheckUnused: val ok = fun.symbol.info match case PolyType(tycon, MethodTpe(_, _, AppliedType(_, tprefs))) => tprefs.collect: - case ref: TypeParamRef => termName(ref.binder.paramNames(ref.paramNum).toString.toLowerCase.nn) + case ref: TypeParamRef => termName(ref.binder.paramNames(ref.paramNum).toString.toLowerCase) case _ => Nil allowVariableBindings(ok, args) else if fun.symbol == defn.TypeTest_unapply then @@ -888,41 +878,49 @@ object CheckUnused: inline def exists(p: Name => Boolean): Boolean = nm.ne(nme.NO_NAME) && p(nm) inline def isWildcard: Boolean = nm == nme.WILDCARD || nm.is(WildcardParamName) - extension (tp: Type) - def importPrefix(using Context): Type = tp match + extension (tp: Type)(using Context) + def importPrefix: Type = tp match case tp: NamedType => tp.prefix case tp: ClassInfo => tp.prefix case tp: TypeProxy => tp.superType.normalizedPrefix case _ => NoType - def underlyingPrefix(using Context): Type = tp match + def underlyingPrefix: Type = tp match case tp: NamedType => tp.prefix case tp: ClassInfo => tp.prefix case tp: TypeProxy => tp.underlying.underlyingPrefix case _ => NoType - def skipPackageObject(using Context): Type = + def skipPackageObject: Type = if tp.typeSymbol.isPackageObject then tp.underlyingPrefix else tp - def underlying(using Context): Type = tp match + def underlying: Type = tp match case tp: TypeProxy => tp.underlying case _ => tp private val serializationNames: Set[TermName] = Set("readResolve", "readObject", "readObjectNoData", "writeObject", "writeReplace").map(termName(_)) - extension (sym: Symbol) - def isSerializationSupport(using Context): Boolean = + extension (sym: Symbol)(using Context) + def isSerializationSupport: Boolean = sym.is(Method) && serializationNames(sym.name.toTermName) && sym.owner.isClass && sym.owner.derivesFrom(defn.JavaSerializableClass) - def isCanEqual(using Context): Boolean = + def isCanEqual: Boolean = sym.isOneOf(GivenOrImplicit) && sym.info.finalResultType.baseClasses.exists(_.derivesFrom(defn.CanEqualClass)) - def 
isMarkerTrait(using Context): Boolean = - sym.isClass && sym.info.allMembers.forall: d => + def isMarkerTrait: Boolean = + sym.info.hiBound.allMembers.forall: d => val m = d.symbol !m.isTerm || m.isSelfSym || m.is(Method) && (m.owner == defn.AnyClass || m.owner == defn.ObjectClass) - def isEffectivelyPrivate(using Context): Boolean = + def isEffectivelyPrivate: Boolean = sym.is(Private, butNot = ParamAccessor) - || sym.owner.isAnonymousClass && !sym.nextOverriddenSymbol.exists + || sym.owner.isAnonymousClass && !sym.isEffectivelyOverride + def isEffectivelyOverride: Boolean = + sym.is(Override) + || + sym.canMatchInheritedSymbols && { // inline allOverriddenSymbols using owner.info or thisType + val owner = sym.owner.asClass + val base = if owner.classInfo.selfInfo != NoType then owner.thisType else owner.info + base.baseClasses.drop(1).iterator.exists(sym.overriddenSymbol(_).exists) + } // pick the symbol the user wrote for purposes of tracking - inline def userSymbol(using Context): Symbol= + inline def userSymbol: Symbol= if sym.denot.is(ModuleClass) then sym.denot.companionModule else sym extension (sel: ImportSelector) @@ -936,13 +934,13 @@ object CheckUnused: case untpd.Ident(nme.WILDCARD) => true case _ => false - extension (imp: Import) + extension (imp: Import)(using Context) /** Is it the first import clause in a statement? `a.x` in `import a.x, b.{y, z}` */ - def isPrimaryClause(using Context): Boolean = + def isPrimaryClause: Boolean = imp.srcPos.span.pointDelta > 0 // primary clause starts at `import` keyword with point at clause proper /** Generated import of cases from enum companion. */ - def isGeneratedByEnum(using Context): Boolean = + def isGeneratedByEnum: Boolean = imp.symbol.exists && imp.symbol.owner.is(Enum, butNot = Case) /** Under -Wunused:strict-no-implicit-warn, avoid false positives @@ -950,7 +948,7 @@ object CheckUnused: * specifically does import an implicit. * Similarly, import of CanEqual must not warn, as it is always witness. */ - def isLoose(sel: ImportSelector)(using Context): Boolean = + def isLoose(sel: ImportSelector): Boolean = if ctx.settings.WunusedHas.strictNoImplicitWarn then if sel.isWildcard || imp.expr.tpe.member(sel.name.toTermName).hasAltWith(_.symbol.isOneOf(GivenOrImplicit)) diff --git a/compiler/src/dotty/tools/dotc/transform/Constructors.scala b/compiler/src/dotty/tools/dotc/transform/Constructors.scala index 9a0df830c6d7..b373565489f0 100644 --- a/compiler/src/dotty/tools/dotc/transform/Constructors.scala +++ b/compiler/src/dotty/tools/dotc/transform/Constructors.scala @@ -155,7 +155,7 @@ class Constructors extends MiniPhase with IdentityDenotTransformer { thisPhase = case Ident(_) | Select(This(_), _) => var sym = tree.symbol def isOverridableSelect = tree.isInstanceOf[Select] && !sym.isEffectivelyFinal - def switchOutsideSupercall = !sym.is(Mutable) && !isOverridableSelect + def switchOutsideSupercall = !sym.isMutableVarOrAccessor && !isOverridableSelect // If true, switch to constructor parameters also in the constructor body // that follows the super call. // Variables need to go through the getter since they might have been updated. 
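
The reworked `-Wunused` parameter checks above carve out implicit/using parameters whose types are pure markers (no usable members), language-feature evidence, or `CanEqual` witnesses, and they now pass the offending symbol to `UnusedSymbol.explicitParams`/`implicitParams`. A minimal sketch of the distinction, with invented names, assuming the `-Wunused:implicits` setting referenced in the diff:

```scala
// Sketch only: illustrates the marker-trait carve-out in the unused-implicits check.
// Assumed invocation: scalac -Wunused:implicits Example.scala

trait Marker                       // no members to use: a `using Marker` parameter counts as evidence, not reported
trait Service { def run(): Unit }  // has a usable member, so an unused `using Service` parameter is reported

def quiet(using m: Marker): Int = 1               // expected: no warning (marker trait)
def noisy(using s: Service): Int = 2              // expected: unused implicit parameter warning for `s`
def fine(using s: Service): Int = { s.run(); 3 }  // expected: no warning (parameter is used)
```
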
diff --git a/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala b/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala index 67bf1bebed87..68f911f06963 100644 --- a/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala +++ b/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala @@ -126,10 +126,16 @@ class ExpandSAMs extends MiniPhase: // The right hand side from which to construct the partial function. This is always a Match. // If the original rhs is already a Match (possibly in braces), return that. // Otherwise construct a match `x match case _ => rhs` where `x` is the parameter of the closure. - def partialFunRHS(tree: Tree): Match = tree match + def partialFunRHS(tree: Tree): Match = + inline def checkMatch(): Unit = + tree match + case Block(_, m: Match) => report.warning(reporting.MatchIsNotPartialFunction(), m.srcPos) + case _ => + tree match case m: Match => m case Block(Nil, expr) => partialFunRHS(expr) case _ => + checkMatch() Match(ref(param.symbol), CaseDef(untpd.Ident(nme.WILDCARD).withType(param.symbol.info), EmptyTree, tree) :: Nil) diff --git a/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala b/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala index 8d01d2415340..8fc9f02c1e38 100644 --- a/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala +++ b/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala @@ -19,10 +19,14 @@ import NameKinds.OuterSelectName import StdNames.* import config.Feature import inlines.Inlines.inInlineMethod +import util.Property object FirstTransform { val name: String = "firstTransform" val description: String = "some transformations to put trees into a canonical form" + + /** Attachment key for named argument patterns */ + val WasNamedArg: Property.StickyKey[Unit] = Property.StickyKey() } /** The first tree transform @@ -38,6 +42,7 @@ object FirstTransform { */ class FirstTransform extends MiniPhase with SymTransformer { thisPhase => import ast.tpd.* + import FirstTransform.* override def phaseName: String = FirstTransform.name @@ -156,7 +161,13 @@ class FirstTransform extends MiniPhase with SymTransformer { thisPhase => override def transformOther(tree: Tree)(using Context): Tree = tree match { case tree: Export => EmptyTree - case tree: NamedArg => transformAllDeep(tree.arg) + case tree: NamedArg => + val res = transformAllDeep(tree.arg) + if ctx.mode.is(Mode.Pattern) then + // Need to keep NamedArg status for pattern matcher to work correctly when faced + // with single-element named tuples. + res.pushAttachment(WasNamedArg, ()) + res case tree => if (tree.isType) toTypeTree(tree) else tree } diff --git a/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala b/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala index 1798d938272c..26ede05ba607 100644 --- a/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala +++ b/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala @@ -171,7 +171,7 @@ object GenericSignatures { def fullNameInSig(sym: Symbol): Unit = { assert(sym.isClass) val name = atPhase(genBCodePhase) { sanitizeName(sym.fullName).replace('.', '/') } - builder.append('L').nn.append(name) + builder.append('L').append(name) } def classSig(sym: Symbol, pre: Type = NoType, args: List[Type] = Nil): Unit = { @@ -216,7 +216,7 @@ object GenericSignatures { // TODO revisit this. Does it align with javac for code that can be expressed in both languages? val delimiter = if (builder.charAt(builder.length() - 1) == '>') '.' 
else '$' - builder.append(delimiter).nn.append(sanitizeName(sym.name)) + builder.append(delimiter).append(sanitizeName(sym.name)) } else fullNameInSig(sym) } diff --git a/compiler/src/dotty/tools/dotc/transform/InstrumentCoverage.scala b/compiler/src/dotty/tools/dotc/transform/InstrumentCoverage.scala index 0229284a1b5f..491f3d3d2572 100644 --- a/compiler/src/dotty/tools/dotc/transform/InstrumentCoverage.scala +++ b/compiler/src/dotty/tools/dotc/transform/InstrumentCoverage.scala @@ -50,9 +50,9 @@ class InstrumentCoverage extends MacroTransform with IdentityDenotTransformer: if !newlyCreated then // If the directory existed before, let's clean it up. - dataDir.listFiles.nn - .filter(_.nn.getName.nn.startsWith("scoverage")) - .foreach(_.nn.delete()) + dataDir.listFiles + .filter(_.getName.startsWith("scoverage")) + .foreach(_.delete()) end if // Initialise a coverage object if it does not exist yet @@ -70,13 +70,13 @@ class InstrumentCoverage extends MacroTransform with IdentityDenotTransformer: private def isClassIncluded(sym: Symbol)(using Context): Boolean = val fqn = sym.fullName.toText(ctx.printerFn(ctx)).show coverageExcludeClasslikePatterns.isEmpty || !coverageExcludeClasslikePatterns.exists( - _.matcher(fqn).nn.matches + _.matcher(fqn).matches ) private def isFileIncluded(file: SourceFile)(using Context): Boolean = val normalizedPath = file.path.replace(".scala", "") coverageExcludeFilePatterns.isEmpty || !coverageExcludeFilePatterns.exists( - _.matcher(normalizedPath).nn.matches + _.matcher(normalizedPath).matches ) override protected def newTransformer(using Context) = @@ -122,7 +122,7 @@ class InstrumentCoverage extends MacroTransform with IdentityDenotTransformer: line = pos.line + 1, desc = sourceFile.content.slice(pos.start, pos.end).mkString, symbolName = tree.symbol.name.toSimpleName.show, - treeName = tree.getClass.getSimpleName.nn, + treeName = tree.getClass.getSimpleName, branch ) ctx.base.coverage.nn.addStatement(statement) diff --git a/compiler/src/dotty/tools/dotc/transform/LazyVals.scala b/compiler/src/dotty/tools/dotc/transform/LazyVals.scala index e2712a7d6302..2fd777f715d9 100644 --- a/compiler/src/dotty/tools/dotc/transform/LazyVals.scala +++ b/compiler/src/dotty/tools/dotc/transform/LazyVals.scala @@ -255,7 +255,7 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { def transformMemberDefThreadUnsafe(x: ValOrDefDef)(using Context): Thicket = { val claz = x.symbol.owner.asClass val tpe = x.tpe.widen.resultType.widen - assert(!(x.symbol is Mutable)) + assert(!x.symbol.isMutableVarOrAccessor) val containerName = LazyLocalName.fresh(x.name.asTermName) val containerSymbol = newSymbol(claz, containerName, x.symbol.flags &~ containerFlagsMask | containerFlags | Private, @@ -447,7 +447,7 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { } def transformMemberDefThreadSafe(x: ValOrDefDef)(using Context): Thicket = { - assert(!(x.symbol is Mutable)) + assert(!x.symbol.isMutableVarOrAccessor) if ctx.settings.YlegacyLazyVals.value then transformMemberDefThreadSafeLegacy(x) else diff --git a/compiler/src/dotty/tools/dotc/transform/Mixin.scala b/compiler/src/dotty/tools/dotc/transform/Mixin.scala index 9a19c0dc414f..ce3f26071b77 100644 --- a/compiler/src/dotty/tools/dotc/transform/Mixin.scala +++ b/compiler/src/dotty/tools/dotc/transform/Mixin.scala @@ -250,7 +250,7 @@ class Mixin extends MiniPhase with SymTransformer { thisPhase => case Some((_, _, args)) => args.iterator case _ => Iterator.empty def nextArgument() = - if argsIt.hasNext then 
argsIt.next + if argsIt.hasNext then argsIt.next() else assert( impl.parents.forall(_.tpe.typeSymbol != mixin), diff --git a/compiler/src/dotty/tools/dotc/transform/MoveStatics.scala b/compiler/src/dotty/tools/dotc/transform/MoveStatics.scala index 95975ad9e6b8..b3ec05501b5b 100644 --- a/compiler/src/dotty/tools/dotc/transform/MoveStatics.scala +++ b/compiler/src/dotty/tools/dotc/transform/MoveStatics.scala @@ -28,7 +28,7 @@ class MoveStatics extends MiniPhase with SymTransformer { def transformSym(sym: SymDenotation)(using Context): SymDenotation = if (sym.hasAnnotation(defn.ScalaStaticAnnot) && sym.owner.is(Flags.Module) && sym.owner.companionClass.exists && - (sym.is(Flags.Method) || !(sym.is(Flags.Mutable) && sym.owner.companionClass.is(Flags.Trait)))) { + (sym.is(Flags.Method) || !(sym.isMutableVarOrAccessor && sym.owner.companionClass.is(Flags.Trait)))) { sym.owner.asClass.delete(sym.symbol) sym.owner.companionClass.asClass.enter(sym.symbol) sym.copySymDenotation(owner = sym.owner.companionClass) diff --git a/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala b/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala index a9a17f6db464..16c50b9cd474 100644 --- a/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala +++ b/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala @@ -8,7 +8,7 @@ import NameKinds.DefaultGetterName import NullOpsDecorator.* import collection.immutable.BitSet import scala.annotation.tailrec -import cc.isCaptureChecking +import cc.{isCaptureChecking, CCState} import scala.compiletime.uninitialized @@ -210,8 +210,7 @@ object OverridingPairs: * @param isSubType A function to be used for checking subtype relationships * between term fields. */ - def isOverridingPair(member: Symbol, memberTp: Type, other: Symbol, otherTp: Type, fallBack: => Boolean = false, - isSubType: (Type, Type) => Context ?=> Boolean = (tp1, tp2) => tp1 frozen_<:< tp2)(using Context): Boolean = + def isOverridingPair(member: Symbol, memberTp: Type, other: Symbol, otherTp: Type, fallBack: => Boolean = false)(using Context): Boolean = if member.isType then // intersection of bounds to refined types must be nonempty memberTp.bounds.hi.hasSameKindAs(otherTp.bounds.hi) && ( @@ -226,6 +225,7 @@ object OverridingPairs: ) else member.name.is(DefaultGetterName) // default getters are not checked for compatibility - || memberTp.overrides(otherTp, member.matchNullaryLoosely || other.matchNullaryLoosely || fallBack, isSubType = isSubType) + || + memberTp.overrides(otherTp, member.matchNullaryLoosely || other.matchNullaryLoosely || fallBack) end OverridingPairs diff --git a/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala b/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala index 250d4844d2b3..e2505144abda 100644 --- a/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala +++ b/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala @@ -386,9 +386,20 @@ object PatternMatcher { } else letAbstract(get) { getResult => - val selectors = - if (args.tail.isEmpty) ref(getResult) :: Nil - else productSelectors(getResult.info).map(ref(getResult).select(_)) + def isUnaryNamedTupleSelectArg(arg: Tree) = + get.tpe.widenDealias.isNamedTupleType + && arg.removeAttachment(FirstTransform.WasNamedArg).isDefined + // Special case: Normally, we pull out the argument wholesale if + // there is only one. But if the argument is a named argument for + // a single-element named tuple, we have to select the field instead. 
+ // NamedArg trees are eliminated in FirstTransform but for named arguments + // of patterns we add a WasNamedArg attachment, which is used to guide the + // logic here. See i22900.scala for test cases. + val selectors = args match + case arg :: Nil if !isUnaryNamedTupleSelectArg(arg) => + ref(getResult) :: Nil + case _ => + productSelectors(getResult.info).map(ref(getResult).select(_)) matchArgsPlan(selectors, args, onSuccess) } } diff --git a/compiler/src/dotty/tools/dotc/transform/PickleQuotes.scala b/compiler/src/dotty/tools/dotc/transform/PickleQuotes.scala index 624ec9628d87..d9a1ea9ad9af 100644 --- a/compiler/src/dotty/tools/dotc/transform/PickleQuotes.scala +++ b/compiler/src/dotty/tools/dotc/transform/PickleQuotes.scala @@ -139,7 +139,7 @@ class PickleQuotes extends MacroTransform { /** Get the holeContents of the transformed tree */ def getContents() = - val res = holeContents.result + val res = holeContents.result() holeContents.clear() res end HoleContentExtractor diff --git a/compiler/src/dotty/tools/dotc/transform/Pickler.scala b/compiler/src/dotty/tools/dotc/transform/Pickler.scala index fcf1b384fda1..7aeaeb4e319d 100644 --- a/compiler/src/dotty/tools/dotc/transform/Pickler.scala +++ b/compiler/src/dotty/tools/dotc/transform/Pickler.scala @@ -258,7 +258,7 @@ class Pickler extends Phase { } private def computeInternalName(cls: ClassSymbol)(using Context): String = - if cls.is(Module) then cls.binaryClassName.stripSuffix(str.MODULE_SUFFIX).nn + if cls.is(Module) then cls.binaryClassName.stripSuffix(str.MODULE_SUFFIX) else cls.binaryClassName override def run(using Context): Unit = { @@ -291,7 +291,7 @@ class Pickler extends Phase { val isOutline = isJavaAttr // TODO: later we may want outline for Scala sources too val attributes = Attributes( sourceFile = sourceRelativePath, - scala2StandardLibrary = ctx.settings.YcompileScala2Library.value, + scala2StandardLibrary = Feature.shouldBehaveAsScala2, explicitNulls = ctx.settings.YexplicitNulls.value, captureChecked = Feature.ccEnabled, withPureFuns = Feature.pureFunsEnabled, @@ -413,11 +413,11 @@ class Pickler extends Phase { ) if ctx.isBestEffort then val outpath = - ctx.settings.outputDir.value.jpath.toAbsolutePath.nn.normalize.nn - .resolve("META-INF").nn + ctx.settings.outputDir.value.jpath.nn.toAbsolutePath.normalize + .resolve("META-INF") .resolve("best-effort") Files.createDirectories(outpath) - BestEffortTastyWriter.write(outpath.nn, result) + BestEffortTastyWriter.write(outpath, result) result } diff --git a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala index df74e102f693..f970e75177e3 100644 --- a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala +++ b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala @@ -101,7 +101,7 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => private var compilingScala2StdLib = false override def initContext(ctx: FreshContext): Unit = initContextCalled = true - compilingScala2StdLib = ctx.settings.YcompileScala2Library.value(using ctx) + compilingScala2StdLib = Feature.shouldBehaveAsScala2(using ctx) val superAcc: SuperAccessors = new SuperAccessors(thisPhase) val synthMbr: SyntheticMembers = new SyntheticMembers(thisPhase) @@ -132,9 +132,9 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => then false // not an error, but not an expandable unrolled method else if - method.is(Deferred) + method.isLocal + || !method.isEffectivelyFinal || isCtor && 
method.owner.is(Trait) - || !(isCtor || method.is(Final) || method.owner.is(ModuleClass)) || method.owner.companionClass.is(CaseClass) && (method.name == nme.apply || method.name == nme.fromProduct) || method.owner.is(CaseClass) && method.name == nme.copy @@ -247,8 +247,10 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => if sym.is(Param) then registerIfUnrolledParam(sym) // @unused is getter/setter but we want it on ordinary method params - if !sym.owner.is(Method) || sym.owner.isConstructor then - sym.keepAnnotationsCarrying(thisPhase, Set(defn.ParamMetaAnnot), orNoneOf = defn.NonBeanMetaAnnots) + // @param should be consulted only for fields + val unusing = sym.getAnnotation(defn.UnusedAnnot) + sym.keepAnnotationsCarrying(thisPhase, Set(defn.ParamMetaAnnot), orNoneOf = defn.NonBeanMetaAnnots) + unusing.foreach(sym.addAnnotation) else if sym.is(ParamAccessor) then // @publicInBinary is not a meta-annotation and therefore not kept by `keepAnnotationsCarrying` val publicInBinaryAnnotOpt = sym.getAnnotation(defn.PublicInBinaryAnnot) @@ -702,6 +704,7 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => case _ => if parents1 ne info.parents then info.derivedClassInfo(declaredParents = parents1) else tp + case _ if sym.is(ConstructorProxy) => NoType case _ => tp private def argTypeOfCaseClassThatNeedsAbstractFunction1(sym: Symbol)(using Context): Option[List[Type]] = diff --git a/compiler/src/dotty/tools/dotc/transform/Recheck.scala b/compiler/src/dotty/tools/dotc/transform/Recheck.scala index 8936c460de81..cdc5a47b2788 100644 --- a/compiler/src/dotty/tools/dotc/transform/Recheck.scala +++ b/compiler/src/dotty/tools/dotc/transform/Recheck.scala @@ -19,9 +19,7 @@ import typer.ErrorReporting.{Addenda, NothingToAdd} import config.Printers.recheckr import util.Property import StdNames.nme -import reporting.trace import annotation.constructorOnly -import cc.CaptureSet.IdempotentCaptRefMap import annotation.tailrec import dotty.tools.dotc.cc.boxed @@ -77,7 +75,7 @@ object Recheck: * as by-name arguments of applied types. See note in doc comment for * ElimByName phase. Test case is bynamefun.scala. */ - private def mapExprType(tp: Type)(using Context): Type = tp match + def mapExprType(tp: Type)(using Context): Type = tp match case ExprType(rt) => defn.ByNameFunction(rt) case _ => tp @@ -167,7 +165,11 @@ abstract class Recheck extends Phase, SymTransformer: * from the current type. */ def setNuType(tpe: Type): Unit = - if nuTypes.lookup(tree) == null && (tpe ne tree.tpe) then nuTypes(tree) = tpe + if nuTypes.lookup(tree) == null then updNuType(tpe) + + /** Set new type of the tree unconditionally. */ + def updNuType(tpe: Type): Unit = + if tpe ne tree.tpe then nuTypes(tree) = tpe /** The new type of the tree, or if none was installed, the original type */ def nuType(using Context): Type = @@ -186,6 +188,9 @@ abstract class Recheck extends Phase, SymTransformer: def keepNuTypes(using Context): Boolean = ctx.settings.Xprint.value.containsPhase(thisPhase) + def resetNuTypes()(using Context): Unit = + nuTypes.clear(resetToInitial = false) + /** A map from NamedTypes to the denotations they had before this phase. * Needed so that we can `reset` them after this phase. */ @@ -285,7 +290,7 @@ abstract class Recheck extends Phase, SymTransformer: * The invocation is currently disabled in recheckApply. 
*/ private def mapJavaArgs(formals: List[Type])(using Context): List[Type] = - val tm = new TypeMap with IdempotentCaptRefMap: + val tm = new TypeMap: def apply(t: Type) = t match case t: TypeRef if t.symbol == defn.ObjectClass => defn.FromJavaObjectType @@ -299,7 +304,7 @@ abstract class Recheck extends Phase, SymTransformer: /** A hook to massage the type of an applied method */ protected def prepareFunction(funtpe: MethodType, meth: Symbol)(using Context): MethodType = funtpe - protected def recheckArg(arg: Tree, formal: Type)(using Context): Type = + protected def recheckArg(arg: Tree, formal: Type, pref: ParamRef, app: Apply)(using Context): Type = recheck(arg, formal) /** A hook to check all the parts of an application: @@ -331,7 +336,7 @@ abstract class Recheck extends Phase, SymTransformer: else fntpe.paramInfos def recheckArgs(args: List[Tree], formals: List[Type], prefs: List[ParamRef]): List[Type] = args match case arg :: args1 => - val argType = recheckArg(arg, normalizeByName(formals.head)) + val argType = recheckArg(arg, normalizeByName(formals.head), prefs.head, tree) val formals1 = if fntpe.isParamDependent then formals.tail.map(_.substParam(prefs.head, argType)) @@ -444,9 +449,11 @@ abstract class Recheck extends Phase, SymTransformer: defn.UnitType def recheckTry(tree: Try, pt: Type)(using Context): Type = - val bodyType = recheck(tree.expr, pt) - val casesTypes = tree.cases.map(recheckCase(_, defn.ThrowableType, pt)) - val finalizerType = recheck(tree.finalizer, defn.UnitType) + recheckTryRest(recheck(tree.expr, pt), tree.cases, tree.finalizer, pt) + + protected def recheckTryRest(bodyType: Type, cases: List[CaseDef], finalizer: Tree, pt: Type)(using Context): Type = + val casesTypes = cases.map(recheckCase(_, defn.ThrowableType, pt)) + val finalizerType = recheck(finalizer, defn.UnitType) TypeComparer.lub(bodyType :: casesTypes) def seqLiteralElemProto(tree: SeqLiteral, pt: Type, declared: Type)(using Context): Type = @@ -519,12 +526,14 @@ abstract class Recheck extends Phase, SymTransformer: if !skipRecheck(sym) then recheckDef(tree, sym) sym.termRef case tree: TypeDef => - // TODO: Should we allow for completers as for ValDefs or DefDefs? - tree.rhs match - case impl: Template => - recheckClassDef(tree, impl, sym.asClass)(using ctx.localContext(tree, sym)) - case _ => - recheckTypeDef(tree, sym)(using ctx.localContext(tree, sym)) + if !skipRecheck(sym) then + // TODO: Should we allow for completers as for ValDefs or DefDefs? 
+ tree.rhs match + case impl: Template => + recheckClassDef(tree, impl, sym.asClass)(using ctx.localContext(tree, sym)) + case _ => + recheckTypeDef(tree, sym)(using ctx.localContext(tree, sym)) + sym.typeRef case tree: Labeled => recheckLabeled(tree, pt) def recheckUnnamed(tree: Tree, pt: Type): Type = tree match @@ -580,7 +589,7 @@ abstract class Recheck extends Phase, SymTransformer: * Otherwise, `tp` itself */ def widenSkolems(tp: Type)(using Context): Type = - object widenSkolems extends TypeMap, IdempotentCaptRefMap: + object widenSkolems extends TypeMap: var didWiden: Boolean = false def apply(t: Type): Type = t match case t: SkolemType if variance >= 0 => @@ -603,6 +612,7 @@ abstract class Recheck extends Phase, SymTransformer: case _ => checkConformsExpr(tpe.widenExpr, pt.widenExpr, tree) def isCompatible(actual: Type, expected: Type)(using Context): Boolean = + try actual <:< expected || expected.isRepeatedParam && isCompatible(actual, @@ -611,6 +621,9 @@ abstract class Recheck extends Phase, SymTransformer: val widened = widenSkolems(expected) (widened ne expected) && isCompatible(actual, widened) } + catch case ex: AssertionError => + println(i"fail while $actual iscompat $expected") + throw ex def checkConformsExpr(actual: Type, expected: Type, tree: Tree, addenda: Addenda = NothingToAdd)(using Context): Type = //println(i"check conforms $actual <:< $expected") diff --git a/compiler/src/dotty/tools/dotc/transform/SpecializeApplyMethods.scala b/compiler/src/dotty/tools/dotc/transform/SpecializeApplyMethods.scala index fd314b94e50c..c85f06e6075f 100644 --- a/compiler/src/dotty/tools/dotc/transform/SpecializeApplyMethods.scala +++ b/compiler/src/dotty/tools/dotc/transform/SpecializeApplyMethods.scala @@ -5,6 +5,7 @@ import ast.Trees.*, ast.tpd, core.* import Contexts.*, Types.*, Decorators.*, Symbols.*, DenotTransformers.* import SymDenotations.*, Scopes.*, StdNames.*, NameOps.*, Names.* import MegaPhase.MiniPhase +import config.Feature import scala.collection.mutable @@ -25,7 +26,7 @@ class SpecializeApplyMethods extends MiniPhase with InfoTransformer { override def description: String = SpecializeApplyMethods.description override def isEnabled(using Context): Boolean = - !ctx.settings.scalajs.value && !ctx.settings.YcompileScala2Library.value + !ctx.settings.scalajs.value && !Feature.shouldBehaveAsScala2 private def specApplySymbol(sym: Symbol, args: List[Type], ret: Type)(using Context): Symbol = { val name = nme.apply.specializedFunction(ret, args) diff --git a/compiler/src/dotty/tools/dotc/transform/Splicer.scala b/compiler/src/dotty/tools/dotc/transform/Splicer.scala index b5386d5bd1df..c53f174600db 100644 --- a/compiler/src/dotty/tools/dotc/transform/Splicer.scala +++ b/compiler/src/dotty/tools/dotc/transform/Splicer.scala @@ -1,7 +1,6 @@ package dotty.tools.dotc package transform -import scala.language.unsafeNulls import java.io.{PrintWriter, StringWriter} import java.lang.reflect.{InvocationTargetException, Method => JLRMethod} diff --git a/compiler/src/dotty/tools/dotc/transform/SyntheticMembers.scala b/compiler/src/dotty/tools/dotc/transform/SyntheticMembers.scala index 926a19224e79..5f1039abec7b 100644 --- a/compiler/src/dotty/tools/dotc/transform/SyntheticMembers.scala +++ b/compiler/src/dotty/tools/dotc/transform/SyntheticMembers.scala @@ -10,11 +10,13 @@ import NameOps.* import Annotations.Annotation import typer.ProtoTypes.constrained import ast.untpd +import config.Feature import util.Property import util.Spans.Span import config.Printers.derive import 
NullOpsDecorator.* +import scala.runtime.Statics object SyntheticMembers { @@ -78,11 +80,11 @@ class SyntheticMembers(thisPhase: DenotTransformer) { private def existingDef(sym: Symbol, clazz: ClassSymbol)(using Context): Symbol = val existing = sym.matchingMember(clazz.thisType) - if ctx.settings.YcompileScala2Library.value && clazz.isValueClass && (sym == defn.Any_equals || sym == defn.Any_hashCode) then + if Feature.shouldBehaveAsScala2 && clazz.isValueClass && (sym == defn.Any_equals || sym == defn.Any_hashCode) then NoSymbol - else if existing != sym && !existing.is(Deferred) then - existing - else + else if existing != sym && !existing.is(Deferred) then + existing + else NoSymbol end existingDef @@ -101,6 +103,7 @@ class SyntheticMembers(thisPhase: DenotTransformer) { val isSimpleEnumValue = isEnumValue && !clazz.owner.isAllOf(EnumCase) val isJavaEnumValue = isEnumValue && clazz.derivesFrom(defn.JavaEnumClass) val isNonJavaEnumValue = isEnumValue && !isJavaEnumValue + val ownName = clazz.name.stripModuleClassSuffix.toString val symbolsToSynthesize: List[Symbol] = if clazz.is(Case) then @@ -124,8 +127,7 @@ class SyntheticMembers(thisPhase: DenotTransformer) { def forwardToRuntime(vrefs: List[Tree]): Tree = ref(defn.runtimeMethodRef("_" + sym.name.toString)).appliedToTermArgs(This(clazz) :: vrefs) - def ownName: Tree = - Literal(Constant(clazz.name.stripModuleClassSuffix.toString)) + def ownNameLit: Tree = Literal(Constant(ownName)) def nameRef: Tree = if isJavaEnumValue then @@ -152,7 +154,7 @@ class SyntheticMembers(thisPhase: DenotTransformer) { Literal(Constant(candidate.get)) def toStringBody(vrefss: List[List[Tree]]): Tree = - if (clazz.is(ModuleClass)) ownName + if (clazz.is(ModuleClass)) ownNameLit else if (isNonJavaEnumValue) identifierRef else forwardToRuntime(vrefss.head) @@ -165,9 +167,9 @@ class SyntheticMembers(thisPhase: DenotTransformer) { case nme.ordinal => ordinalRef case nme.productArity => Literal(Constant(accessors.length)) case nme.productPrefix if isEnumValue => nameRef - case nme.productPrefix => ownName + case nme.productPrefix => ownNameLit case nme.productElement => - if ctx.settings.YcompileScala2Library.value then productElementBodyForScala2Compat(accessors.length, vrefss.head.head) + if Feature.shouldBehaveAsScala2 then productElementBodyForScala2Compat(accessors.length, vrefss.head.head) else productElementBody(accessors.length, vrefss.head.head) case nme.productElementName => productElementNameBody(accessors.length, vrefss.head.head) } @@ -335,39 +337,36 @@ class SyntheticMembers(thisPhase: DenotTransformer) { ref(accessors.head).select(nme.hashCode_).ensureApplied } - /** The class - * - * ``` - * case object C - * ``` - * - * gets the `hashCode` method: - * - * ``` - * def hashCode: Int = "C".hashCode // constant folded - * ``` - * - * The class - * - * ``` - * case class C(x: T, y: U) - * ``` - * - * if none of `T` or `U` are primitive types, gets the `hashCode` method: - * - * ``` - * def hashCode: Int = ScalaRunTime._hashCode(this) - * ``` - * - * else if either `T` or `U` are primitive, gets the `hashCode` method implemented by [[caseHashCodeBody]] + /** + * A `case object C` or a `case class C()` without parameters gets the `hashCode` method + * ``` + * def hashCode: Int = "C".hashCode // constant folded + * ``` + * + * Otherwise, if none of the parameters are primitive types: + * ``` + * def hashCode: Int = MurmurHash3.productHash( + * this, + * Statics.mix(0xcafebabe, "C".hashCode), // constant folded + * ignorePrefix = true) + * ``` + * + * 
The implementation used to invoke `ScalaRunTime._hashCode`, but that implementation mixes in the result + * of `productPrefix`, which causes scala/bug#13033. By setting `ignorePrefix = true` and mixing in the case + * name into the seed, the bug can be fixed and the generated code works with the unchanged Scala library. + * + * For case classes with primitive paramters, see [[caseHashCodeBody]]. */ def chooseHashcode(using Context) = - if (clazz.is(ModuleClass)) - Literal(Constant(clazz.name.stripModuleClassSuffix.toString.hashCode)) + if (accessors.isEmpty) Literal(Constant(ownName.hashCode)) else if (accessors.exists(_.info.finalResultType.classSymbol.isPrimitiveValueClass)) caseHashCodeBody else - ref(defn.ScalaRuntime__hashCode).appliedTo(This(clazz)) + ref(defn.MurmurHash3Module).select(defn.MurmurHash3_productHash).appliedTo( + This(clazz), + Literal(Constant(Statics.mix(0xcafebabe, ownName.hashCode))), + Literal(Constant(true)) + ) /** The class * @@ -380,7 +379,7 @@ class SyntheticMembers(thisPhase: DenotTransformer) { * ``` * def hashCode: Int = { * var acc: Int = 0xcafebabe - * acc = Statics.mix(acc, this.productPrefix.hashCode()); + * acc = Statics.mix(acc, "C".hashCode); * acc = Statics.mix(acc, x); * acc = Statics.mix(acc, Statics.this.anyHash(y)); * Statics.finalizeHash(acc, 2) @@ -391,7 +390,7 @@ class SyntheticMembers(thisPhase: DenotTransformer) { val acc = newSymbol(ctx.owner, nme.acc, Mutable | Synthetic, defn.IntType, coord = ctx.owner.span) val accDef = ValDef(acc, Literal(Constant(0xcafebabe))) val mixPrefix = Assign(ref(acc), - ref(defn.staticsMethod("mix")).appliedTo(ref(acc), This(clazz).select(defn.Product_productPrefix).select(defn.Any_hashCode).appliedToNone)) + ref(defn.staticsMethod("mix")).appliedTo(ref(acc), Literal(Constant(ownName.hashCode)))) val mixes = for (accessor <- accessors) yield Assign(ref(acc), ref(defn.staticsMethod("mix")).appliedTo(ref(acc), hashImpl(accessor))) val finish = ref(defn.staticsMethod("finalizeHash")).appliedTo(ref(acc), Literal(Constant(accessors.size))) @@ -571,8 +570,9 @@ class SyntheticMembers(thisPhase: DenotTransformer) { newSymbol(ctx.owner, pref.paramName.freshened, Synthetic, pref.underlying.translateFromRepeated(toArray = false), coord = ctx.owner.span.focus) val bindingRefs = bindingSyms.map(TermRef(NoPrefix, _)) - // Fix the infos for dependent parameters - if constrMeth.isParamDependent then + // Fix the infos for dependent parameters. We also need to include false dependencies that would + // be fixed by de-aliasing since we do no such de-aliasing here. See i22944.scala. 
+ if constrMeth.looksParamDependent then bindingSyms.foreach: bindingSym => bindingSym.info = bindingSym.info.substParams(constrMeth, bindingRefs) @@ -721,7 +721,7 @@ class SyntheticMembers(thisPhase: DenotTransformer) { val syntheticMembers = serializableObjectMethod(clazz) ::: serializableEnumValueMethod(clazz) ::: caseAndValueMethods(clazz) checkInlining(syntheticMembers) val impl1 = cpy.Template(impl)(body = syntheticMembers ::: impl.body) - if ctx.settings.YcompileScala2Library.value then impl1 + if Feature.shouldBehaveAsScala2 then impl1 else addMirrorSupport(impl1) } diff --git a/compiler/src/dotty/tools/dotc/transform/TailRec.scala b/compiler/src/dotty/tools/dotc/transform/TailRec.scala index b8052721ff27..472ed78bfb6b 100644 --- a/compiler/src/dotty/tools/dotc/transform/TailRec.scala +++ b/compiler/src/dotty/tools/dotc/transform/TailRec.scala @@ -196,7 +196,8 @@ class TailRec extends MiniPhase { def isInfiniteRecCall(tree: Tree): Boolean = { def tailArgOrPureExpr(stat: Tree): Boolean = stat match { case stat: ValDef if stat.name.is(TailTempName) || !stat.symbol.is(Mutable) => tailArgOrPureExpr(stat.rhs) - case Assign(lhs: Ident, rhs) if lhs.symbol.name.is(TailLocalName) => tailArgOrPureExpr(rhs) + case Assign(lhs: Ident, rhs) if lhs.symbol.name.is(TailLocalName) => + tailArgOrPureExpr(rhs) || varForRewrittenThis.exists(_ == lhs.symbol && rhs.tpe.isStable) case Assign(lhs: Ident, rhs: Ident) => lhs.symbol == rhs.symbol case stat: Ident if stat.symbol.name.is(TailLocalName) => true case _ => tpd.isPureExpr(stat) @@ -345,6 +346,9 @@ class TailRec extends MiniPhase { case prefix: This if prefix.symbol == enclosingClass => // Avoid assigning `this = this` assignParamPairs + case prefix if prefix.symbol.is(Module) && prefix.symbol.moduleClass == enclosingClass => + // Avoid assigning `this = MyObject` + assignParamPairs case _ => (getVarForRewrittenThis(), noTailTransform(prefix)) :: assignParamPairs diff --git a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala index c35dc80c04a5..ac1ef34f5ef8 100644 --- a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala +++ b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala @@ -244,7 +244,7 @@ object TreeChecker { private val everDefinedSyms = MutableSymbolMap[untpd.Tree]() // don't check value classes after typer, as the constraint about constructors doesn't hold after transform - override def checkDerivedValueClass(clazz: Symbol, stats: List[Tree])(using Context): Unit = () + override def checkDerivedValueClass(cdef: untpd.TypeDef, clazz: Symbol, stats: List[Tree])(using Context): Unit = () def withDefinedSyms[T](trees: List[untpd.Tree])(op: => T)(using Context): T = { var locally = List.empty[Symbol] @@ -854,30 +854,34 @@ object TreeChecker { val phases = ctx.base.allPhases.toList val treeChecker = new LocalChecker(previousPhases(phases)) + def reportMalformedMacroTree(msg: String | Null, err: Throwable) = + val stack = + if !ctx.settings.Ydebug.value then "\nstacktrace available when compiling with `-Ydebug`" + else if err.getStackTrace == null then " no stacktrace" + else err.getStackTrace.mkString(" ", " \n", "") + report.error( + em"""Malformed tree was found while expanding macro with -Xcheck-macros. + |The tree does not conform to the compiler's tree invariants. 
+ | + |Macro was: + |${scala.quoted.runtime.impl.QuotesImpl.showDecompiledTree(original)} + | + |The macro returned: + |${scala.quoted.runtime.impl.QuotesImpl.showDecompiledTree(expansion)} + | + |Error: + |$msg + |$stack + |""", + original + ) + try treeChecker.typed(expansion)(using checkingCtx) catch case err: java.lang.AssertionError => - val stack = - if !ctx.settings.Ydebug.value then "\nstacktrace available when compiling with `-Ydebug`" - else if err.getStackTrace == null then " no stacktrace" - else err.getStackTrace.nn.mkString(" ", " \n", "") - - report.error( - em"""Malformed tree was found while expanding macro with -Xcheck-macros. - |The tree does not conform to the compiler's tree invariants. - | - |Macro was: - |${scala.quoted.runtime.impl.QuotesImpl.showDecompiledTree(original)} - | - |The macro returned: - |${scala.quoted.runtime.impl.QuotesImpl.showDecompiledTree(expansion)} - | - |Error: - |${err.getMessage} - |$stack - |""", - original - ) + reportMalformedMacroTree(err.getMessage(), err) + case err: UnhandledError => + reportMalformedMacroTree(err.diagnostic.message, err) private[TreeChecker] def previousPhases(phases: List[Phase])(using Context): List[Phase] = phases match { case (phase: MegaPhase) :: phases1 => @@ -890,4 +894,4 @@ object TreeChecker { case _ => Nil } -} +} \ No newline at end of file diff --git a/compiler/src/dotty/tools/dotc/transform/UninitializedDefs.scala b/compiler/src/dotty/tools/dotc/transform/UninitializedDefs.scala index f22fc53e9b6e..7531b6e41c19 100644 --- a/compiler/src/dotty/tools/dotc/transform/UninitializedDefs.scala +++ b/compiler/src/dotty/tools/dotc/transform/UninitializedDefs.scala @@ -33,7 +33,7 @@ class UninitializedDefs extends MiniPhase: def recur(rhs: Tree): Boolean = rhs match case rhs: RefTree => rhs.symbol == defn.Compiletime_uninitialized - && tree.symbol.is(Mutable) && tree.symbol.owner.isClass + && tree.symbol.isMutableVarOrAccessor && tree.symbol.owner.isClass case closureDef(ddef) if defn.isContextFunctionType(tree.tpt.tpe.dealias) => recur(ddef.rhs) case _ => diff --git a/compiler/src/dotty/tools/dotc/transform/UnrollDefinitions.scala b/compiler/src/dotty/tools/dotc/transform/UnrollDefinitions.scala index 44379b88bf16..bf9e20e68930 100644 --- a/compiler/src/dotty/tools/dotc/transform/UnrollDefinitions.scala +++ b/compiler/src/dotty/tools/dotc/transform/UnrollDefinitions.scala @@ -79,7 +79,8 @@ class UnrollDefinitions extends MacroTransform, IdentityDenotTransformer { else Some((paramClauseIndex, annotationIndices)) if indices.nonEmpty then // pre-validation should have occurred in posttyper - assert(annotated.is(Final, butNot = Deferred) || annotated.isConstructor || annotated.owner.is(ModuleClass) || annotated.name.is(DefaultGetterName), + assert(!annotated.isLocal, i"$annotated is local") + assert(annotated.isEffectivelyFinal || annotated.name.is(DefaultGetterName), i"$annotated is not final&concrete, or a constructor") indices }) diff --git a/compiler/src/dotty/tools/dotc/transform/init/Errors.scala b/compiler/src/dotty/tools/dotc/transform/init/Errors.scala index 85feb609c90a..0b843b4b33fd 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Errors.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Errors.scala @@ -31,7 +31,7 @@ object Errors: report.warning(show, this.pos) end Error - override def toString() = this.getClass.getName.nn + override def toString() = this.getClass.getName /** Access non-initialized field */ case class AccessNonInit(field: Symbol)(val trace: Trace) extends Error: diff --git 
a/compiler/src/dotty/tools/dotc/transform/init/Objects.scala b/compiler/src/dotty/tools/dotc/transform/init/Objects.scala index 328446a02e23..cc17f62780e5 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Objects.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Objects.scala @@ -18,6 +18,7 @@ import util.{ SourcePosition, NoSourcePosition } import config.Printers.init as printer import reporting.StoreReporter import reporting.trace as log +import reporting.trace.force as forcelog import typer.Applications.* import Errors.* @@ -55,16 +56,9 @@ import dotty.tools.dotc.util.SrcPos * This principle not only put initialization of static objects on a solid foundation, but also * avoids whole-program analysis. * - * 2. The design is based on the concept of "Top" --- a Top value may not be actively - * used during initialization, i.e., it's forbidden to call methods or access fields of a Top. - * Method arguments are widened to Top by default unless specified to be sensitive. - * Method parameters captured in lambdas or inner classes are always widened to Top. + * 2. It is inter-procedural and flow-sensitive. * - * 3. It is inter-procedural and flow-sensitive. - * - * 4. It is object-sensitive by default and parameter-sensitive on-demand. - * - * 5. The check is modular in the sense that each object is checked separately and there is no + * 3. The check is modular in the sense that each object is checked separately and there is no * whole-program analysis. However, the check is not modular in terms of project boundaries. * */ @@ -90,117 +84,115 @@ class Objects(using Context @constructorOnly): /** Syntax for the data structure abstraction used in abstract domain: * * ve ::= ObjectRef(class) // global object - * | OfClass(class, vs[outer], ctor, args, env) // instance of a class - * | OfArray(object[owner], regions) - * | Fun(..., env) // value elements that can be contained in ValueSet + * | OfClass(class, ownerObject, ctor, regions) // instance of a class + * | OfArray(ownerObject, regions) // represents values of native array class in Array.scala + * | Fun(code, LocalEnv) // value elements that can be contained in ValueSet * | SafeValue // values on which method calls and field accesses won't cause warnings. Int, String, etc. 
+ * | UnknownValue // values whose source are unknown at compile time * vs ::= ValueSet(ve) // set of abstract values - * Bottom ::= ValueSet(Empty) - * val ::= ve | Top | UnknownValue | vs | Package // all possible abstract values in domain - * Ref ::= ObjectRef | OfClass // values that represent a reference to some (global or instance) object - * ThisValue ::= Ref | Top // possible values for 'this' - * - * refMap = Ref -> ( valsMap, varsMap, outersMap ) // refMap stores field informations of an object or instance - * valsMap = valsym -> val // maps immutable fields to their values - * varsMap = valsym -> addr // each mutable field has an abstract address - * outersMap = class -> val // maps outer objects to their values - * - * arrayMap = OfArray -> addr // an array has one address that stores the join value of every element - * - * heap = addr -> val // heap is mutable + * Bottom ::= ValueSet(Empty) // unreachable code + * val ::= ve | vs | Package + * Ref ::= ObjectRef | OfClass | OfArray // values that represent a reference to some (global or instance) object + * ThisValue ::= Ref | Set(Ref) // possible values for 'this' + * LocalEnv(meth, ownerObject) // represents environments for methods or functions + * Scope ::= Ref | LocalEnv + * ScopeSet ::= Set(Scope) * - * env = (valsMap, Option[env]) // stores local variables in the residing method, and possibly outer environments - * - * addr ::= localVarAddr(regions, valsym, owner) - * | fieldVarAddr(regions, valsym, owner) // independent of OfClass/ObjectRef - * | arrayAddr(regions, owner) // independent of array element type + * valsMap = sym -> val // maps variables to their values + * outersMap = sym -> ScopeSet // maps the possible outer scopes for a corresponding (parent) class + * heap.MutableData = Scope -> (valsMap, outersMap) // heap is mutable * * regions ::= List(sourcePosition) */ - sealed abstract class Value: + sealed trait Value: def show(using Context): String - /** ValueElement are elements that can be contained in a RefSet */ - sealed abstract class ValueElement extends Value + /** ValueElement are elements that can be contained in a ValueSet */ + sealed trait ValueElement extends Value /** * A reference caches the values for outers and immutable fields. 
*/ - sealed abstract class Ref( - valsMap: mutable.Map[Symbol, Value], - varsMap: mutable.Map[Symbol, Heap.Addr], - outersMap: mutable.Map[ClassSymbol, Value]) - extends ValueElement: - protected val vals: mutable.Map[Symbol, Value] = valsMap - protected val vars: mutable.Map[Symbol, Heap.Addr] = varsMap - protected val outers: mutable.Map[ClassSymbol, Value] = outersMap - + sealed abstract class Scope(using trace: Trace): // TODO: rename it to reflect that it is key to the heap def isObjectRef: Boolean = this.isInstanceOf[ObjectRef] - def klass: ClassSymbol + def getTrace: Trace = trace + + def isRef = this.isInstanceOf[Ref] - def valValue(sym: Symbol): Value = vals(sym) + def isEnv = this.isInstanceOf[Env.LocalEnv] - def varAddr(sym: Symbol): Heap.Addr = vars(sym) + def asRef: Ref = this.asInstanceOf[Ref] - def outerValue(cls: ClassSymbol): Value = outers(cls) + def asEnv: Env.LocalEnv = this.asInstanceOf[Env.LocalEnv] - def hasVal(sym: Symbol): Boolean = vals.contains(sym) + def owner: ClassSymbol - def hasVar(sym: Symbol): Boolean = vars.contains(sym) + def show(using Context): String + + def outer(using Heap.MutableData): ScopeSet + + def valValue(sym: Symbol)(using Heap.MutableData): Value = Heap.readVal(this, sym) + + def varValue(sym: Symbol)(using Heap.MutableData): Value = Heap.readVal(this, sym) + + def hasVal(sym: Symbol)(using Heap.MutableData): Boolean = Heap.containsVal(this, sym) - def hasOuter(cls: ClassSymbol): Boolean = outers.contains(cls) + def hasVar(sym: Symbol)(using Heap.MutableData): Boolean = Heap.containsVal(this, sym) - def initVal(field: Symbol, value: Value)(using Context) = log("Initialize " + field.show + " = " + value + " for " + this, printer) { + def initVal(field: Symbol, value: Value)(using Context, Heap.MutableData) = log("Initialize " + field.show + " = " + value + " for " + this, printer) { assert(!field.is(Flags.Mutable), "Field is mutable: " + field.show) - assert(!vals.contains(field), "Field already set: " + field.show) - vals(field) = value + Heap.writeJoinVal(this, field, value) } - def initVar(field: Symbol, addr: Heap.Addr)(using Context) = log("Initialize " + field.show + " = " + addr + " for " + this, printer) { + def initVar(field: Symbol, value: Value)(using Context, Heap.MutableData) = log("Initialize " + field.show + " = " + value + " for " + this, printer) { assert(field.is(Flags.Mutable), "Field is not mutable: " + field.show) - assert(!vars.contains(field), "Field already set: " + field.show) - vars(field) = addr + Heap.writeJoinVal(this, field, value) } - def initOuter(cls: ClassSymbol, value: Value)(using Context) = log("Initialize outer " + cls.show + " = " + value + " for " + this, printer) { - assert(!outers.contains(cls), "Outer already set: " + cls) - outers(cls) = value + def initOuter(sym: Symbol, outerScope: ScopeSet)(using Context, Heap.MutableData) = log("Initialize outer " + sym.show + " = " + outerScope + " for " + this, printer) { + Heap.writeJoinOuter(this, sym, outerScope) } + sealed abstract class Ref(using Trace) extends Scope with ValueElement: + def klass: ClassSymbol + + def outerValue(sym: Symbol)(using Heap.MutableData): ScopeSet = Heap.readOuter(this, sym) + + def outer(using Heap.MutableData): ScopeSet = this.outerValue(klass) + /** A reference to a static object */ - case class ObjectRef(klass: ClassSymbol) - extends Ref(valsMap = mutable.Map.empty, varsMap = mutable.Map.empty, outersMap = mutable.Map.empty): - val owner = klass + case class ObjectRef private (klass: ClassSymbol)(using Trace) extends Ref: 
+ def owner = klass def show(using Context) = "ObjectRef(" + klass.show + ")" + object ObjectRef: + def apply(klass: ClassSymbol)(using Context, Heap.MutableData, Trace): ObjectRef = + val obj = new ObjectRef(klass) + obj.initOuter(klass, Env.NoEnv) + obj + /** * Represents values that are instances of the specified class. * * Note that the 2nd parameter block does not take part in the definition of equality. */ case class OfClass private ( - klass: ClassSymbol, outer: Value, ctor: Symbol, args: List[Value], env: Env.Data)( - valsMap: mutable.Map[Symbol, Value], varsMap: mutable.Map[Symbol, Heap.Addr], outersMap: mutable.Map[ClassSymbol, Value]) - extends Ref(valsMap, varsMap, outersMap): - def widenedCopy(outer: Value, args: List[Value], env: Env.Data): OfClass = - new OfClass(klass, outer, ctor, args, env)(this.valsMap, this.varsMap, this.outersMap) - + klass: ClassSymbol, owner: ClassSymbol, ctor: Symbol, regions: Regions.Data)(using Trace) + extends Ref: def show(using Context) = - val valFields = vals.map(_.show + " -> " + _.show) - "OfClass(" + klass.show + ", outer = " + outer + ", args = " + args.map(_.show) + " env = " + env.show + ", vals = " + valFields + ")" + "OfClass(" + klass.show + ", ctor = " + ctor.show + ", owner = " + owner + ")" object OfClass: def apply( - klass: ClassSymbol, outer: Value, ctor: Symbol, args: List[Value], env: Env.Data)( - using Context + klass: ClassSymbol, outerScope: ScopeSet, ctor: Symbol)( + using Context, Heap.MutableData, State.Data, Regions.Data, Trace ): OfClass = - val instance = new OfClass(klass, outer, ctor, args, env)( - valsMap = mutable.Map.empty, varsMap = mutable.Map.empty, outersMap = mutable.Map.empty - ) - instance.initOuter(klass, outer) + val owner = State.currentObject + val instance = new OfClass(klass, owner, ctor, summon[Regions.Data]) + instance.initOuter(klass, outerScope) instance /** @@ -215,17 +207,30 @@ class Objects(using Context @constructorOnly): * * @param owner The static object whose initialization creates the array. */ - case class OfArray(owner: ClassSymbol, regions: Regions.Data)(using @constructorOnly ctx: Context, @constructorOnly trace: Trace) extends ValueElement: - val klass: ClassSymbol = defn.ArrayClass - val addr: Heap.Addr = Heap.arrayAddr(regions, owner) + case class OfArray private (owner: ClassSymbol, regions: Regions.Data)(using Trace) extends Ref: + val elementSymbol = defn.ArrayConstructor + + def klass: ClassSymbol = defn.ArrayClass + def show(using Context) = "OfArray(owner = " + owner.show + ")" + def readElement(using Heap.MutableData) = valValue(elementSymbol) + + def writeElement(value: Value)(using Heap.MutableData) = Heap.writeJoinVal(this, elementSymbol, value) + + object OfArray: + def apply(owner: ClassSymbol, regions: Regions.Data)(using Context, Trace, Heap.MutableData): OfArray = + val arr = new OfArray(owner, regions) + arr.initVal(arr.elementSymbol, Bottom) + arr.initOuter(arr.klass, Env.NoEnv) + arr + /** * Represents a lambda expression * @param klass The enclosing class of the anonymous function's creation site */ - case class Fun(code: Tree, thisV: ThisValue, klass: ClassSymbol, env: Env.Data) extends ValueElement: - def show(using Context) = "Fun(" + code.show + ", " + thisV.show + ", " + klass.show + ")" + case class Fun(code: Tree, thisV: ThisValue, klass: ClassSymbol, scope: Scope) extends ValueElement: + def show(using Context) = "Fun(" + code.show + ", " + scope.show + ", " + klass.show + ")" /** * Represents common base values like Int, String, etc. 
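
The value-domain summary above belongs to the global-object initialization checker in `Objects.scala`. For orientation, a hypothetical user-level program exhibiting the cyclic-initialization pattern this abstract domain is designed to track (the example and the enabling flag, assumed here to be `-Ysafe-init-global`, are illustrative and not part of the diff):

```scala
// Hypothetical example only: mutually dependent global objects.
object A:
  val a: Int = B.b   // forces B's initialization while A is still initializing

object B:
  val b: Int = A.a   // reads A.a before it has been assigned, observing the default 0

@main def show = println((A.a, B.b))  // prints (0,0) at runtime; the checker is meant to flag this statically
```
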
@@ -264,6 +269,11 @@ class Objects(using Context @constructorOnly): assert(typeSymbol.isDefined, "Invalid creation of SafeValue with type " + tpe) new SafeValue(typeSymbol.get) + /** Represents values unknown to the checker, such as values loaded without source + */ + case object UnknownValue extends ValueElement: + def show(using Context): String = "UnknownValue" + /** * Represents a set of values * @@ -272,7 +282,22 @@ class Objects(using Context @constructorOnly): case class ValueSet(values: Set[ValueElement]) extends Value: def show(using Context) = values.map(_.show).mkString("[", ",", "]") - case class Package(packageModuleClass: ClassSymbol) extends Value: + def isRefSet = values.forall(_.isInstanceOf[Ref]) + + def toScopeSet: ScopeSet = ScopeSet(values.asInstanceOf[Set[Scope]]) + + case class ScopeSet(scopes: Set[Scope]): + assert(scopes.forall(_.isRef) || scopes.forall(_.isEnv), "All scopes should have the same type!") + + def show(using Context) = scopes.map(_.show).mkString("[", ",", "]") + + def toValueSet: ValueSet = ValueSet(scopes.asInstanceOf[Set[ValueElement]]) + + def lookupSymbol(sym: Symbol)(using Heap.MutableData) = scopes.map(_.valValue(sym)).join + + def outers(using Heap.MutableData): ScopeSet = scopes.map(_.outer).join + + case class Package(packageModuleClass: ClassSymbol) extends Value: // TODO: try to remove packages def show(using Context): String = "Package(" + packageModuleClass.show + ")" object Package: @@ -280,29 +305,10 @@ class Objects(using Context @constructorOnly): assert(packageSym.is(Flags.Package), "Invalid symbol to create Package!") Package(packageSym.moduleClass.asClass) - /** Represents values unknown to the checker, such as values loaded without source - * UnknownValue is not ValueElement since RefSet containing UnknownValue - * is equivalent to UnknownValue - */ - case object UnknownValue extends Value: - def show(using Context): String = "UnknownValue" - - /** Represents values lost due to widening - * - * This is the top of the abstract domain lattice, which should not - * be used during initialization. 
- * - * Top is not ValueElement since RefSet containing Top - * is equivalent to Top - */ - - case object Top extends Value: - def show(using Context): String = "Top" - val Bottom = ValueSet(ListSet.empty) /** Possible types for 'this' */ - type ThisValue = Ref | Top.type + type ThisValue = Ref | ValueSet /** Checking state */ object State: @@ -315,7 +321,7 @@ class Objects(using Context @constructorOnly): def currentObject(using data: Data): ClassSymbol = data.checkingObjects.last.klass - private def doCheckObject(classSym: ClassSymbol)(using ctx: Context, data: Data) = + private def doCheckObject(classSym: ClassSymbol)(using ctx: Context, data: Data, heap: Heap.MutableData) = val tpl = classSym.defTree.asInstanceOf[TypeDef].rhs.asInstanceOf[Template] var count = 0 @@ -326,12 +332,12 @@ class Objects(using Context @constructorOnly): count += 1 given Trace = Trace.empty.add(classSym.defTree) - given Env.Data = Env.emptyEnv(tpl.constr.symbol) - given Heap.MutableData = Heap.empty() + // given Heap.MutableData = Heap.empty given returns: Returns.Data = Returns.empty() given regions: Regions.Data = Regions.empty // explicit name to avoid naming conflict val obj = ObjectRef(classSym) + given Scope = obj log("Iteration " + count) { data.checkingObjects += obj init(tpl, obj, classSym) @@ -356,8 +362,8 @@ class Objects(using Context @constructorOnly): obj end doCheckObject - def checkObjectAccess(clazz: ClassSymbol)(using data: Data, ctx: Context, pendingTrace: Trace): ObjectRef = - val index = data.checkingObjects.indexOf(ObjectRef(clazz)) + def checkObjectAccess(clazz: ClassSymbol)(using data: Data, ctx: Context, pendingTrace: Trace, heap: Heap.MutableData): ObjectRef = + val index = data.checkingObjects.indexWhere(_.klass == clazz) if index != -1 then if data.checkingObjects.size - 1 > index then @@ -384,121 +390,103 @@ class Objects(using Context @constructorOnly): /** Environment for parameters */ object Env: - abstract class Data: - private[Env] def getVal(x: Symbol)(using Context): Option[Value] - private[Env] def getVar(x: Symbol)(using Context): Option[Heap.Addr] - - def widen(height: Int)(using Context): Data - - def level: Int - - def show(using Context): String - /** Local environments can be deeply nested, therefore we need `outer`. * * For local variables in rhs of class field definitions, the `meth` is the primary constructor. 
*/ - private case class LocalEnv - (private[Env] val params: Map[Symbol, Value], meth: Symbol, outer: Data) - (valsMap: mutable.Map[Symbol, Value], varsMap: mutable.Map[Symbol, Heap.Addr]) - (using Context) - extends Data: - val level = outer.level + 1 - - if (level > 3) - report.warning("[Internal error] Deeply nested environment, level = " + level + ", " + meth.show + " in " + meth.enclosingClass.show, meth.defTree) - - private[Env] val vals: mutable.Map[Symbol, Value] = valsMap - private[Env] val vars: mutable.Map[Symbol, Heap.Addr] = varsMap - - private[Env] def getVal(x: Symbol)(using Context): Option[Value] = - if x.is(Flags.Param) then params.get(x) - else vals.get(x) - - private[Env] def getVar(x: Symbol)(using Context): Option[Heap.Addr] = - vars.get(x) - - def widen(height: Int)(using Context): Data = - new LocalEnv(params.map(_ -> _.widen(height)), meth, outer.widen(height))(this.vals, this.vars) - + case class LocalEnv(meth: Symbol, owner: ClassSymbol)(using Trace) extends Scope: def show(using Context) = - "owner: " + meth.show + "\n" + - "params: " + params.map(_.show + " ->" + _.show).mkString("{", ", ", "}") + "\n" + - "vals: " + vals.map(_.show + " ->" + _.show).mkString("{", ", ", "}") + "\n" + - "vars: " + vars.map(_.show + " ->" + _).mkString("{", ", ", "}") + "\n" + - "outer = {\n" + outer.show + "\n}" + "meth: " + meth.show + "\n" + + "owner: " + owner.show + def outer(using Heap.MutableData): ScopeSet = Heap.readOuter(this, meth) end LocalEnv - object NoEnv extends Data: - val level = 0 - - private[Env] def getVal(x: Symbol)(using Context): Option[Value] = - throw new RuntimeException("Invalid usage of non-existent env") - - private[Env] def getVar(x: Symbol)(using Context): Option[Heap.Addr] = - throw new RuntimeException("Invalid usage of non-existent env") - - def widen(height: Int)(using Context): Data = this - - def show(using Context): String = "NoEnv" - end NoEnv + val NoEnv = ScopeSet(Set.empty) /** An empty environment can be used for non-method environments, e.g., field initializers. * * The owner for the local environment for field initializers is the primary constructor of the * enclosing class. */ - def emptyEnv(meth: Symbol)(using Context): Data = - new LocalEnv(Map.empty, meth, NoEnv)(valsMap = mutable.Map.empty, varsMap = mutable.Map.empty) + def emptyEnv(meth: Symbol)(using Context, State.Data, Heap.MutableData, Trace): LocalEnv = + _of(Map.empty, meth, NoEnv) - def valValue(x: Symbol)(using data: Data, ctx: Context, trace: Trace): Value = - data.getVal(x) match - case Some(theValue) => - theValue - case _ => - report.warning("[Internal error] Value not found " + x.show + "\nenv = " + data.show + ". " + Trace.show, Trace.position) + def valValue(x: Symbol)(using scope: Scope, ctx: Context, trace: Trace, heap: Heap.MutableData): Value = + if scope.hasVal(x) then + scope.valValue(x) + else + report.warning("[Internal error] Value not found " + x.show + "\nscope = " + scope.show + ". 
" + Trace.show, Trace.position) Bottom - def getVal(x: Symbol)(using data: Data, ctx: Context): Option[Value] = data.getVal(x) + private[Env] def _of(argMap: Map[Symbol, Value], meth: Symbol, outerSet: ScopeSet) + (using State.Data, Heap.MutableData, Trace): LocalEnv = + val env = LocalEnv(meth, State.currentObject) + argMap.foreach(env.initVal(_, _)) + env.initOuter(meth, outerSet) + env - def getVar(x: Symbol)(using data: Data, ctx: Context): Option[Heap.Addr] = data.getVar(x) + /** + * The main procedure for searching through the outer chain + * @param target The symbol to search for if `bySymbol = true`; otherwise the method symbol of the target environment + * @param scopeSet The set of scopes as starting point + * @return The scopes that contains symbol `target` or whose method is `target`, + * and the value for `C.this` where C is the enclosing class of the result scopes + */ + private[Env] def resolveEnvRecur( + target: Symbol, scopeSet: ScopeSet, bySymbol: Boolean = true) + : Contextual[Option[(ThisValue, ScopeSet)]] = + if scopeSet == Env.NoEnv then None + else + val targetClass = target.owner.lexicallyEnclosingClass.asClass + val head = scopeSet.scopes.head + val filter = + if bySymbol then + scopeSet.scopes.filter(_.hasVal(target)) + else + scopeSet.scopes.filter(s => s.isEnv && s.asEnv.meth == target) + + assert(filter.isEmpty || filter.size == scopeSet.scopes.size, "Either all scopes or no scopes contain " + target) + if (!filter.isEmpty) then + val resultSet = ScopeSet(filter) + val outerThis = resolveThisRecur(targetClass, resultSet) + Some((outerThis, resultSet)) + else + val outerScopes = scopeSet.outers + resolveEnvRecur(target, outerScopes, bySymbol) - private[Env] def _of(argMap: Map[Symbol, Value], meth: Symbol, outer: Data): Data = - new LocalEnv(argMap, meth, outer)(valsMap = mutable.Map.empty, varsMap = mutable.Map.empty) - def ofDefDef(ddef: DefDef, args: List[Value], outer: Data)(using Context): Data = + def ofDefDef(ddef: DefDef, args: List[Value], outer: ScopeSet) + (using State.Data, Heap.MutableData, Trace): LocalEnv = val params = ddef.termParamss.flatten.map(_.symbol) assert(args.size == params.size, "arguments = " + args.size + ", params = " + params.size) - assert(ddef.symbol.owner.isClass ^ (outer != NoEnv), "ddef.owner = " + ddef.symbol.owner.show + ", outer = " + outer + ", " + ddef.source) + // assert(ddef.symbol.owner.isClass ^ (outer != NoEnv), "ddef.owner = " + ddef.symbol.owner.show + ", outer = " + outer + ", " + ddef.source) _of(params.zip(args).toMap, ddef.symbol, outer) - def ofByName(byNameParam: Symbol, outer: Data): Data = + def ofByName(byNameParam: Symbol, outer: Scope)(using State.Data, Heap.MutableData, Trace): LocalEnv = assert(byNameParam.is(Flags.Param) && byNameParam.info.isInstanceOf[ExprType]); - _of(Map.empty, byNameParam, outer) + _of(Map.empty, byNameParam, ScopeSet(Set(outer))) - def setLocalVal(x: Symbol, value: Value)(using data: Data, ctx: Context): Unit = + def setLocalVal(x: Symbol, value: Value)(using scope: Scope, ctx: Context, heap: Heap.MutableData): Unit = assert(!x.isOneOf(Flags.Param | Flags.Mutable), "Only local immutable variable allowed") - data match + scope match case localEnv: LocalEnv => - assert(!localEnv.vals.contains(x), "Already initialized local " + x.show) - localEnv.vals(x) = value - case _ => - throw new RuntimeException("Incorrect local environment for initializing " + x.show) + localEnv.initVal(x, value) + case ref: Ref => + ref.initVal(x, value) // TODO: This is possible for match statement in 
class body. Report warning? - def setLocalVar(x: Symbol, addr: Heap.Addr)(using data: Data, ctx: Context): Unit = + def setLocalVar(x: Symbol, value: Value)(using scope: Scope, ctx: Context, heap: Heap.MutableData): Unit = assert(x.is(Flags.Mutable, butNot = Flags.Param), "Only local mutable variable allowed") - data match + scope match case localEnv: LocalEnv => - assert(!localEnv.vars.contains(x), "Already initialized local " + x.show) - localEnv.vars(x) = addr - case _ => - throw new RuntimeException("Incorrect local environment for initializing " + x.show) + localEnv.initVar(x, value) + case ref: Ref => + ref.initVar(x, value) // TODO: This is possible for match statement in class body. Report warning? /** * Resolve the environment by searching for a given symbol. * - * Searches for the environment that owns `target`, starting from `env` as the innermost. + * Searches for the environment that defines `target`, starting from `env` as the innermost. * * Due to widening, the corresponding environment might not exist. As a result reading the local * variable will return `Cold` and it's forbidden to write to the local variable. @@ -507,29 +495,15 @@ class Objects(using Context @constructorOnly): * @param thisV The value for `this` of the enclosing class where the local variable is referenced. * @param env The local environment where the local variable is referenced. * - * @return the environment that owns the `target` and value for `this` owned by the given method. + * @return the environment that owns `target`, and the value for `C.this` where `C` is the lexically enclosing class of `target`'s owner. */ - def resolveEnvByValue(target: Symbol, thisV: ThisValue, env: Data)(using Context): Option[(ThisValue, Data)] = log("Resolving env by value for " + target.show + ", this = " + thisV.show + ", env = " + env.show, printer) { - env match - case localEnv: LocalEnv => - if localEnv.getVal(target).isDefined then Some(thisV -> localEnv) - else if localEnv.getVar(target).isDefined then Some(thisV -> localEnv) - else resolveEnvByValue(target, thisV, localEnv.outer) - case NoEnv => - thisV match - case ref: OfClass => - ref.outer match - case outer : ThisValue => - resolveEnvByValue(target, outer, ref.env) - case _ => - // TODO: properly handle the case where ref.outer is ValueSet - None - case _ => - None + def resolveEnvByValue(target: Symbol, thisV: ThisValue, scope: Scope) + (using Context, Heap.MutableData): Contextual[Option[(ThisValue, ScopeSet)]] = log("Resolving env by value for " + target.show + ", this = " + thisV.show + ", scope = " + scope.show, printer) { + resolveEnvRecur(target, ScopeSet(Set(scope))) } /** - * Resolve the environment owned by the given method `enclosing`. + * Resolve the environment associated with the given method `enclosing`, starting from `env` as the innermost. * * The method could be located in an outer scope with intermixed classes between its definition * site and usage site. @@ -544,42 +518,28 @@ class Objects(using Context @constructorOnly): * * @return the environment and value for `this` owned by the given method. 
*/ - def resolveEnvByOwner(enclosing: Symbol, thisV: ThisValue, env: Data)(using Context): Option[(ThisValue, Data)] = log("Resolving env by owner for " + enclosing.show + ", this = " + thisV.show + ", env = " + env.show, printer) { + def resolveEnvByMethod(enclosing: Symbol, thisV: ThisValue, scope: Scope)(using Context, Heap.MutableData): Contextual[(ThisValue, ScopeSet)] = log("Resolving env which corresponds to method " + enclosing.show + ", this = " + thisV.show + ", scope = " + scope.show, printer) { assert(enclosing.is(Flags.Method), "Only method symbols allowed, got " + enclosing.show) - env match - case localEnv: LocalEnv => - if localEnv.meth == enclosing then Some(thisV -> env) - else resolveEnvByOwner(enclosing, thisV, localEnv.outer) - case NoEnv => - thisV match - case ref: OfClass => - ref.outer match - case outer : ThisValue => - resolveEnvByOwner(enclosing, outer, ref.env) - case _ => - // TODO: properly handle the case where ref.outer is ValueSet - None - case _ => - None + val result = resolveEnvRecur(enclosing, ScopeSet(Set(scope)), bySymbol = false) + assert(result.nonEmpty, "Failed to find environment for " + enclosing + "!") + result.get } - def withEnv[T](env: Data)(fn: Data ?=> T): T = fn(using env) + def withEnv[T](env: LocalEnv)(fn: LocalEnv ?=> T): T = fn(using env) end Env /** Abstract heap for mutable fields */ object Heap: - abstract class Addr: - /** The static object which owns the mutable slot */ - def owner: ClassSymbol - def getTrace: Trace = Trace.empty + private case class ScopeBody( + valsMap: Map[Symbol, Value], + outersMap: Map[Symbol, ScopeSet] + ) - /** The address for mutable fields of objects. */ - private case class FieldAddr(regions: Regions.Data, field: Symbol, owner: ClassSymbol)(trace: Trace) extends Addr: - override def getTrace: Trace = trace - - /** The address for mutable local variables . */ - private case class LocalVarAddr(regions: Regions.Data, sym: Symbol, owner: ClassSymbol) extends Addr + private def emptyScopeBody(): ScopeBody = ScopeBody( + valsMap = Map.empty, + outersMap = Map.empty + ) /** Immutable heap data used in the cache. * @@ -587,40 +547,65 @@ class Objects(using Context @constructorOnly): * * TODO: speed up equality check for heap. */ - opaque type Data = Map[Addr, Value] + opaque type Data = Map[Scope, ScopeBody] /** Store the heap as a mutable field to avoid threading it through the program. 
*/ class MutableData(private[Heap] var heap: Data): - private[Heap] def writeJoin(addr: Addr, value: Value): Unit = - heap.get(addr) match + private[Heap] def writeJoinVal(scope: Scope, valSymbol: Symbol, value: Value): Unit = + heap.get(scope) match case None => - heap = heap.updated(addr, value) + heap = heap.updated(scope, Heap.emptyScopeBody()) + writeJoinVal(scope, valSymbol, value) case Some(current) => - val value2 = value.join(current) - if value2 != current then - heap = heap.updated(addr, value2) + val newValsMap = current.valsMap.join(valSymbol, value) + heap = heap.updated(scope, new ScopeBody( + valsMap = newValsMap, + outersMap = current.outersMap + )) + + private[Heap] def writeJoinOuter(scope: Scope, outerSymbol: Symbol, outerScope: ScopeSet): Unit = + heap.get(scope) match + case None => + heap = heap.updated(scope, Heap.emptyScopeBody()) + writeJoinOuter(scope, outerSymbol, outerScope) + + case Some(current) => + val newOutersMap = current.outersMap.join(outerSymbol, outerScope) + heap = heap.updated(scope, new ScopeBody( + valsMap = current.valsMap, + outersMap = newOutersMap + )) end MutableData - def empty(): MutableData = new MutableData(Map.empty) + def empty: MutableData = new MutableData(Map.empty) - def contains(addr: Addr)(using mutable: MutableData): Boolean = - mutable.heap.contains(addr) + def contains(scope: Scope)(using mutable: MutableData): Boolean = + mutable.heap.contains(scope) + + def containsVal(scope: Scope, value: Symbol)(using mutable: MutableData): Boolean = + if mutable.heap.contains(scope) then + mutable.heap(scope).valsMap.contains(value) + else + false - def read(addr: Addr)(using mutable: MutableData): Value = - mutable.heap(addr) + def containsOuter(scope: Scope, outer: Symbol)(using mutable: MutableData): Boolean = + if mutable.heap.contains(scope) then + mutable.heap(scope).outersMap.contains(outer) + else + false - def writeJoin(addr: Addr, value: Value)(using mutable: MutableData): Unit = - mutable.writeJoin(addr, value) + def readVal(scope: Scope, value: Symbol)(using mutable: MutableData): Value = + mutable.heap(scope).valsMap(value) - def localVarAddr(regions: Regions.Data, sym: Symbol, owner: ClassSymbol): Addr = - LocalVarAddr(regions, sym, owner) + def readOuter(scope: Scope, outer: Symbol)(using mutable: MutableData): ScopeSet = + mutable.heap(scope).outersMap(outer) - def fieldVarAddr(regions: Regions.Data, sym: Symbol, owner: ClassSymbol)(using Trace): Addr = - FieldAddr(regions, sym, owner)(summon[Trace]) + def writeJoinVal(scope: Scope, valSymbol: Symbol, value: Value)(using mutable: MutableData): Unit = + mutable.writeJoinVal(scope, valSymbol, value) - def arrayAddr(regions: Regions.Data, owner: ClassSymbol)(using Trace, Context): Addr = - FieldAddr(regions, defn.ArrayClass, owner)(summon[Trace]) + def writeJoinOuter(scope: Scope, outer: Symbol, outerScope: ScopeSet)(using mutable: MutableData): Unit = + mutable.writeJoinOuter(scope, outer, outerScope) def getHeapData()(using mutable: MutableData): Data = mutable.heap @@ -628,16 +613,16 @@ class Objects(using Context @constructorOnly): /** Cache used to terminate the check */ object Cache: - case class Config(thisV: Value, env: Env.Data, heap: Heap.Data) + case class Config(thisV: Value, scope: Scope, heap: Heap.Data) case class Res(value: Value, heap: Heap.Data) class Data extends Cache[Config, Res]: - def get(thisV: Value, expr: Tree)(using Heap.MutableData, Env.Data): Option[Value] = - val config = Config(thisV, summon[Env.Data], Heap.getHeapData()) + def get(thisV: Value, 
expr: Tree)(using Heap.MutableData, Scope): Option[Value] = + val config = Config(thisV, summon[Scope], Heap.getHeapData()) super.get(config, expr).map(_.value) - def cachedEval(thisV: ThisValue, expr: Tree, cacheResult: Boolean)(fun: Tree => Value)(using Heap.MutableData, Env.Data): Value = - val config = Config(thisV, summon[Env.Data], Heap.getHeapData()) + def cachedEval(thisV: ThisValue, expr: Tree, cacheResult: Boolean)(fun: Tree => Value)(using Heap.MutableData, Scope): Value = + val config = Config(thisV, summon[Scope], Heap.getHeapData()) val result = super.cachedEval(config, expr, cacheResult, default = Res(Bottom, Heap.getHeapData())) { expr => Res(fun(expr), Heap.getHeapData()) } @@ -685,28 +670,30 @@ class Objects(using Context @constructorOnly): case None => report.warning("[Internal error] Unhandled return for method " + meth + " in " + meth.owner.show + ". Trace:\n" + Trace.show, Trace.position) - type Contextual[T] = (Context, State.Data, Env.Data, Cache.Data, Heap.MutableData, Regions.Data, Returns.Data, Trace) ?=> T + type Contextual[T] = (Context, State.Data, Scope, Cache.Data, Heap.MutableData, Regions.Data, Returns.Data, Trace) ?=> T // --------------------------- domain operations ----------------------------- case class ArgInfo(value: Value, trace: Trace, tree: Tree) - extension (a: Value) - def join(b: Value): Value = - assert(!a.isInstanceOf[Package] && !b.isInstanceOf[Package], "Unexpected join between " + a + " and " + b) - (a, b) match - case (Top, _) => Top - case (_, Top) => Top - case (UnknownValue, _) => UnknownValue - case (_, UnknownValue) => UnknownValue - case (Bottom, b) => b - case (a, Bottom) => a - case (ValueSet(values1), ValueSet(values2)) => ValueSet(values1 ++ values2) - case (a : ValueElement, ValueSet(values)) => ValueSet(values + a) - case (ValueSet(values), b : ValueElement) => ValueSet(values + b) - case (a : ValueElement, b : ValueElement) => ValueSet(Set(a, b)) - case _ => Bottom + trait Join[V]: + extension (v1: V) + def join(v2: V): V + + given Join[Value] with + extension (a: Value) + def join(b: Value): Value = + assert(!a.isInstanceOf[Package] && !b.isInstanceOf[Package], "Unexpected join between " + a + " and " + b) + (a, b) match + case (Bottom, b) => b + case (a, Bottom) => a + case (ValueSet(values1), ValueSet(values2)) => ValueSet(values1 ++ values2) + case (a : ValueElement, ValueSet(values)) => ValueSet(values + a) + case (ValueSet(values), b : ValueElement) => ValueSet(values + b) + case (a : ValueElement, b : ValueElement) => ValueSet(Set(a, b)) + case _ => Bottom + extension (a: Value) def remove(b: Value): Value = (a, b) match case (ValueSet(values1), b: ValueElement) => ValueSet(values1 - b) case (ValueSet(values1), ValueSet(values2)) => ValueSet(values1.removedAll(values2)) @@ -715,27 +702,6 @@ class Objects(using Context @constructorOnly): case (a: Package, b: Package) if a == b => Bottom case _ => a - def widen(height: Int)(using Context): Value = log("widening value " + a.show + " down to height " + height, printer, (_: Value).show) { - if height == 0 then Top - else - a match - case Bottom => Bottom - - case ValueSet(values) => - values.map(ref => ref.widen(height)).join - - case Fun(code, thisV, klass, env) => - Fun(code, thisV.widenThisValue(height), klass, env.widen(height - 1)) - - case ref @ OfClass(klass, outer, _, args, env) => - val outer2 = outer.widen(height - 1) - val args2 = args.map(_.widen(height - 1)) - val env2 = env.widen(height - 1) - ref.widenedCopy(outer2, args2, env2) - - case _ => a - } - def 
filterType(tpe: Type)(using Context): Value = tpe match case t @ SAMType(_, _) if a.isInstanceOf[Fun] => a // if tpe is SAMType and a is Fun, allow it @@ -751,7 +717,7 @@ class Objects(using Context @constructorOnly): else val klass = sym.asClass a match - case UnknownValue | Top => a + case UnknownValue => a case Package(packageModuleClass) => // the typer might mistakenly set the receiver to be a package instead of package object. // See pos/packageObjectStringInterpolator.scala @@ -759,19 +725,31 @@ class Objects(using Context @constructorOnly): case v: SafeValue => if v.typeSymbol.asClass.isSubClass(klass) then a else Bottom case ref: Ref => if ref.klass.isSubClass(klass) then ref else Bottom case ValueSet(values) => values.map(v => v.filterClass(klass)).join - case arr: OfArray => if defn.ArrayClass.isSubClass(klass) then arr else Bottom case fun: Fun => if klass.isOneOf(AbstractOrTrait) && klass.baseClasses.exists(defn.isFunctionClass) then fun else Bottom - extension (value: ThisValue) - def widenThisValue(height : Int)(using Context) : ThisValue = - assert(height > 0, "Cannot call widenThisValue with height 0!") - value.widen(height).asInstanceOf[ThisValue] + given Join[ScopeSet] with + extension (a: ScopeSet) + def join(b: ScopeSet): ScopeSet = ScopeSet(a.scopes ++ b.scopes) extension (values: Iterable[Value]) - def join: Value = if values.isEmpty then Bottom else values.reduce { (v1, v2) => v1.join(v2) } + def join: Value = + if values.isEmpty then + Bottom + else + values.reduce { (v1, v2) => v1.join(v2) } - def widen(height: Int): Contextual[List[Value]] = values.map(_.widen(height)).toList + extension (scopes: Iterable[ScopeSet]) + def join: ScopeSet = + if scopes.isEmpty then + Env.NoEnv + else + scopes.reduce { (s1, s2) => s1.join(s2) } + + extension [V : Join](map: Map[Symbol, V]) + def join(sym: Symbol, value: V): Map[Symbol, V] = + if !map.contains(sym) then map.updated(sym, value) + else map.updated(sym, map(sym).join(value)) /** Check if the checker option reports warnings about unknown code */ @@ -795,9 +773,6 @@ class Objects(using Context @constructorOnly): */ def call(value: Value, meth: Symbol, args: List[ArgInfo], receiver: Type, superType: Type, needResolve: Boolean = true): Contextual[Value] = log("call " + meth.show + ", this = " + value.show + ", args = " + args.map(_.value.show), printer, (_: Value).show) { value.filterClass(meth.owner) match - case Top => - report.warning("Value is unknown to the checker due to widening. " + Trace.show, Trace.position) - Bottom case UnknownValue => reportWarningForUnknownValue("Using unknown value. 
" + Trace.show, Trace.position) @@ -842,7 +817,7 @@ class Objects(using Context @constructorOnly): val ddef = target.defTree.asInstanceOf[DefDef] val cls = target.owner.enclosingClass.asClass // convert SafeType to an OfClass before analyzing method body - val ref = OfClass(cls, Bottom, NoSymbol, Nil, Env.NoEnv) + val ref = OfClass(cls, Env.NoEnv, NoSymbol) call(ref, meth, args, receiver, superType, needResolve) case Bottom => @@ -856,24 +831,23 @@ class Objects(using Context @constructorOnly): val target = resolve(defn.ArrayClass, meth) if target == defn.Array_apply || target == defn.Array_clone then - if arr.addr.owner == State.currentObject then - Heap.read(arr.addr) + if arr.owner == State.currentObject then + arr.readElement else - errorReadOtherStaticObject(State.currentObject, arr.addr) + errorReadOtherStaticObject(State.currentObject, arr) Bottom else if target == defn.Array_update then assert(args.size == 2, "Incorrect number of arguments for Array update, found = " + args.size) - if arr.addr.owner != State.currentObject then - errorMutateOtherStaticObject(State.currentObject, arr.addr) + if arr.owner != State.currentObject then + errorMutateOtherStaticObject(State.currentObject, arr) else - Heap.writeJoin(arr.addr, args.tail.head.value) + arr.writeElement(args.tail.head.value) Bottom else // Array.length is OK SafeValue(defn.IntType) case ref: Ref => - val isLocal = !meth.owner.isClass val target = if !needResolve then meth @@ -887,7 +861,7 @@ class Objects(using Context @constructorOnly): if target.isOneOf(Flags.Method) then if target.owner == defn.ArrayModuleClass && target.name == nme.apply then val arr = OfArray(State.currentObject, summon[Regions.Data]) - Heap.writeJoin(arr.addr, args.map(_.value).join) + arr.writeElement(args.map(_.value).join) arr else if target.equals(defn.Predef_classOf) then // Predef.classOf is a stub method in tasty and is replaced in backend @@ -896,16 +870,17 @@ class Objects(using Context @constructorOnly): val cls = target.owner.enclosingClass.asClass val ddef = target.defTree.asInstanceOf[DefDef] val meth = ddef.symbol - val (thisV : ThisValue, outerEnv) = - if meth.owner.isClass then - (ref, Env.NoEnv) + if meth.owner.enclosingMethod == cls.primaryConstructor then + // meth is top-level method, outer is a ref + (ref, ScopeSet(Set(ref))) else - Env.resolveEnvByOwner(meth.owner.enclosingMethod, ref, summon[Env.Data]).getOrElse(Top -> Env.NoEnv) + val enclosingMethod = meth.owner.enclosingMethod + Env.resolveEnvByMethod(enclosingMethod, ref, summon[Scope]) val env2 = Env.ofDefDef(ddef, args.map(_.value), outerEnv) extendTrace(ddef) { - given Env.Data = env2 + given Scope = env2 cache.cachedEval(ref, ddef.rhs, cacheResult = true) { expr => Returns.installHandler(meth) val res = cases(expr, thisV, cls) @@ -934,7 +909,7 @@ class Objects(using Context @constructorOnly): code match case ddef: DefDef => if meth.name == nme.apply then - given Env.Data = Env.ofDefDef(ddef, args.map(_.value), env) + given Scope = Env.ofDefDef(ddef, args.map(_.value), ScopeSet(Set(env))) extendTrace(code) { eval(ddef.rhs, thisV, klass, cacheResult = true) } else // The methods defined in `Any` and `AnyRef` are trivial and don't affect initialization. 
@@ -968,11 +943,13 @@ class Objects(using Context @constructorOnly): if ctor.hasSource then val cls = ctor.owner.enclosingClass.asClass val ddef = ctor.defTree.asInstanceOf[DefDef] - val argValues = args.map(_.value) - given Env.Data = Env.ofDefDef(ddef, argValues, Env.NoEnv) + given Scope = ref if ctor.isPrimaryConstructor then val tpl = cls.defTree.asInstanceOf[TypeDef].rhs.asInstanceOf[Template] + val params = tpl.constr.termParamss.flatten.map(_.symbol) + val paramMap = params.zip(args.map(_.value)) + paramMap.foreach(ref.initVal(_, _)) extendTrace(cls.defTree) { eval(tpl, ref, cls, cacheResult = true) } else extendTrace(ddef) { // The return values for secondary constructors can be ignored @@ -985,6 +962,9 @@ class Objects(using Context @constructorOnly): // no source code available UnknownValue + case ValueSet(values) if values.size == 1 => + callConstructor(values.head, ctor, args) + case _ => report.warning("[Internal error] unexpected constructor call, meth = " + ctor + ", this = " + value + Trace.show, Trace.position) Bottom @@ -999,12 +979,13 @@ class Objects(using Context @constructorOnly): */ def select(value: Value, field: Symbol, receiver: Type, needResolve: Boolean = true): Contextual[Value] = log("select " + field.show + ", this = " + value.show, printer, (_: Value).show) { value.filterClass(field.owner) match - case Top => - report.warning("Value is unknown to the checker due to widening. " + Trace.show, Trace.position) - Bottom case UnknownValue => reportWarningForUnknownValue("Using unknown value. " + Trace.show, Trace.position) + case arr: OfArray => + report.warning("[Internal error] unexpected tree in selecting an array, array = " + arr.show + Trace.show, Trace.position) + Bottom + case v @ SafeValue(_) => if v.typeSymbol != defn.NullClass then // selection on Null is sensible on AST level; no warning for it @@ -1024,7 +1005,7 @@ class Objects(using Context @constructorOnly): case ref: Ref => val target = if needResolve then resolve(ref.klass, field) else field if target.is(Flags.Lazy) then - given Env.Data = Env.emptyEnv(target.owner.asInstanceOf[ClassSymbol].primaryConstructor) + given Scope = Env.emptyEnv(target.owner.asInstanceOf[ClassSymbol].primaryConstructor) if target.hasSource then val rhs = target.defTree.asInstanceOf[ValDef].rhs eval(rhs, ref, target.owner.asClass, cacheResult = true) @@ -1032,13 +1013,12 @@ class Objects(using Context @constructorOnly): UnknownValue else if target.exists then def isNextFieldOfColonColon: Boolean = ref.klass == defn.ConsClass && target.name.toString == "next" - if target.isOneOf(Flags.Mutable) && !isNextFieldOfColonColon then + if target.isMutableVarOrAccessor && !isNextFieldOfColonColon then if ref.hasVar(target) then - val addr = ref.varAddr(target) - if addr.owner == State.currentObject then - Heap.read(addr) + if ref.owner == State.currentObject then + ref.varValue(target) else - errorReadOtherStaticObject(State.currentObject, addr) + errorReadOtherStaticObject(State.currentObject, ref) Bottom else if ref.isObjectRef && ref.klass.hasSource then report.warning("Access uninitialized field " + field.show + ". 
" + Trace.show, Trace.position) @@ -1068,10 +1048,6 @@ class Objects(using Context @constructorOnly): report.warning("[Internal error] unexpected tree in selecting a function, fun = " + fun.code.show + Trace.show, fun.code) Bottom - case arr: OfArray => - report.warning("[Internal error] unexpected tree in selecting an array, array = " + arr.show + Trace.show, Trace.position) - Bottom - case Bottom => Bottom case ValueSet(values) => @@ -1087,8 +1063,6 @@ class Objects(using Context @constructorOnly): */ def assign(lhs: Value, field: Symbol, rhs: Value, rhsTyp: Type): Contextual[Value] = log("Assign" + field.show + " of " + lhs.show + ", rhs = " + rhs.show, printer, (_: Value).show) { lhs.filterClass(field.owner) match - case Top => - report.warning("Value is unknown to the checker due to widening. " + Trace.show, Trace.position) case UnknownValue => val _ = reportWarningForUnknownValue("Assigning to unknown value. " + Trace.show, Trace.position) case p: Package => @@ -1106,11 +1080,10 @@ class Objects(using Context @constructorOnly): case ref: Ref => if ref.hasVar(field) then - val addr = ref.varAddr(field) - if addr.owner != State.currentObject then - errorMutateOtherStaticObject(State.currentObject, addr) + if ref.owner != State.currentObject then + errorMutateOtherStaticObject(State.currentObject, ref) else - Heap.writeJoin(addr, rhs) + Heap.writeJoinVal(ref, field, rhs) else report.warning("Mutating a field before its initialization: " + field.show + ". " + Trace.show, Trace.position) end match @@ -1137,7 +1110,7 @@ class Objects(using Context @constructorOnly): case UnknownValue => reportWarningForUnknownValue("Instantiating when outer is unknown. " + Trace.show, Trace.position) - case outer: (Ref | Top.type | Package) => + case outer: (Ref | Package) => if klass == defn.ArrayClass then args.head.tree.tpe match case ConstantType(Constants.Constant(0)) => @@ -1145,26 +1118,29 @@ class Objects(using Context @constructorOnly): Bottom case _ => val arr = OfArray(State.currentObject, summon[Regions.Data]) - Heap.writeJoin(arr.addr, Bottom) arr else // Widen the outer to finitize the domain. Arguments already widened in `evalArgs`. - val (outerWidened, envWidened) = + val envWidened: ScopeSet = outer match case Package(_) => // For top-level classes - (outer, Env.NoEnv) - case thisV : ThisValue => - if klass.owner.isClass then - if klass.owner.is(Flags.Package) then - report.warning("[Internal error] top-level class should have `Package` as outer, class = " + klass.show + ", outer = " + outer.show + ", " + Trace.show, Trace.position) - (Bottom, Env.NoEnv) - else - (thisV.widenThisValue(1), Env.NoEnv) + Env.NoEnv + case outer : ThisValue => + if klass.owner.is(Flags.Package) then + report.warning("[Internal error] top-level class should have `Package` as outer, class = " + klass.show + ", outer = " + outer.show + ", " + Trace.show, Trace.position) + Env.NoEnv else - // klass.enclosingMethod returns its primary constructor - Env.resolveEnvByOwner(klass.owner.enclosingMethod, thisV, summon[Env.Data]).getOrElse(UnknownValue -> Env.NoEnv) + val outerCls = klass.owner.enclosingClass.asClass + // When `klass` is directly nested in `outerCls`, `outerCls`.enclosingMethod returns its primary constructor + if klass.owner.enclosingMethod == outerCls.primaryConstructor then + // Don't use the parameter `outer` as the outer value, but uses `outerCls.this` + // This eliminates infinite outer chain caused by inner classes extending outer classes. 
+ // See `inner-extends-outer.scala` + resolveThis(outerCls, outer).toScopeSet + else + Env.resolveEnvByMethod(klass.owner.enclosingMethod, outer, summon[Scope])._2 - val instance = OfClass(klass, outerWidened, ctor, args.map(_.value), envWidened) + val instance = OfClass(klass, envWidened, ctor) callConstructor(instance, ctor, args) case ValueSet(values) => @@ -1178,9 +1154,7 @@ class Objects(using Context @constructorOnly): */ def initLocal(sym: Symbol, value: Value): Contextual[Unit] = log("initialize local " + sym.show + " with " + value.show, printer) { if sym.is(Flags.Mutable) then - val addr = Heap.localVarAddr(summon[Regions.Data], sym, State.currentObject) - Env.setLocalVar(sym, addr) - Heap.writeJoin(addr, value) + Env.setLocalVar(sym, value) else Env.setLocalVal(sym, value) } @@ -1192,46 +1166,42 @@ class Objects(using Context @constructorOnly): */ def readLocal(thisV: ThisValue, sym: Symbol): Contextual[Value] = log("reading local " + sym.show, printer, (_: Value).show) { def isByNameParam(sym: Symbol) = sym.is(Flags.Param) && sym.info.isInstanceOf[ExprType] + def evalByNameParam(value: Value): Contextual[Value] = value match + case fun: Fun => + given Scope = Env.ofByName(sym, fun.scope) + eval(fun.code, fun.thisV, fun.klass) + case UnknownValue => + reportWarningForUnknownValue("Calling on unknown value. " + Trace.show, Trace.position) + case Bottom => Bottom + case ValueSet(values) if values.size == 1 => + evalByNameParam(values.head) + case _: ValueSet | _: Ref | _: OfArray | _: Package | SafeValue(_) => + report.warning("[Internal error] Unexpected by-name value " + value.show + ". " + Trace.show, Trace.position) + Bottom + end evalByNameParam + // Can't use enclosingMethod here because values defined in a by-name closure will have the wrong enclosingMethod, // since our phase is before elimByName. - Env.resolveEnvByValue(sym, thisV, summon[Env.Data]) match - case Some(thisV -> env) => + Env.resolveEnvByValue(sym, thisV, summon[Scope]) match + case Some(thisV -> scopeSet) => if sym.is(Flags.Mutable) then // Assume forward reference check is doing a good job - given Env.Data = env - Env.getVar(sym) match - case Some(addr) => - if addr.owner == State.currentObject then - Heap.read(addr) - else - errorReadOtherStaticObject(State.currentObject, addr) - Bottom - end if - case _ => - // Only vals can be lazy - report.warning("[Internal error] Variable not found " + sym.show + "\nenv = " + env.show + ". " + Trace.show, Trace.position) + val scopesOwnedByOthers = scopeSet.scopes.filter(_.owner != State.currentObject) + if scopesOwnedByOthers.isEmpty then + scopeSet.lookupSymbol(sym) + else + errorReadOtherStaticObject(State.currentObject, scopesOwnedByOthers.head) Bottom + end if else - given Env.Data = env if sym.is(Flags.Lazy) then val rhs = sym.defTree.asInstanceOf[ValDef].rhs eval(rhs, thisV, sym.enclosingClass.asClass, cacheResult = true) else // Assume forward reference check is doing a good job - val value = Env.valValue(sym) + val value = scopeSet.lookupSymbol(sym) if isByNameParam(sym) then - value match - case fun: Fun => - given Env.Data = Env.ofByName(sym, fun.env) - eval(fun.code, fun.thisV, fun.klass) - case UnknownValue => - reportWarningForUnknownValue("Calling on unknown value. " + Trace.show, Trace.position) - case Top => - report.warning("Calling on value lost due to widening. 
" + Trace.show, Trace.position) - Bottom - case _: ValueSet | _: Ref | _: OfArray | _: Package | SafeValue(_) => - report.warning("[Internal error] Unexpected by-name value " + value.show + ". " + Trace.show, Trace.position) - Bottom + evalByNameParam(value) else value @@ -1253,17 +1223,13 @@ class Objects(using Context @constructorOnly): assert(sym.is(Flags.Mutable), "Writing to immutable variable " + sym.show) // Can't use enclosingMethod here because values defined in a by-name closure will have the wrong enclosingMethod, // since our phase is before elimByName. - Env.resolveEnvByValue(sym, thisV, summon[Env.Data]) match - case Some(thisV -> env) => - given Env.Data = env - Env.getVar(sym) match - case Some(addr) => - if addr.owner != State.currentObject then - errorMutateOtherStaticObject(State.currentObject, addr) - else - Heap.writeJoin(addr, value) - case _ => - report.warning("[Internal error] Variable not found " + sym.show + "\nenv = " + env.show + ". " + Trace.show, Trace.position) + Env.resolveEnvByValue(sym, thisV, summon[Scope]) match + case Some(thisV -> scopeSet) => + val scopesOwnedByOthers = scopeSet.scopes.filter(_.owner != State.currentObject) + if !scopesOwnedByOthers.isEmpty then + errorMutateOtherStaticObject(State.currentObject, scopesOwnedByOthers.head) + else + scopeSet.scopes.foreach(Heap.writeJoinVal(_, sym, value)) case _ => report.warning("Assigning to variables in outer scope. " + Trace.show, Trace.position) @@ -1274,7 +1240,7 @@ class Objects(using Context @constructorOnly): // -------------------------------- algorithm -------------------------------- /** Check an individual object */ - private def accessObject(classSym: ClassSymbol)(using Context, State.Data, Trace): ObjectRef = log("accessing " + classSym.show, printer, (_: Value).show) { + private def accessObject(classSym: ClassSymbol)(using Context, State.Data, Trace, Heap.MutableData): ObjectRef = log("accessing " + classSym.show, printer, (_: Value).show) { if classSym.hasSource then State.checkObjectAccess(classSym) else @@ -1285,6 +1251,7 @@ class Objects(using Context @constructorOnly): def checkClasses(classes: List[ClassSymbol])(using Context): Unit = given State.Data = new State.Data given Trace = Trace.empty + given Heap.MutableData = Heap.empty // TODO: do garbage collection on the heap for classSym <- classes if classSym.isStaticObject @@ -1393,7 +1360,7 @@ class Objects(using Context @constructorOnly): case TermRef(NoPrefix, _) => // resolve this for the local method val enclosingClass = id.symbol.owner.enclosingClass.asClass - val thisValue2 = extendTrace(ref) { resolveThis(enclosingClass, thisV, klass) } + val thisValue2 = extendTrace(ref) { resolveThis(enclosingClass, thisV) } // local methods are not a member, but we can reuse the method `call` withTrace(trace2) { call(thisValue2, id.symbol, args, receiver = NoType, superType = NoType, needResolve = false) } case TermRef(prefix, _) => @@ -1410,7 +1377,7 @@ class Objects(using Context @constructorOnly): case OuterSelectName(_, _) => val current = qualifier.tpe.classSymbol val target = expr.tpe.widenSingleton.classSymbol.asClass - withTrace(trace2) { resolveThis(target, qual, current.asClass) } + withTrace(trace2) { resolveThis(target, qual) } case _ => withTrace(trace2) { select(qual, expr.symbol, receiver = qualifier.tpe) } @@ -1444,18 +1411,17 @@ class Objects(using Context @constructorOnly): extendTrace(id) { evalType(prefix, thisV, klass) } val value = eval(rhs, thisV, klass) - val widened = widenEscapedValue(value, rhs) if isLocal 
then - writeLocal(thisV, lhs.symbol, widened) + writeLocal(thisV, lhs.symbol, value) else - withTrace(trace2) { assign(receiver, lhs.symbol, widened, rhs.tpe) } + withTrace(trace2) { assign(receiver, lhs.symbol, value, rhs.tpe) } case closureDef(ddef) => - Fun(ddef, thisV, klass, summon[Env.Data]) + Fun(ddef, thisV, klass, summon[Scope]) case PolyFun(ddef) => - Fun(ddef, thisV, klass, summon[Env.Data]) + Fun(ddef, thisV, klass, summon[Scope]) case Block(stats, expr) => evalExprs(stats, thisV, klass) @@ -1499,7 +1465,7 @@ class Objects(using Context @constructorOnly): val module = defn.getWrapVarargsArrayModule.moduleClass.asClass val args = evalArgs(elems.map(Arg.apply), thisV, klass) val arr = OfArray(State.currentObject, summon[Regions.Data]) - Heap.writeJoin(arr.addr, args.map(_.value).join) + arr.writeElement(args.map(_.value).join) call(ObjectRef(module), meth, List(ArgInfo(arr, summon[Trace], EmptyTree)), module.typeRef, NoType) case Inlined(call, bindings, expansion) => @@ -1577,6 +1543,7 @@ class Objects(using Context @constructorOnly): case bind @ Bind(_, pat) => val (tpe, value) = evalPattern(scrutinee, pat) + initLocal(bind.symbol, value) (tpe, value) @@ -1803,54 +1770,23 @@ class Objects(using Context @constructorOnly): accessObject(sym.moduleClass.asClass) else - resolveThis(tref.classSymbol.asClass, thisV, klass) + resolveThis(tref.classSymbol.asClass, thisV) case _ => throw new Exception("unexpected type: " + tp + ", Trace:\n" + Trace.show) } - /** Widen the escaped value (a method argument or rhs of an assignment) - * - * The default widening is 1 for most values, 2 for function values. - * User-specified widening annotations are repected. - */ - def widenEscapedValue(value: Value, annotatedTree: Tree): Contextual[Value] = - def parseAnnotation: Option[Int] = - annotatedTree.tpe.getAnnotation(defn.InitWidenAnnot).flatMap: annot => - annot.argument(0).get match - case arg @ Literal(c: Constants.Constant) => - val height = c.intValue - if height < 0 then - report.warning("The argument should be positive", arg) - None - else - Some(height) - case arg => - report.warning("The argument should be a constant integer value", arg) - None - end parseAnnotation - - parseAnnotation match - case Some(i) => - value.widen(i) - - case None => - if value.isInstanceOf[Fun] - then value.widen(2) - else value.widen(1) - /** Evaluate arguments of methods and constructors */ def evalArgs(args: List[Arg], thisV: ThisValue, klass: ClassSymbol): Contextual[List[ArgInfo]] = val argInfos = new mutable.ArrayBuffer[ArgInfo] args.foreach { arg => val res = if arg.isByName then - Fun(arg.tree, thisV, klass, summon[Env.Data]) + Fun(arg.tree, thisV, klass, summon[Scope]) else eval(arg.tree, thisV, klass) - val widened = widenEscapedValue(res, arg.tree) - argInfos += ArgInfo(widened, trace.add(arg.tree), arg.tree) + argInfos += ArgInfo(res, trace.add(arg.tree), arg.tree) } argInfos.toList @@ -1869,9 +1805,7 @@ class Objects(using Context @constructorOnly): klass.paramGetters.foreach { acc => val value = paramsMap(acc.name.toTermName) if acc.is(Flags.Mutable) then - val addr = Heap.fieldVarAddr(summon[Regions.Data], acc, State.currentObject) - thisV.initVar(acc, addr) - Heap.writeJoin(addr, value) + thisV.initVar(acc, value) else thisV.initVal(acc, value) printer.println(acc.show + " initialized with " + value) @@ -1884,7 +1818,17 @@ class Objects(using Context @constructorOnly): val cls = tref.classSymbol.asClass // update outer for super class val res = outerValue(tref, thisV, klass) - thisV.initOuter(cls, 
res) + res match { + case ref: Ref => thisV.initOuter(cls, ScopeSet(Set(ref))) + case vs: ValueSet if vs.isRefSet => + thisV.initOuter(cls, vs.toScopeSet) + case _: Package => + thisV.initOuter(cls, Env.NoEnv) + case _ => + val error = "[Internal error] Invalid outer value, cls = " + cls + ", value = " + res + Trace.show + report.warning(error, Trace.position) + return + } // follow constructor if cls.hasSource then @@ -1895,7 +1839,7 @@ class Objects(using Context @constructorOnly): } // parents - def initParent(parent: Tree, tasks: Tasks) = + def initParent(parent: Tree, tasks: Tasks) = // TODO: store the parent objects and resolve `p.this` for parent classes `p` parent match case tree @ Block(stats, NewExpr(tref, New(tpt), ctor, argss)) => // can happen evalExprs(stats, thisV, klass) @@ -1964,9 +1908,7 @@ class Objects(using Context @constructorOnly): val sym = vdef.symbol val res = if (allowList.contains(sym)) Bottom else eval(vdef.rhs, thisV, klass) if sym.is(Flags.Mutable) then - val addr = Heap.fieldVarAddr(summon[Regions.Data], sym, State.currentObject) - thisV.initVar(sym, addr) - Heap.writeJoin(addr, res) + thisV.initVar(sym, res) else thisV.initVal(sym, res) @@ -1980,42 +1922,52 @@ class Objects(using Context @constructorOnly): } - /** Resolve C.this that appear in `klass` + /** Resolve C.this by recursively searching through the outer chain + * @param target The class symbol for `C` for which `C.this` is to be resolved. + * @param scopeSet The scopes as the starting point. + */ + def resolveThisRecur(target: ClassSymbol, scopeSet: ScopeSet): Contextual[ValueSet] = + if scopeSet == Env.NoEnv then + Bottom + else + val head = scopeSet.scopes.head + if head.isInstanceOf[Ref] then + val klass = head.asInstanceOf[Ref].klass + assert(scopeSet.scopes.forall(_.asInstanceOf[Ref].klass == klass), "Multiple possible outer class?") + if klass == target then + scopeSet.toValueSet + else + resolveThisRecur(target, scopeSet.outers) + else + resolveThisRecur(target, scopeSet.outers) + + /** Resolve C.this that appear in `D.this` * * @param target The class symbol for `C` for which `C.this` is to be resolved. - * @param thisV The value for `D.this` where `D` is represented by the parameter `klass`. - * @param klass The enclosing class where the type `C.this` is located. + * @param thisV The value for `D.this`. * @param elideObjectAccess Whether object access should be omitted. * * Object access elision happens when the object access is used as a prefix * in `new o.C` and `C` does not need an outer. */ - def resolveThis(target: ClassSymbol, thisV: Value, klass: ClassSymbol, elideObjectAccess: Boolean = false): Contextual[Value] = log("resolveThis target = " + target.show + ", this = " + thisV.show, printer, (_: Value).show) { - if target == klass then - thisV - else if target.is(Flags.Package) then - Package(target) // TODO: What is the semantics for package.this? 
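// Editor's note, illustrative only (not part of the patch; `Demo`, `Outer`, and `Inner` are
// hypothetical names): resolveThisRecur above and the new resolveThis below walk the outer
// scopes recorded in the heap to find the value of `C.this`. The "inner classes extending
// outer classes" comment earlier refers to shapes like the following, where every Inner is
// itself an Outer, so naively propagating the instantiation-site outer value could build an
// ever-growing outer chain; resolving `outerCls.this` at the definition site keeps it finite.
object Demo:
  class Outer:
    def spawn: Outer = new Inner   // the new Inner's outer is Outer.this
    class Inner extends Outer      // an Inner is also an Outer
  val first: Outer = new Outer
  val second: Outer = first.spawn  // second is an Inner whose outer is first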
+ def resolveThis(target: ClassSymbol, thisV: Value, elideObjectAccess: Boolean = false): Contextual[ValueSet] = log("resolveThis target = " + target.show + ", this = " + thisV.show, printer, (_: Value).show) { + if target.is(Flags.Package) then + val error = "[Internal error] target cannot be packages, target = " + target + Trace.show + report.warning(error, Trace.position) + Bottom else if target.isStaticObject then val res = ObjectRef(target.moduleClass.asClass) - if elideObjectAccess then res - else accessObject(target) + if elideObjectAccess then ValueSet(Set(res)) + else ValueSet(Set(accessObject(target))) else thisV match case Bottom => Bottom - case UnknownValue => UnknownValue - case Top => Top case ref: Ref => - val outerCls = klass.owner.lexicallyEnclosingClass.asClass - if !ref.hasOuter(klass) then - val error = "[Internal error] outer not yet initialized, target = " + target + ", klass = " + klass + Trace.show - report.warning(error, Trace.position) - Bottom - else - resolveThis(target, ref.outerValue(klass), outerCls) - case ValueSet(values) => - values.map(ref => resolveThis(target, ref, klass)).join - case _: Fun | _ : OfArray | _: Package | SafeValue(_) => - report.warning("[Internal error] unexpected thisV = " + thisV + ", target = " + target.show + ", klass = " + klass.show + Trace.show, Trace.position) + resolveThisRecur(target, ScopeSet(Set(ref))) + case vs: ValueSet if vs.isRefSet => + resolveThisRecur(target, vs.toScopeSet) + case _ => + report.warning("[Internal error] unexpected thisV = " + thisV + ", target = " + target.show + Trace.show, Trace.position) Bottom } @@ -2029,7 +1981,7 @@ class Objects(using Context @constructorOnly): val cls = tref.classSymbol.asClass if tref.prefix == NoPrefix then val enclosing = cls.owner.lexicallyEnclosingClass.asClass - resolveThis(enclosing, thisV, klass, elideObjectAccess = cls.isStatic) + resolveThis(enclosing, thisV, elideObjectAccess = cls.isStatic) else if cls.isAllOf(Flags.JavaInterface) then Bottom else evalType(tref.prefix, thisV, klass, elideObjectAccess = cls.isStatic) @@ -2040,25 +1992,25 @@ class Objects(using Context @constructorOnly): else "" val mutateErrorSet: mutable.Set[(ClassSymbol, ClassSymbol)] = mutable.Set.empty - def errorMutateOtherStaticObject(currentObj: ClassSymbol, addr: Heap.Addr)(using Trace, Context) = - val otherObj = addr.owner - val addr_trace = addr.getTrace + def errorMutateOtherStaticObject(currentObj: ClassSymbol, scope: Scope)(using Trace, Context) = + val otherObj = scope.owner + val scope_trace = scope.getTrace if mutateErrorSet.add((currentObj, otherObj)) then val msg = s"Mutating ${otherObj.show} during initialization of ${currentObj.show}.\n" + "Mutating other static objects during the initialization of one static object is forbidden. 
" + Trace.show + - printTraceWhenMultiple(addr_trace) + printTraceWhenMultiple(scope_trace) report.warning(msg, Trace.position) val readErrorSet: mutable.Set[(ClassSymbol, ClassSymbol)] = mutable.Set.empty - def errorReadOtherStaticObject(currentObj: ClassSymbol, addr: Heap.Addr)(using Trace, Context) = - val otherObj = addr.owner - val addr_trace = addr.getTrace + def errorReadOtherStaticObject(currentObj: ClassSymbol, scope: Scope)(using Trace, Context) = + val otherObj = scope.owner + val scope_trace = scope.getTrace if readErrorSet.add((currentObj, otherObj)) then val msg = "Reading mutable state of " + otherObj.show + " during initialization of " + currentObj.show + ".\n" + "Reading mutable state of other static objects is forbidden as it breaks initialization-time irrelevance. " + Trace.show + - printTraceWhenMultiple(addr_trace) + printTraceWhenMultiple(scope_trace) report.warning(msg, Trace.position) diff --git a/compiler/src/dotty/tools/dotc/transform/init/Semantic.scala b/compiler/src/dotty/tools/dotc/transform/init/Semantic.scala index adb2370bb1e0..a8a855b6ae5b 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Semantic.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Semantic.scala @@ -448,6 +448,18 @@ object Semantic: object TreeCache: class CacheData: private val emptyTrees = mutable.Set[ValOrDefDef]() + private val templatesToSkip = mutable.Set[Template]() + + def checkTemplateBodyValidity(tpl: Template, className: String)(using Context): Unit = + if (templatesToSkip.contains(tpl)) + throw new TastyTreeException(className) + + val errorCount = ctx.reporter.errorCount + tpl.forceFields() + + if (ctx.reporter.errorCount > errorCount) + templatesToSkip.add(tpl) + throw new TastyTreeException(className) extension (tree: ValOrDefDef) def getRhs(using Context): Tree = @@ -465,7 +477,9 @@ object Semantic: if (emptyTrees.contains(tree)) EmptyTree else getTree end TreeCache - + + inline def treeCache(using t: TreeCache.CacheData): TreeCache.CacheData = t + // ----- Operations on domains ----------------------------- extension (a: Value) def join(b: Value): Value = @@ -654,6 +668,8 @@ object Semantic: val methodType = atPhaseBeforeTransforms { meth.info.stripPoly } var allArgsHot = true val allParamTypes = methodType.paramInfoss.flatten.map(_.repeatedToSingle) + if(allParamTypes.size != args.size) + report.warning("[Internal error] Number of parameters do not match number of arguments in " + meth.name) val errors = allParamTypes.zip(args).flatMap { (info, arg) => val tryReporter = Reporter.errorsIn { arg.promote } allArgsHot = allArgsHot && tryReporter.errors.isEmpty @@ -1173,7 +1189,10 @@ object Semantic: given Cache.Data() given TreeCache.CacheData() for classSym <- classes if isConcreteClass(classSym) && !classSym.isStaticObject do - checkClass(classSym) + try + checkClass(classSym) + catch + case TastyTreeException(className) => report.warning("Skipping the analysis of " + classSym.show + " due to an error reading the body of " + className + "'s TASTy.") // ----- Semantic definition -------------------------------- type ArgInfo = TraceValue[Value] @@ -1520,6 +1539,8 @@ object Semantic: * @param klass The class to which the template belongs. 
*/ def init(tpl: Template, thisV: Ref, klass: ClassSymbol): Contextual[Value] = log("init " + klass.show, printer, (_: Value).show) { + treeCache.checkTemplateBodyValidity(tpl, klass.show) + val paramsMap = tpl.constr.termParamss.flatten.map { vdef => vdef.name -> thisV.objekt.field(vdef.symbol) }.toMap diff --git a/compiler/src/dotty/tools/dotc/transform/init/Trace.scala b/compiler/src/dotty/tools/dotc/transform/init/Trace.scala index ffaccad963af..9a00589197d5 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Trace.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Trace.scala @@ -51,7 +51,7 @@ object Trace: val line = if pos.source.exists then val loc = "[ " + pos.source.file.name + ":" + (pos.line + 1) + " ]" - val code = SyntaxHighlighting.highlight(pos.lineContent.trim.nn) + val code = SyntaxHighlighting.highlight(pos.lineContent.trim) i"$code\t$loc" else tree match @@ -59,7 +59,7 @@ object Trace: // The definition can be huge, avoid printing the whole definition. defDef.symbol.showFullName case _ => - tree.show.split(System.lineSeparator(), 2).nn.head.nn + tree.show.split(System.lineSeparator(), 2).head val positionMarkerLine = if pos.exists && pos.source.exists then @@ -86,7 +86,7 @@ object Trace: */ private def positionMarker(pos: SourcePosition): String = val trimmed = pos.source.lineContent(pos.start).takeWhile(c => c.isWhitespace).length - val padding = pos.startColumnPadding.substring(trimmed).nn + val padding = pos.startColumnPadding.substring(trimmed) val carets = if (pos.startLine == pos.endLine) "^" * math.max(1, pos.endColumn - pos.startColumn) diff --git a/compiler/src/dotty/tools/dotc/transform/init/Util.scala b/compiler/src/dotty/tools/dotc/transform/init/Util.scala index e11d0e1e21a5..3280c289f926 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Util.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Util.scala @@ -15,6 +15,9 @@ import config.Printers.init as printer import Trace.* object Util: + /** Exception used for errors encountered when reading TASTy. */ + case class TastyTreeException(msg: String) extends RuntimeException(msg) + /** Utility definition used for better error-reporting of argument errors */ case class TraceValue[T](value: T, trace: Trace) @@ -43,6 +46,8 @@ object Util: case Apply(fn, args) => val argTps = fn.tpe.widen match case mt: MethodType => mt.paramInfos + if (args.size != argTps.size) + report.warning("[Internal error] Number of arguments do not match number of argument types in " + tree.symbol.name) val normArgs: List[Arg] = args.zip(argTps).map { case (arg, _: ExprType) => ByNameArg(arg) case (arg, _) => arg @@ -112,5 +117,5 @@ object Util: /** Whether the class or its super class/trait contains any mutable fields? 
*/ def isMutable(cls: ClassSymbol)(using Context): Boolean = - cls.classInfo.decls.exists(_.is(Flags.Mutable)) || + cls.classInfo.decls.exists(_.isMutableVarOrAccessor) || cls.parentSyms.exists(parentCls => isMutable(parentCls.asClass)) diff --git a/compiler/src/dotty/tools/dotc/transform/localopt/FormatChecker.scala b/compiler/src/dotty/tools/dotc/transform/localopt/FormatChecker.scala index 4922024b6c35..42e5f0acb3f6 100644 --- a/compiler/src/dotty/tools/dotc/transform/localopt/FormatChecker.scala +++ b/compiler/src/dotty/tools/dotc/transform/localopt/FormatChecker.scala @@ -5,13 +5,12 @@ import scala.annotation.tailrec import scala.collection.mutable.ListBuffer import scala.util.matching.Regex.Match -import PartialFunction.cond - import dotty.tools.dotc.ast.tpd.{Match => _, *} import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.core.Types.* import dotty.tools.dotc.core.Phases.typerPhase +import dotty.tools.dotc.reporting.BadFormatInterpolation import dotty.tools.dotc.util.Spans.Span import dotty.tools.dotc.util.chaining.* @@ -29,8 +28,9 @@ class TypedFormatChecker(partsElems: List[Tree], parts: List[String], args: List def argType(argi: Int, types: Type*): Type = require(argi < argc, s"$argi out of range picking from $types") val tpe = argTypes(argi) - types.find(t => argConformsTo(argi, tpe, t)) - .orElse(types.find(t => argConvertsTo(argi, tpe, t))) + types.find(t => t != defn.AnyType && argConformsTo(argi, tpe, t)) + .orElse(types.find(t => t != defn.AnyType && argConvertsTo(argi, tpe, t))) + .orElse(types.find(t => t == defn.AnyType && argConformsTo(argi, tpe, t))) .getOrElse { report.argError(s"Found: ${tpe.show}, Required: ${types.map(_.show).mkString(", ")}", argi) actuals += args(argi) @@ -63,50 +63,57 @@ class TypedFormatChecker(partsElems: List[Tree], parts: List[String], args: List /** For N part strings and N-1 args to interpolate, normalize parts and check arg types. * - * Returns normalized part strings and args, where args correcpond to conversions in tail of parts. + * Returns normalized part strings and args, where args correspond to conversions in tail of parts. 
*/ def checked: (List[String], List[Tree]) = val amended = ListBuffer.empty[String] val convert = ListBuffer.empty[Conversion] + def checkPart(part: String, n: Int): Unit = + val matches = formatPattern.findAllMatchIn(part) + + def insertStringConversion(): Unit = + amended += "%s" + part + val cv = Conversion.stringXn(n) + cv.accepts(argType(n-1, defn.AnyType)) + convert += cv + cv.lintToString(argTypes(n-1)) + + def errorLeading(op: Conversion) = op.errorAt(Spec): + s"conversions must follow a splice; ${Conversion.literalHelp}" + + def accept(op: Conversion): Unit = + if !op.isLeading then errorLeading(op) + op.accepts(argType(n-1, op.acceptableVariants*)) + amended += part + convert += op + op.lintToString(argTypes(n-1)) + + // after the first part, a leading specifier is required for the interpolated arg; %s is supplied if needed + if n == 0 then amended += part + else if !matches.hasNext then insertStringConversion() + else + val cv = Conversion(matches.next(), n) + if cv.isLiteral then insertStringConversion() + else if cv.isIndexed then + if cv.index.getOrElse(-1) == n then accept(cv) else insertStringConversion() + else if !cv.isError then accept(cv) + + // any remaining conversions in this part must be either literals or indexed + while matches.hasNext do + val cv = Conversion(matches.next(), n) + if n == 0 && cv.hasFlag('<') then cv.badFlag('<', "No last arg") + else if !cv.isLiteral && !cv.isIndexed then errorLeading(cv) + end checkPart + @tailrec - def loop(remaining: List[String], n: Int): Unit = - remaining match - case part0 :: more => - def badPart(t: Throwable): String = "".tap(_ => report.partError(t.getMessage.nn, index = n, offset = 0)) - val part = try StringContext.processEscapes(part0) catch badPart - val matches = formatPattern.findAllMatchIn(part) - - def insertStringConversion(): Unit = - amended += "%s" + part - convert += Conversion(formatPattern.findAllMatchIn("%s").next(), n) // improve - argType(n-1, defn.AnyType) - def errorLeading(op: Conversion) = op.errorAt(Spec)(s"conversions must follow a splice; ${Conversion.literalHelp}") - def accept(op: Conversion): Unit = - if !op.isLeading then errorLeading(op) - op.accepts(argType(n-1, op.acceptableVariants*)) - amended += part - convert += op - - // after the first part, a leading specifier is required for the interpolated arg; %s is supplied if needed - if n == 0 then amended += part - else if !matches.hasNext then insertStringConversion() - else - val cv = Conversion(matches.next(), n) - if cv.isLiteral then insertStringConversion() - else if cv.isIndexed then - if cv.index.getOrElse(-1) == n then accept(cv) else insertStringConversion() - else if !cv.isError then accept(cv) - - // any remaining conversions in this part must be either literals or indexed - while matches.hasNext do - val cv = Conversion(matches.next(), n) - if n == 0 && cv.hasFlag('<') then cv.badFlag('<', "No last arg") - else if !cv.isLiteral && !cv.isIndexed then errorLeading(cv) - - loop(more, n + 1) - case Nil => () - end loop + def loop(remaining: List[String], n: Int): Unit = remaining match + case part0 :: remaining => + def badPart(t: Throwable): String = "".tap(_ => report.partError(t.getMessage, index = n, offset = 0)) + val part = try StringContext.processEscapes(part0) catch badPart + checkPart(part, n) + loop(remaining, n + 1) + case Nil => loop(parts, n = 0) if reported then (Nil, Nil) @@ -124,10 +131,8 @@ class TypedFormatChecker(partsElems: List[Tree], parts: List[String], args: List def intOf(g: SpecGroup): Option[Int] = 
group(g).map(_.toInt) extension (inline value: Boolean) - inline def or(inline body: => Unit): Boolean = value || { body ; false } - inline def orElse(inline body: => Unit): Boolean = value || { body ; true } - inline def and(inline body: => Unit): Boolean = value && { body ; true } - inline def but(inline body: => Unit): Boolean = value && { body ; false } + inline infix def or(inline body: => Unit): Boolean = value || { body; false } + inline infix def and(inline body: => Unit): Boolean = value && { body; true } enum Kind: case StringXn, HashXn, BooleanXn, CharacterXn, IntegralXn, FloatingPointXn, DateTimeXn, LiteralXn, ErrorXn @@ -146,9 +151,10 @@ class TypedFormatChecker(partsElems: List[Tree], parts: List[String], args: List // the conversion char is the head of the op string (but see DateTimeXn) val cc: Char = kind match - case ErrorXn => if op.isEmpty then '?' else op(0) - case DateTimeXn => if op.length > 1 then op(1) else '?' - case _ => op(0) + case ErrorXn => if op.isEmpty then '?' else op(0) + case DateTimeXn => if op.length <= 1 then '?' else op(1) + case StringXn => if op.isEmpty then 's' else op(0) // accommodate the default %s + case _ => op(0) def isIndexed: Boolean = index.nonEmpty || hasFlag('<') def isError: Boolean = kind == ErrorXn @@ -208,18 +214,28 @@ class TypedFormatChecker(partsElems: List[Tree], parts: List[String], args: List // is the specifier OK with the given arg def accepts(arg: Type): Boolean = kind match - case BooleanXn => arg == defn.BooleanType orElse warningAt(CC)("Boolean format is null test for non-Boolean") - case IntegralXn => - arg == BigIntType || !cond(cc) { - case 'o' | 'x' | 'X' if hasAnyFlag("+ (") => "+ (".filter(hasFlag).foreach(bad => badFlag(bad, s"only use '$bad' for BigInt conversions to o, x, X")) ; true - } + case BooleanXn if arg != defn.BooleanType => + warningAt(CC): + """non-Boolean value formats as "true" for non-null references and boxed primitives, otherwise "false"""" + true + case IntegralXn if arg != BigIntType => + cc match + case 'o' | 'x' | 'X' if hasAnyFlag("+ (") => + "+ (".filter(hasFlag).foreach: bad => + badFlag(bad, s"only use '$bad' for BigInt conversions to o, x, X") + false case _ => true + case _ => true + + def lintToString(arg: Type): Unit = + if ctx.settings.Whas.toStringInterpolated && kind == StringXn && !(arg.widen =:= defn.StringType) && !arg.isPrimitiveValueType + then warningAt(CC)("interpolation uses toString") // what arg type if any does the conversion accept def acceptableVariants: List[Type] = kind match case StringXn => if hasFlag('#') then FormattableType :: Nil else defn.AnyType :: Nil - case BooleanXn => defn.BooleanType :: defn.NullType :: Nil + case BooleanXn => defn.BooleanType :: defn.NullType :: defn.AnyType :: Nil // warn if not boolean case HashXn => defn.AnyType :: Nil case CharacterXn => defn.CharType :: defn.ByteType :: defn.ShortType :: defn.IntType :: Nil case IntegralXn => defn.IntType :: defn.LongType :: defn.ByteType :: defn.ShortType :: BigIntType :: Nil @@ -248,25 +264,30 @@ class TypedFormatChecker(partsElems: List[Tree], parts: List[String], args: List object Conversion: def apply(m: Match, i: Int): Conversion = - def kindOf(cc: Char) = cc match - case 's' | 'S' => StringXn - case 'h' | 'H' => HashXn - case 'b' | 'B' => BooleanXn - case 'c' | 'C' => CharacterXn - case 'd' | 'o' | - 'x' | 'X' => IntegralXn - case 'e' | 'E' | - 'f' | - 'g' | 'G' | - 'a' | 'A' => FloatingPointXn - case 't' | 'T' => DateTimeXn - case '%' | 'n' => LiteralXn - case _ => ErrorXn - end kindOf 
m.group(CC) match - case Some(cc) => new Conversion(m, i, kindOf(cc(0))).tap(_.verify) - case None => new Conversion(m, i, ErrorXn).tap(_.errorAt(Spec)(s"Missing conversion operator in '${m.matched}'; $literalHelp")) + case Some(cc) => + val xn = cc(0) match + case 's' | 'S' => StringXn + case 'h' | 'H' => HashXn + case 'b' | 'B' => BooleanXn + case 'c' | 'C' => CharacterXn + case 'd' | 'o' | + 'x' | 'X' => IntegralXn + case 'e' | 'E' | + 'f' | + 'g' | 'G' | + 'a' | 'A' => FloatingPointXn + case 't' | 'T' => DateTimeXn + case '%' | 'n' => LiteralXn + case _ => ErrorXn + new Conversion(m, i, xn) + .tap(_.verify) + case None => + new Conversion(m, i, ErrorXn) + .tap(_.errorAt(Spec)(s"Missing conversion operator in '${m.matched}'; $literalHelp")) end apply + // construct a default %s conversion + def stringXn(i: Int): Conversion = new Conversion(formatPattern.findAllMatchIn("%").next(), i, StringXn) val literalHelp = "use %% for literal %, %n for newline" end Conversion @@ -276,10 +297,16 @@ class TypedFormatChecker(partsElems: List[Tree], parts: List[String], args: List val pos = partsElems(index).sourcePos val bgn = pos.span.start + offset val fin = if end < 0 then pos.span.end else pos.span.start + end - pos.withSpan(Span(bgn, fin, bgn)) + pos.withSpan(Span(start = bgn, end = fin, point = bgn)) extension (r: report.type) - def argError(message: String, index: Int): Unit = r.error(message, args(index).srcPos).tap(_ => reported = true) - def partError(message: String, index: Int, offset: Int, end: Int = -1): Unit = r.error(message, partPosAt(index, offset, end)).tap(_ => reported = true) - def partWarning(message: String, index: Int, offset: Int, end: Int = -1): Unit = r.warning(message, partPosAt(index, offset, end)).tap(_ => reported = true) + def argError(message: String, index: Int): Unit = + r.error(BadFormatInterpolation(message), args(index).srcPos) + .tap(_ => reported = true) + def partError(message: String, index: Int, offset: Int, end: Int = -1): Unit = + r.error(BadFormatInterpolation(message), partPosAt(index, offset, end)) + .tap(_ => reported = true) + def partWarning(message: String, index: Int, offset: Int, end: Int): Unit = + r.warning(BadFormatInterpolation(message), partPosAt(index, offset, end)) + .tap(_ => reported = true) end TypedFormatChecker diff --git a/compiler/src/dotty/tools/dotc/transform/localopt/StringInterpolatorOpt.scala b/compiler/src/dotty/tools/dotc/transform/localopt/StringInterpolatorOpt.scala index 7743054f5487..1afcfbac6206 100644 --- a/compiler/src/dotty/tools/dotc/transform/localopt/StringInterpolatorOpt.scala +++ b/compiler/src/dotty/tools/dotc/transform/localopt/StringInterpolatorOpt.scala @@ -96,16 +96,22 @@ class StringInterpolatorOpt extends MiniPhase: def mkConcat(strs: List[Literal], elems: List[Tree]): Tree = val stri = strs.iterator val elemi = elems.iterator - var result: Tree = stri.next + var result: Tree = stri.next() def concat(tree: Tree): Unit = result = result.select(defn.String_+).appliedTo(tree).withSpan(tree.span) while elemi.hasNext do - concat(elemi.next) - val str = stri.next + val elem = elemi.next() + lintToString(elem) + concat(elem) + val str = stri.next() if !str.const.stringValue.isEmpty then concat(str) result end mkConcat + def lintToString(t: Tree): Unit = + val arg: Type = t.tpe + if ctx.settings.Whas.toStringInterpolated && !(arg.widen =:= defn.StringType) && !arg.isPrimitiveValueType + then report.warning("interpolation uses toString", t.srcPos) val sym = tree.symbol // Test names first to avoid loading 
scala.StringContext if not used, and common names first val isInterpolatedMethod = diff --git a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala index 30b892ece470..ab5885e6278c 100644 --- a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala +++ b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala @@ -279,7 +279,7 @@ object SpaceEngine { || unappResult <:< ConstantType(Constant(true)) // only for unapply || (unapp.symbol.is(Synthetic) && unapp.symbol.owner.linkedClass.is(Case)) // scala2 compatibility || unapplySeqTypeElemTp(unappResult).exists // only for unapplySeq - || isProductMatch(unappResult, argLen) + || isProductMatch(unappResult.stripNamedTuple, argLen) || extractorMemberType(unappResult, nme.isEmpty, NoSourcePosition) <:< ConstantType(Constant(false)) || unappResult.derivesFrom(defn.NonEmptyTupleClass) || unapp.symbol == defn.TupleXXL_unapplySeq // Fixes TupleXXL.unapplySeq which returns Some but declares Option diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSExports.scala b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSExports.scala index 5aa35a277cb5..43b29a224564 100644 --- a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSExports.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSExports.scala @@ -21,7 +21,7 @@ import dotty.tools.backend.sjs.JSDefinitions.jsdefn import JSExportUtils.* import JSSymUtils.* -import dotty.tools.sjs.ir.Names.DefaultModuleID +import dotty.tools.sjs.ir.WellKnownNames.DefaultModuleID import dotty.tools.sjs.ir.Trees.TopLevelExportDef.isValidTopLevelExportName object PrepJSExports { diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala index c7316482c193..13fcbe542448 100644 --- a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala @@ -651,7 +651,7 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP val dotIndex = pathName.indexOf('.') val globalRef = if (dotIndex < 0) pathName - else pathName.substring(0, dotIndex).nn + else pathName.substring(0, dotIndex) checkGlobalRefName(globalRef) } diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index 71fc250d0710..78e2099511d7 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -36,6 +36,8 @@ import annotation.threadUnsafe import scala.util.control.NonFatal import dotty.tools.dotc.inlines.Inlines +import scala.annotation.tailrec +import dotty.tools.dotc.cc.isRetains object Applications { import tpd.* @@ -1115,7 +1117,9 @@ trait Applications extends Compatibility { val fun2 = Applications.retypeSignaturePolymorphicFn(fun1, methType) simpleApply(fun2, proto) case funRef: TermRef => - val app = ApplyTo(tree, fun1, funRef, proto, pt) + // println(i"typedApply: $funRef, ${tree.args}, ${funRef.symbol.maybeOwner.isRetains}") + val applyCtx = if funRef.symbol.maybeOwner.isRetains then ctx.addMode(Mode.InCaptureSet) else ctx + val app = ApplyTo(tree, fun1, funRef, proto, pt)(using applyCtx) convertNewGenericArray( widenEnumCase( postProcessByNameArgs(funRef, app).computeNullable(), @@ -1281,6 +1285,10 @@ trait Applications extends Compatibility { } else { val app = tree.fun match + case _ if ctx.mode.is(Mode.Type) && 
Feature.enabled(Feature.modularity) && !ctx.isAfterTyper => + untpd.methPart(tree.fun) match + case Select(nw @ New(_), _) => typedAppliedConstructorType(nw, tree.args, tree) + case _ => realApply case untpd.TypeApply(_: untpd.SplicePattern, _) if Feature.quotedPatternsWithPolymorphicFunctionsEnabled => typedAppliedSpliceWithTypes(tree, pt) case _: untpd.SplicePattern => typedAppliedSplice(tree, pt) @@ -1526,6 +1534,20 @@ trait Applications extends Compatibility { def trySelectUnapply(qual: untpd.Tree)(fallBack: (Tree, TyperState) => Tree): Tree = { // try first for non-overloaded, then for overloaded occurrences def tryWithName(name: TermName)(fallBack: (Tree, TyperState) => Tree)(using Context): Tree = + /** Returns `true` if there are type parameters after the last explicit + * (non-implicit) term parameters list. + */ + @tailrec + def hasTrailingTypeParams(paramss: List[List[Symbol]], acc: Boolean = false): Boolean = + paramss match + case Nil => acc + case params :: rest => + val newAcc = + params match + case param :: _ if param.isType => true + case param :: _ if param.isTerm && !param.isOneOf(GivenOrImplicit) => false + case _ => acc + hasTrailingTypeParams(paramss.tail, newAcc) def tryWithProto(qual: untpd.Tree, targs: List[Tree], pt: Type)(using Context) = val proto = UnapplyFunProto(pt, this) @@ -1533,7 +1555,13 @@ trait Applications extends Compatibility { val result = if targs.isEmpty then typedExpr(unapp, proto) else typedExpr(unapp, PolyProto(targs, proto)).appliedToTypeTrees(targs) - if !result.symbol.exists + if result.symbol.exists && hasTrailingTypeParams(result.symbol.paramSymss) then + // We don't accept `unapply` or `unapplySeq` methods with type + // parameters after the last explicit term parameter because we + // can't encode them: `UnApply` nodes cannot take type paremeters. + // See #22550 and associated test cases. + notAnExtractor(result) + else if !result.symbol.exists || result.symbol.name == name || ctx.reporter.hasErrors then result @@ -1694,6 +1722,28 @@ trait Applications extends Compatibility { def typedUnApply(tree: untpd.UnApply, selType: Type)(using Context): UnApply = throw new UnsupportedOperationException("cannot type check an UnApply node") + /** Typecheck an applied constructor type – An Apply node in Type mode. + * This expands to the type this term would have if it were typed as an expression. + * + * e.g. + * ```scala + * // class C(tracked val v: Any) + * val c: C(42) = ??? + * ``` + */ + def typedAppliedConstructorType(nw: untpd.New, args: List[untpd.Tree], tree: untpd.Apply)(using Context) = + val tree1 = typedExpr(tree) + val preciseTp = tree1.tpe.widenSkolems + val classTp = typedType(nw.tpt).tpe + def classSymbolHasOnlyTrackedParameters = + !classTp.classSymbol.primaryConstructor.paramSymss.nestedExists: param => + param.isTerm && !param.is(Tracked) + if !preciseTp.isError && !classSymbolHasOnlyTrackedParameters then + report.warning(OnlyFullyDependentAppliedConstructorType(), tree.srcPos) + if !preciseTp.isError && (preciseTp frozen_=:= classTp) then + report.warning(PointlessAppliedConstructorType(nw.tpt, args, classTp), tree.srcPos) + TypeTree(preciseTp) + /** Is given method reference applicable to argument trees `args`? 
* @param resultType The expected result type of the application */ @@ -2130,7 +2180,7 @@ trait Applications extends Compatibility { resultType.revealIgnored match { case resultType: ValueType => altType.widen match { - case tp: PolyType => resultConforms(altSym, tp.resultType, resultType) + case tp: PolyType => resultConforms(altSym, instantiateWithTypeVars(tp), resultType) case tp: MethodType => val wildRes = wildApprox(tp.resultType) diff --git a/compiler/src/dotty/tools/dotc/typer/Checking.scala b/compiler/src/dotty/tools/dotc/typer/Checking.scala index ec07fefc64ab..df4350f1eb05 100644 --- a/compiler/src/dotty/tools/dotc/typer/Checking.scala +++ b/compiler/src/dotty/tools/dotc/typer/Checking.scala @@ -37,7 +37,7 @@ import config.Feature, Feature.{sourceVersion, modularity} import config.SourceVersion.* import config.MigrationVersion import printing.Formatting.hlAsKeyword -import cc.{isCaptureChecking, isRetainsLike} +import cc.{isCaptureChecking, isRetainsLike, isUpdateMethod} import collection.mutable import reporting.* @@ -596,7 +596,7 @@ object Checking { if (sym.isConstructor && !sym.isPrimaryConstructor && sym.owner.is(Trait, butNot = JavaDefined)) val addendum = if ctx.settings.Ydebug.value then s" ${sym.owner.flagsString}" else "" fail(em"Traits cannot have secondary constructors$addendum") - checkApplicable(Inline, sym.isTerm && !sym.isOneOf(Mutable | Module)) + checkApplicable(Inline, sym.isTerm && !sym.is(Module) && !sym.isMutableVarOrAccessor) checkApplicable(Lazy, !sym.isOneOf(Method | Mutable)) if (sym.isType && !sym.isOneOf(Deferred | JavaDefined)) for (cls <- sym.allOverriddenSymbols.filter(_.isClass)) { @@ -605,8 +605,12 @@ object Checking { } if sym.isWrappedToplevelDef && !sym.isType && sym.flags.is(Infix, butNot = Extension) then fail(ModifierNotAllowedForDefinition(Flags.Infix, s"A top-level ${sym.showKind} cannot be infix.")) + if sym.isUpdateMethod && !sym.owner.derivesFrom(defn.Caps_Mutable) then + fail(em"Update methods can only be used as members of classes extending the `Mutable` trait") checkApplicable(Erased, - !sym.isOneOf(MutableOrLazy, butNot = Given) && !sym.isType || sym.isClass) + !sym.is(Lazy, butNot = Given) + && !sym.isMutableVarOrAccessor + && (!sym.isType || sym.isClass)) checkCombination(Final, Open) checkCombination(Sealed, Open) checkCombination(Final, Sealed) @@ -627,10 +631,11 @@ object Checking { */ def checkWellFormedModule(mdef: untpd.ModuleDef)(using Context) = val mods = mdef.mods - def flagSourcePos(flag: FlagSet) = - mods.mods.find(_.flags == flag).getOrElse(mdef).srcPos + def flagSourcePos(flag: Flag) = untpd.flagSourcePos(mdef, flag) if mods.is(Open) then report.error(ModifierNotAllowedForDefinition(Open), flagSourcePos(Open)) + if mods.is(Into) then + report.error(ModifierNotAllowedForDefinition(Into), flagSourcePos(Open)) if mods.is(Abstract) then report.error(ModifierNotAllowedForDefinition(Abstract), flagSourcePos(Abstract)) if mods.is(Sealed) then @@ -739,7 +744,7 @@ object Checking { } /** Verify classes extending AnyVal meet the requirements */ - def checkDerivedValueClass(clazz: Symbol, stats: List[Tree])(using Context): Unit = { + def checkDerivedValueClass(cdef: untpd.TypeDef, clazz: Symbol, stats: List[Tree])(using Context): Unit = { def checkValueClassMember(stat: Tree) = stat match { case _: TypeDef if stat.symbol.isClass => report.error(ValueClassesMayNotDefineInner(clazz, stat.symbol), stat.srcPos) @@ -752,6 +757,14 @@ object Checking { case _ => report.error(ValueClassesMayNotContainInitalization(clazz), stat.srcPos) 
} + inline def checkParentIsNotAnyValAlias(): Unit = + cdef.rhs match { + case impl: Template => + val parent = impl.parents.head + if parent.symbol.isAliasType && parent.typeOpt.dealias =:= defn.AnyValType then + report.error(ValueClassCannotExtendAliasOfAnyVal(clazz, parent.symbol), cdef.srcPos) + case _ => () + } // We don't check synthesised enum anonymous classes that are generated from // enum extending a value class type (AnyVal or an alias of it) // The error message 'EnumMayNotBeValueClassesID' will take care of generating the error message (See #22236) @@ -766,6 +779,9 @@ object Checking { report.error(ValueClassesMayNotBeAbstract(clazz), clazz.srcPos) if (!clazz.isStatic) report.error(ValueClassesMayNotBeContainted(clazz), clazz.srcPos) + + checkParentIsNotAnyValAlias() + if (isDerivedValueClass(underlyingOfValueClass(clazz.asClass).classSymbol)) report.error(ValueClassesMayNotWrapAnotherValueClass(clazz), clazz.srcPos) else { @@ -774,6 +790,8 @@ object Checking { } clParamAccessors match { case param :: params => + if (defn.isContextFunctionType(param.info)) + report.error("value classes are not allowed for context function types", param.srcPos) if (param.is(Mutable)) report.error(ValueClassParameterMayNotBeAVar(clazz, param), param.srcPos) if (param.info.isInstanceOf[ExprType]) @@ -1036,6 +1054,8 @@ trait Checking { pats.forall(recur(_, pt)) case Typed(arg, tpt) => check(pat, pt) && recur(arg, pt) + case NamedArg(name, pat) => + recur(pat, pt) case Ident(nme.WILDCARD) => true case pat: QuotePattern => @@ -1138,7 +1158,7 @@ trait Checking { if sym.name == nme.apply && sym.owner.derivesFrom(defn.ConversionClass) && !sym.info.isErroneous - && !expected.isInto + && !expected.isConversionTargetType then def conv = methPart(tree) match case Select(qual, _) => qual.symbol.orElse(sym.owner) @@ -1299,8 +1319,8 @@ trait Checking { else tpt /** Verify classes extending AnyVal meet the requirements */ - def checkDerivedValueClass(clazz: Symbol, stats: List[Tree])(using Context): Unit = - Checking.checkDerivedValueClass(clazz, stats) + def checkDerivedValueClass(cdef: untpd.TypeDef, clazz: Symbol, stats: List[Tree])(using Context): Unit = + Checking.checkDerivedValueClass(cdef, clazz, stats) /** Check that case classes are not inherited by case classes. 
*/ @@ -1681,7 +1701,7 @@ trait NoChecking extends ReChecking { override def checkNoTargetNameConflict(stats: List[Tree])(using Context): Unit = () override def checkParentCall(call: Tree, caller: ClassSymbol)(using Context): Unit = () override def checkSimpleKinded(tpt: Tree)(using Context): Tree = tpt - override def checkDerivedValueClass(clazz: Symbol, stats: List[Tree])(using Context): Unit = () + override def checkDerivedValueClass(cdef: untpd.TypeDef, clazz: Symbol, stats: List[Tree])(using Context): Unit = () override def checkCaseInheritance(parentSym: Symbol, caseCls: ClassSymbol, pos: SrcPos)(using Context): Unit = () override def checkNoForwardDependencies(vparams: List[ValDef])(using Context): Unit = () override def checkMembersOK(tp: Type, pos: SrcPos)(using Context): Type = tp diff --git a/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala b/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala index f0d4d617bb74..81a67d6a1df7 100644 --- a/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala @@ -78,7 +78,8 @@ class CrossVersionChecks extends MiniPhase: do val msg = annot.argumentConstantString(0).map(msg => s": $msg").getOrElse("") val since = annot.argumentConstantString(1).map(version => s" (since: $version)").getOrElse("") - report.deprecationWarning(em"inheritance from $psym is deprecated$since$msg", parent.srcPos, origin=psym.showFullName) + val composed = em"inheritance from $psym is deprecated$since$msg" + report.deprecationWarning(composed, parent.srcPos, origin = psym.showFullName) } private def unrollError(pos: SrcPos)(using Context): Unit = @@ -203,16 +204,19 @@ object CrossVersionChecks: * Also check for deprecation of the companion class for synthetic methods in the companion module. */ private[CrossVersionChecks] def checkDeprecatedRef(sym: Symbol, pos: SrcPos)(using Context): Unit = - def maybeWarn(annotee: Symbol, annot: Annotation) = if !skipWarning(sym) then + def warn(annotee: Symbol, annot: Annotation) = val message = annot.argumentConstantString(0).filter(!_.isEmpty).map(": " + _).getOrElse("") val since = annot.argumentConstantString(1).filter(!_.isEmpty).map(" since " + _).getOrElse("") - report.deprecationWarning(em"${annotee.showLocated} is deprecated${since}${message}", pos, origin=annotee.showFullName) + val composed = em"${annotee.showLocated} is deprecated${since}${message}" + report.deprecationWarning(composed, pos, origin = annotee.showFullName) sym.getAnnotation(defn.DeprecatedAnnot) match - case Some(annot) => maybeWarn(sym, annot) + case Some(annot) => if !skipWarning(sym) then warn(sym, annot) case _ => if sym.isAllOf(SyntheticMethod) then val companion = sym.owner.companionClass - if companion.is(CaseClass) then companion.getAnnotation(defn.DeprecatedAnnot).foreach(maybeWarn(companion, _)) + if companion.is(CaseClass) then + for annot <- companion.getAnnotation(defn.DeprecatedAnnot) if !skipWarning(sym) do + warn(companion, annot) /** Decide whether the deprecation of `sym` should be ignored in this context. 
* diff --git a/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala b/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala index 13e75be75838..a6254c0d5c00 100644 --- a/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala +++ b/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala @@ -85,7 +85,7 @@ object ErrorReporting { /** An explanatory note to be added to error messages * when there's a problem with abstract var defs */ def abstractVarMessage(sym: Symbol): String = - if (sym.underlyingSymbol.is(Mutable)) + if sym.underlyingSymbol.isMutableVarOrAccessor then "\n(Note that variables need to be initialized to be defined)" else "" @@ -294,7 +294,7 @@ object ErrorReporting { def dependentMsg = """Term-dependent types are experimental, - |they must be enabled with a `experimental.dependent` language import or setting""".stripMargin.toMessage + |they must be enabled with a `experimental.modularity` language import or setting""".stripMargin.toMessage def err(using Context): Errors = new Errors } diff --git a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala index 7f040ccd2968..520c8bf62ba4 100644 --- a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala +++ b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala @@ -509,10 +509,15 @@ object Inferencing { } } } - val res = patternBindings.toList.map { (boundSym, _) => + val res = patternBindings.toList.map { (boundSym, origin) => // substitute bounds of pattern bound variables to deal with possible F-bounds for (wildCard, param) <- patternBindings do boundSym.info = boundSym.info.substParam(param, wildCard.typeRef) + + // also substitute in any GADT bounds + // e.g. in i22879, replace the `T` in `X <: Iterable[T]` with the pattern bound `T$1` + ctx.gadtState.replace(origin, boundSym.typeRef) + boundSym } diff --git a/compiler/src/dotty/tools/dotc/typer/Migrations.scala b/compiler/src/dotty/tools/dotc/typer/Migrations.scala index 0e6dc27ecf7f..5f9fc928e7d6 100644 --- a/compiler/src/dotty/tools/dotc/typer/Migrations.scala +++ b/compiler/src/dotty/tools/dotc/typer/Migrations.scala @@ -21,6 +21,8 @@ import NameKinds.ContextBoundParamName import rewrites.Rewrites.patch import util.Spans.Span import rewrites.Rewrites +import dotty.tools.dotc.rewrites.Rewrites.ActionPatch +import dotty.tools.dotc.util.SourcePosition /** A utility trait containing source-dependent deprecation messages * and migrations. @@ -130,14 +132,33 @@ trait Migrations: def implicitParams(tree: Tree, tp: MethodOrPoly, pt: FunProto)(using Context): Unit = val mversion = mv.ImplicitParamsWithoutUsing if tp.companion == ImplicitMethodType && pt.applyKind != ApplyKind.Using && pt.args.nonEmpty then - val rewriteMsg = Message.rewriteNotice("This code", mversion.patchFrom) - report.errorOrMigrationWarning( + // The application can only be rewritten if it uses parentheses syntax. + // See issue #22927 and related tests. 
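// A minimal sketch of the call shape this migration concerns (method and
// argument names are invented for illustration):
//   def greet(name: String)(implicit punct: String): String = name + punct
//   greet("hi")("!")        // implicit argument passed explicitly: warned; since the
//                           // argument sits in parentheses it can be patched to `using`
//   greet("hi")(using "!")  // the migrated form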
+ val hasParentheses = + ctx.source.content + .slice(tree.span.end, pt.args.head.span.start) + .exists(_ == '(') + val rewriteMsg = + if hasParentheses then + Message.rewriteNotice("This code", mversion.patchFrom) + else "" + val message = em"""Implicit parameters should be provided with a `using` clause.$rewriteMsg - |To disable the warning, please use the following option: + |To disable the warning, please use the following option: | "-Wconf:msg=Implicit parameters should be provided with a `using` clause:s" - |""", - pt.args.head.srcPos, mversion) - if mversion.needsPatch then + |""" + val codeAction = CodeAction( + title = "Add `using` clause", + description = None, + patches = List(ActionPatch(pt.args.head.startPos.sourcePos, "using ")) + ) + val withActions = message.withActions(codeAction) + report.errorOrMigrationWarning( + withActions, + pt.args.head.srcPos, + mversion + ) + if hasParentheses && mversion.needsPatch then patch(Span(pt.args.head.span.start), "using ") end implicitParams diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index 0442c72af6f0..a0e06cc7119d 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -134,7 +134,7 @@ class Namer { typer: Typer => * The logic here is very subtle and fragile due to the fact that * we are not allowed to force anything. */ - def checkNoConflict(name: Name, isPrivate: Boolean, span: Span)(using Context): Name = + def checkNoConflict(name: Name, span: Span)(using Context): Name = val owner = ctx.owner var conflictsDetected = false @@ -169,7 +169,7 @@ class Namer { typer: Typer => def preExisting = ctx.effectiveScope.lookup(name) if (!owner.isClass || name.isTypeName) && preExisting.exists then conflict(preExisting) - else if owner.isPackageObject && !isPrivate && name != nme.CONSTRUCTOR then + else if owner.isPackageObject && name != nme.CONSTRUCTOR then checkNoConflictIn(owner.owner) for pkgObj <- pkgObjs(owner.owner) if pkgObj != owner do checkNoConflictIn(pkgObj) @@ -247,9 +247,9 @@ class Namer { typer: Typer => tree match { case tree: TypeDef if tree.isClassDef => var flags = checkFlags(tree.mods.flags) - if ctx.settings.YcompileScala2Library.value then + if Feature.shouldBehaveAsScala2 then flags |= Scala2x - val name = checkNoConflict(tree.name, flags.is(Private), tree.span).asTypeName + val name = checkNoConflict(tree.name, tree.span).asTypeName val cls = createOrRefine[ClassSymbol](tree, name, flags, ctx.owner, cls => adjustIfModule(new ClassCompleter(cls, tree)(ctx), tree), @@ -258,7 +258,7 @@ class Namer { typer: Typer => cls case tree: MemberDef => var flags = checkFlags(tree.mods.flags) - val name = checkNoConflict(tree.name, flags.is(Private), tree.span) + val name = checkNoConflict(tree.name, tree.span) tree match case tree: ValOrDefDef => if tree.isInstanceOf[ValDef] && !flags.is(Param) && name.endsWith("_=") then @@ -833,7 +833,9 @@ class Namer { typer: Typer => protected def typeSig(sym: Symbol): Type = original match case original: ValDef => if (sym.is(Module)) moduleValSig(sym) - else valOrDefDefSig(original, sym, Nil, identity)(using localContext(sym).setNewScope) + else + valOrDefDefSig(original, sym, Nil, identity)(using localContext(sym).setNewScope) + .suppressIntoIfParam(sym) case original: DefDef => // For the primary constructor DefDef, it is: // * indexed as a part of completing the class, with indexConstructor; and @@ -1223,20 +1225,21 @@ class Namer { typer: Typer => Yes } - def 
foreachDefaultGetterOf(sym: TermSymbol, op: TermSymbol => Unit): Unit = + def foreachDefaultGetterOf(sym: TermSymbol, alias: TermName)(op: (TermSymbol, TermName) => Unit): Unit = var n = 0 - val methodName = - if sym.name == nme.apply && sym.is(Synthetic) && sym.owner.companionClass.is(Case) then - // The synthesized `apply` methods of case classes use the constructor's default getters - nme.CONSTRUCTOR - else sym.name + // The synthesized `apply` methods of case classes use the constructor's default getters + val useConstructor = sym.name == nme.apply && sym.is(Synthetic) && sym.owner.companionClass.is(Case) + val methodName = if useConstructor then nme.CONSTRUCTOR else sym.name + val aliasedName = if useConstructor then nme.CONSTRUCTOR else alias + val useAliased = !useConstructor && methodName != aliasedName for params <- sym.paramSymss; param <- params do if param.isTerm then if param.is(HasDefault) then val getterName = DefaultGetterName(methodName, n) val getter = pathType.member(getterName).symbol assert(getter.exists, i"$path does not have a default getter named $getterName") - op(getter.asTerm) + val targetName = if useAliased then DefaultGetterName(aliasedName, n) else getterName + op(getter.asTerm, targetName) n += 1 /** Add a forwarder with name `alias` or its type name equivalent to `mbr`, @@ -1264,7 +1267,7 @@ class Namer { typer: Typer => val hasDefaults = sym.hasDefaultParams // compute here to ensure HasDefaultParams and NoDefaultParams flags are set val forwarder = if mbr.isType then - val forwarderName = checkNoConflict(alias.toTypeName, isPrivate = false, span) + val forwarderName = checkNoConflict(alias.toTypeName, span) var target = pathType.select(sym) if target.typeParams.nonEmpty then target = target.etaExpand @@ -1320,7 +1323,7 @@ class Namer { typer: Typer => (EmptyFlags, mbrInfo) var mbrFlags = MandatoryExportTermFlags | maybeStable | (sym.flags & RetainedExportTermFlags) if pathMethod.exists then mbrFlags |= ExtensionMethod - val forwarderName = checkNoConflict(alias, isPrivate = false, span) + val forwarderName = checkNoConflict(alias, span) newSymbol(cls, forwarderName, mbrFlags, mbrInfo, coord = span) forwarder.info = avoidPrivateLeaks(forwarder) @@ -1343,7 +1346,7 @@ class Namer { typer: Typer => val ddef = tpd.DefDef(forwarder.asTerm, prefss => { val forwarderCtx = ctx.withOwner(forwarder) val (pathRefss, methRefss) = prefss.splitAt(extensionParamsCount(path.tpe.widen)) - val ref = path.appliedToArgss(pathRefss).select(sym.asTerm) + val ref = path.appliedToArgss(pathRefss).select(sym.asTerm).withSpan(span.focus) val rhs = ref.appliedToArgss(adaptForwarderParams(Nil, sym.info, methRefss)) .etaExpandCFT(using forwarderCtx) if forwarder.isInlineMethod then @@ -1358,9 +1361,8 @@ class Namer { typer: Typer => }) buf += ddef.withSpan(span) if hasDefaults then - foreachDefaultGetterOf(sym.asTerm, - getter => addForwarder( - getter.name.asTermName, getter.asSeenFrom(path.tpe), span)) + foreachDefaultGetterOf(sym.asTerm, alias): (getter, getterName) => + addForwarder(getterName, getter.asSeenFrom(path.tpe), span) // adding annotations and flags at the parameter level // TODO: This probably needs to be filtered to avoid adding some annotation @@ -1415,13 +1417,13 @@ class Namer { typer: Typer => addWildcardForwardersNamed(alias, span) def addForwarders(sels: List[untpd.ImportSelector], seen: List[TermName]): Unit = sels match - case sel :: sels1 => + case sel :: sels => if sel.isWildcard then addWildcardForwarders(seen, sel.span) else if !sel.isUnimport then 
addForwardersNamed(sel.name, sel.rename, sel.span) - addForwarders(sels1, sel.name :: seen) + addForwarders(sels, sel.name :: seen) case _ => /** Avoid a clash of export forwarder `forwarder` with other forwarders in `forwarders`. diff --git a/compiler/src/dotty/tools/dotc/typer/Nullables.scala b/compiler/src/dotty/tools/dotc/typer/Nullables.scala index 310ca999f4c5..86b9a337e69a 100644 --- a/compiler/src/dotty/tools/dotc/typer/Nullables.scala +++ b/compiler/src/dotty/tools/dotc/typer/Nullables.scala @@ -253,7 +253,7 @@ object Nullables: val mutables = infos.foldLeft(Set[TermRef]()): (ms, info) => ms.union( if info.asserted == null then Set.empty - else info.asserted.filter(_.symbol.is(Mutable))) + else info.asserted.filter(_.symbol.isMutableVarOrAccessor)) infos.extendWith(NotNullInfo(Set(), mutables)) end extension @@ -307,7 +307,7 @@ object Nullables: || s.isClass // not in a class || recur(s.owner)) - refSym.is(Mutable) // if it is immutable, we don't need to check the rest conditions + refSym.isMutableVarOrAccessor // if it is immutable, we don't need to check the rest conditions && refOwner.isTerm && recur(ctx.owner) end extension @@ -574,7 +574,7 @@ object Nullables: object dropNotNull extends TreeMap: var dropped: Boolean = false override def transform(t: Tree)(using Context) = t match - case AssertNotNull(t0) if t0.symbol.is(Mutable) => + case AssertNotNull(t0) if t0.symbol.isMutableVarOrAccessor => nullables.println(i"dropping $t") dropped = true transform(t0) diff --git a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala index 0cc4aaabfc93..0c5382d8849d 100644 --- a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala +++ b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala @@ -427,21 +427,28 @@ object ProtoTypes { * - t2 is a ascription (t22: T) and t1 is at the outside of t22 * - t2 is a closure (...) => t22 and t1 is at the outside of t22 */ - def hasInnerErrors(t: Tree)(using Context): Boolean = t match - case Typed(expr, tpe) => hasInnerErrors(expr) - case closureDef(mdef) => hasInnerErrors(mdef.rhs) + def hasInnerErrors(t: Tree, argType: Type)(using Context): Boolean = t match + case Typed(expr, tpe) => hasInnerErrors(expr, argType) + case closureDef(mdef) => hasInnerErrors(mdef.rhs, argType) case _ => t.existsSubTree { t1 => if t1.typeOpt.isError && t.span.toSynthetic != t1.span.toSynthetic && t.typeOpt != t1.typeOpt then typr.println(i"error subtree $t1 of $t with ${t1.typeOpt}, spans = ${t1.span}, ${t.span}") - true + t1.typeOpt match + case errorType: ErrorType if errorType.msg.isInstanceOf[TypeMismatchMsg] => + // if error is caused by an argument type mismatch, + // then return false to try to find an extension. + // see i20335.scala for test case. 
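// Roughly the kind of code this fallback is meant to accept (simplified, invented names):
//   class Events { def register(f: Int => Unit): Unit = () }
//   extension (e: Events) def register(f: (Int, Int) => Unit): Unit = ()
//   Events().register((a, b) => ())  // the member `register` fails with an argument
//                                    // type mismatch, so the extension is still tried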
+ val typeMismtachMsg = errorType.msg.asInstanceOf[TypeMismatchMsg] + argType != typeMismtachMsg.expected + case _ => true else false } - private def cacheTypedArg(arg: untpd.Tree, typerFn: untpd.Tree => Tree, force: Boolean)(using Context): Tree = { + private def cacheTypedArg(arg: untpd.Tree, typerFn: untpd.Tree => Tree, force: Boolean, argType: Type)(using Context): Tree = { var targ = state.typedArg(arg) if (targ == null) untpd.functionWithUnknownParamType(arg) match { @@ -459,7 +466,7 @@ object ProtoTypes { targ = typerFn(arg) // TODO: investigate why flow typing is not working on `targ` if ctx.reporter.hasUnreportedErrors then - if hasInnerErrors(targ.nn) then + if hasInnerErrors(targ.nn, argType) then state.errorArgs += arg else state.typedArg = state.typedArg.updated(arg, targ.nn) @@ -487,7 +494,7 @@ object ProtoTypes { val protoTyperState = ctx.typerState val oldConstraint = protoTyperState.constraint val args1 = args.mapWithIndexConserve((arg, idx) => - cacheTypedArg(arg, arg => typer.typed(norm(arg, idx)), force = false)) + cacheTypedArg(arg, arg => typer.typed(norm(arg, idx)), force = false, NoType)) val newConstraint = protoTyperState.constraint if !args1.exists(arg => isUndefined(arg.tpe)) then state.typedArgs = args1 @@ -534,7 +541,8 @@ object ProtoTypes { val locked = ctx.typerState.ownedVars val targ = cacheTypedArg(arg, typer.typedUnadapted(_, wideFormal, locked)(using argCtx), - force = true) + force = true, + wideFormal) val targ1 = typer.adapt(targ, wideFormal, locked) if wideFormal eq formal then targ1 else checkNoWildcardCaptureForCBN(targ1) @@ -1017,6 +1025,15 @@ object ProtoTypes { paramInfos = tl.paramInfos.mapConserve(wildApprox(_, theMap, seen, internal1).bounds), resType = wildApprox(tl.resType, theMap, seen, internal1) ) + case tp @ AnnotatedType(parent, _) => + // This case avoids approximating types in the annotation tree, which can + // cause the type assigner to fail. + // See #22893 and tests/pos/annot-default-arg-22874.scala. + val parentApprox = wildApprox(parent, theMap, seen, internal) + if tp.isRefining then + WildcardType(TypeBounds.upper(parentApprox)) + else + parentApprox case _ => (if (theMap != null && seen.eq(theMap.seen)) theMap else new WildApproxMap(seen, internal)) .mapOver(tp) diff --git a/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala b/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala index 59993a69797d..4e7c4336b852 100644 --- a/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala +++ b/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala @@ -130,7 +130,7 @@ trait QuotesAndSplices { report.error("Open pattern expected an identifier", arg.srcPos) EmptyTree } - for arg <- typedArgs if arg.symbol.is(Mutable) do // TODO support these patterns. Possibly using scala.quoted.util.Var + for arg <- typedArgs if arg.symbol.isMutableVarOrAccessor do // TODO support these patterns. 
Possibly using scala.quoted.util.Var report.error("References to `var`s cannot be used in higher-order pattern", arg.srcPos) val argTypes = typedArgs.map(_.tpe.widenTermRefExpr) val patType = (tree.typeargs.isEmpty, tree.args.isEmpty) match diff --git a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala index a015348e90a7..c500b6cc60bb 100644 --- a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala @@ -21,7 +21,7 @@ import config.MigrationVersion import config.Printers.refcheck import reporting.* import Constants.Constant -import cc.stripCapturing +import cc.{stripCapturing, isUpdateMethod, CCState} object RefChecks { import tpd.* @@ -107,7 +107,9 @@ object RefChecks { def checkSelfConforms(other: ClassSymbol) = var otherSelf = other.declaredSelfTypeAsSeenFrom(cls.thisType) if otherSelf.exists then - if !(cinfo.selfType <:< otherSelf) then + if !CCState.withCapAsRoot: // OK? We need this here since self types use `cap` instead of `fresh` + cinfo.selfType <:< otherSelf + then report.error(DoesNotConformToSelfType("illegal inheritance", cinfo.selfType, cls, otherSelf, "parent", other), cls.srcPos) @@ -240,25 +242,24 @@ object RefChecks { && (inLinearizationOrder(sym1, sym2, parent) || parent.is(JavaDefined)) && !sym2.is(AbsOverride) - /** Checks the subtype relationship tp1 <:< tp2. - * It is passed to the `checkOverride` operation in `checkAll`, to be used for - * compatibility checking. - */ - def checkSubType(tp1: Type, tp2: Type)(using Context): Boolean = tp1 frozen_<:< tp2 - /** A hook that allows to omit override checks between `overriding` and `overridden`. * Overridden in capture checking to handle non-capture checked classes leniently. */ def needsCheck(overriding: Symbol, overridden: Symbol)(using Context): Boolean = true - protected def additionalChecks(overriding: Symbol, overridden: Symbol)(using Context): Unit = () + /** Adapt member type and other type so that they can be compared with `frozen_<:<`. + * @return optionally, if adaptation is necessary, the pair of adapted types (memberTp', otherTp') + * Note: we return an Option result to avoid a tuple allocation in the normal case + * where no adaptation is necessary. + */ + def adaptOverridePair(member: Symbol, memberTp: Type, otherTp: Type)(using Context): Option[(Type, Type)] = None - private val subtypeChecker: (Type, Type) => Context ?=> Boolean = this.checkSubType + protected def additionalChecks(overriding: Symbol, overridden: Symbol)(using Context): Unit = () - def checkAll(checkOverride: ((Type, Type) => Context ?=> Boolean, Symbol, Symbol) => Unit) = + def checkAll(checkOverride: (Symbol, Symbol) => Unit) = while hasNext do if needsCheck(overriding, overridden) then - checkOverride(subtypeChecker, overriding, overridden) + checkOverride(overriding, overridden) additionalChecks(overriding, overridden) next() @@ -273,7 +274,7 @@ object RefChecks { if dcl.is(Deferred) then for other <- dcl.allOverriddenSymbols do if !other.is(Deferred) then - checkOverride(subtypeChecker, dcl, other) + checkOverride(dcl, other) end checkAll // Disabled for capture checking since traits can get different parameter refinements @@ -373,52 +374,6 @@ object RefChecks { && atPhase(typerPhase): loop(member.info.paramInfoss, other.info.paramInfoss) - /** A map of all occurrences of `into` in a member type. 
- * Key: number of parameter carrying `into` annotation(s) - * Value: A list of all depths of into annotations, where each - * function arrow increases the depth. - * Example: - * def foo(x: into A, y: => [X] => into (x: X) => into B): C - * produces the map - * (0 -> List(0), 1 -> List(1, 2)) - */ - type IntoOccurrenceMap = immutable.Map[Int, List[Int]] - - def intoOccurrences(tp: Type): IntoOccurrenceMap = - - def traverseInfo(depth: Int, tp: Type): List[Int] = tp match - case AnnotatedType(tp, annot) if annot.symbol == defn.IntoParamAnnot => - depth :: traverseInfo(depth, tp) - case AppliedType(tycon, arg :: Nil) if tycon.typeSymbol == defn.RepeatedParamClass => - traverseInfo(depth, arg) - case defn.FunctionOf(_, resType, _) => - traverseInfo(depth + 1, resType) - case RefinedType(parent, rname, mt: MethodOrPoly) => - traverseInfo(depth, mt) - case tp: MethodOrPoly => - traverseInfo(depth + 1, tp.resType) - case tp: ExprType => - traverseInfo(depth, tp.resType) - case _ => - Nil - - def traverseParams(n: Int, formals: List[Type], acc: IntoOccurrenceMap): IntoOccurrenceMap = - if formals.isEmpty then acc - else - val occs = traverseInfo(0, formals.head) - traverseParams(n + 1, formals.tail, if occs.isEmpty then acc else acc + (n -> occs)) - - def traverse(n: Int, tp: Type, acc: IntoOccurrenceMap): IntoOccurrenceMap = tp match - case tp: PolyType => - traverse(n, tp.resType, acc) - case tp: MethodType => - traverse(n + tp.paramInfos.length, tp.resType, traverseParams(n, tp.paramInfos, acc)) - case _ => - acc - - traverse(0, tp, immutable.Map.empty) - end intoOccurrences - val checker = if makeOverridingPairsChecker == null then OverridingPairsChecker(clazz, self) else makeOverridingPairsChecker(clazz, self) @@ -426,19 +381,27 @@ object RefChecks { /* Check that all conditions for overriding `other` by `member` * of class `clazz` are met. 
*/ - def checkOverride(checkSubType: (Type, Type) => Context ?=> Boolean, member: Symbol, other: Symbol): Unit = - def memberTp(self: Type) = + def checkOverride(member: Symbol, other: Symbol): Unit = + def memberType(self: Type) = if (member.isClass) TypeAlias(member.typeRef.etaExpand) else self.memberInfo(member) - def otherTp(self: Type) = - self.memberInfo(other) + def otherType(self: Type) = + self.memberInfo(other) + + var memberTp = memberType(self) + var otherTp = otherType(self) + checker.adaptOverridePair(member, memberTp, otherTp) match + case Some((mtp, otp)) => + memberTp = mtp + otherTp = otp + case None => refcheck.println(i"check override ${infoString(member)} overriding ${infoString(other)}") - def noErrorType = !memberTp(self).isErroneous && !otherTp(self).isErroneous + def noErrorType = !memberTp.isErroneous && !otherTp.isErroneous def overrideErrorMsg(core: Context ?=> String, compareTypes: Boolean = false): Message = - val (mtp, otp) = if compareTypes then (memberTp(self), otherTp(self)) else (NoType, NoType) + val (mtp, otp) = if compareTypes then (memberTp, otherTp) else (NoType, NoType) OverrideError(core, self, member, other, mtp, otp) def compatTypes(memberTp: Type, otherTp: Type): Boolean = @@ -446,8 +409,8 @@ object RefChecks { isOverridingPair(member, memberTp, other, otherTp, fallBack = warnOnMigration( overrideErrorMsg("no longer has compatible type"), - (if (member.owner == clazz) member else clazz).srcPos, version = `3.0`), - isSubType = checkSubType) + (if member.owner == clazz then member else clazz).srcPos, + version = `3.0`)) catch case ex: MissingType => // can happen when called with upwardsSelf as qualifier of memberTp and otherTp, // because in that case we might access types that are not members of the qualifier. @@ -467,7 +430,7 @@ object RefChecks { // with box adaptation, we simply ignore capture annotations here. // This should be safe since the compatibility under box adaptation is already // checked. - memberTp(self).matches(otherTp(self)) + memberTp.matches(otherTp) } def emitOverrideError(fullmsg: Message) = @@ -595,7 +558,7 @@ object RefChecks { overrideError("needs `override` modifier") else if (other.is(AbsOverride) && other.isIncompleteIn(clazz) && !member.is(AbsOverride)) overrideError("needs `abstract override` modifiers") - else if member.is(Override) && other.is(Mutable) then + else if member.is(Override) && other.isMutableVarOrAccessor then overrideError("cannot override a mutable variable") else if (member.isAnyOverride && !(member.owner.thisType.baseClasses exists (_ isSubClass other.owner)) && @@ -616,16 +579,27 @@ object RefChecks { overrideError("is erased, cannot override non-erased member") else if (other.is(Erased) && !member.isOneOf(Erased | Inline)) // (1.9) overrideError("is not erased, cannot override erased member") + else if member.isUpdateMethod && !other.is(Mutable) then + overrideError(i"is an update method, cannot override a read-only method") else if other.is(Inline) && !member.is(Inline) then // (1.10) overrideError("is not inline, cannot implement an inline method") else if (other.isScala2Macro && !member.isScala2Macro) // (1.11) overrideError("cannot be used here - only Scala-2 macros can override Scala-2 macros") - else if !compatTypes(memberTp(self), otherTp(self)) - && !compatTypes(memberTp(upwardsSelf), otherTp(upwardsSelf)) + else if !compatTypes(memberTp, otherTp) && !member.is(Tracked) // Tracked members need to be excluded since they are abstract type members with // singleton types. 
Concrete overrides usually have a wider type. // TODO: Should we exclude all refinements inherited from parents? + && { + var memberTpUp = memberType(upwardsSelf) + var otherTpUp = otherType(upwardsSelf) + checker.adaptOverridePair(member, memberTpUp, otherTpUp) match + case Some((mtp, otp)) => + memberTpUp = mtp + otherTpUp = otp + case _ => + !compatTypes(memberTpUp, otherTpUp) + } then overrideError("has incompatible type", compareTypes = true) else if (member.targetName != other.targetName) @@ -633,8 +607,6 @@ object RefChecks { overrideError(i"needs to be declared with @targetName(${"\""}${other.targetName}${"\""}) so that external names match") else overrideError("cannot have a @targetName annotation since external names would be different") - else if intoOccurrences(memberTp(self)) != intoOccurrences(otherTp(self)) then - overrideError("has different occurrences of `into` modifiers", compareTypes = true) else if other.is(ParamAccessor) && !isInheritedAccessor(member, other) && !member.is(Tracked) // see remark on tracked members above then // (1.12) @@ -775,7 +747,7 @@ object RefChecks { // Give a specific error message for abstract vars based on why it fails: // It could be unimplemented, have only one accessor, or be uninitialized. - if (underlying.is(Mutable)) { + if underlying.isMutableVarOrAccessor then val isMultiple = grouped.getOrElse(underlying.name, Nil).size > 1 // If both getter and setter are missing, squelch the setter error. @@ -784,7 +756,6 @@ object RefChecks { if (member.isSetter) "\n(Note that an abstract var requires a setter in addition to the getter)" else if (member.isGetter && !isMultiple) "\n(Note that an abstract var requires a getter in addition to the setter)" else err.abstractVarMessage(member)) - } else if (underlying.is(Method)) { // If there is a concrete method whose name matches the unimplemented // abstract method, and a cursory examination of the difference reveals @@ -1147,8 +1118,8 @@ object RefChecks { * This check is suppressed if the method is an override. (Because the type of the receiver * may be narrower in the override.) * - * If the extension method is nullary, it is always hidden by a member of the same name. - * (Either the member is nullary, or the reference is taken as the eta-expansion of the member.) + * If the extension method is nilary, it is always hidden by a member of the same name. + * (Either the member is nilary, or the reference is taken as the eta-expansion of the member.) * * This check is in lieu of a more expensive use-site check that an application failed to use an extension. * That check would account for accessibility and opacity. 
As a limitation, this check considers @@ -1172,25 +1143,29 @@ object RefChecks { extension (tp: Type) def explicit = Applications.stripImplicit(tp.stripPoly, wildcardOnly = true) def hasImplicitParams = tp.stripPoly match { case mt: MethodType => mt.isImplicitMethod case _ => false } + def isNilary = tp.stripPoly match { case mt: MethodType => false case _ => true } val explicitInfo = sym.info.explicit // consider explicit value params - val target0 = explicitInfo.firstParamTypes.head // required for extension method, the putative receiver - val target = target0.dealiasKeepOpaques.typeSymbol.info - val methTp = explicitInfo.resultType // skip leading implicits and the "receiver" parameter - def memberMatchesMethod(member: Denotation) = + def memberHidesMethod(member: Denotation): Boolean = + val methTp = explicitInfo.resultType // skip leading implicits and the "receiver" parameter + if methTp.isNilary then + return true // extension without parens is always hidden by a member of same name val memberIsImplicit = member.info.hasImplicitParams - val paramTps = - if memberIsImplicit then methTp.stripPoly.firstParamTypes - else methTp.explicit.firstParamTypes inline def paramsCorrespond = + val paramTps = + if memberIsImplicit then methTp.stripPoly.firstParamTypes + else methTp.explicit.firstParamTypes val memberParamTps = member.info.stripPoly.firstParamTypes memberParamTps.corresponds(paramTps): (m, x) => m.typeSymbol.denot.isOpaqueAlias == x.typeSymbol.denot.isOpaqueAlias && (x frozen_<:< m) - paramTps.isEmpty || memberIsImplicit && !methTp.hasImplicitParams || paramsCorrespond - def hidden = - target.nonPrivateMember(sym.name) - .filterWithPredicate: member => - member.symbol.isPublic && memberMatchesMethod(member) - .exists + memberIsImplicit && !methTp.hasImplicitParams || paramsCorrespond + def targetOfHiddenExtension: Symbol = + val target = + val target0 = explicitInfo.firstParamTypes.head // required for extension method, the putative receiver + target0.dealiasKeepOpaques.typeSymbol.info + val member = target.nonPrivateMember(sym.name) + .filterWithPredicate: member => + member.symbol.isPublic && memberHidesMethod(member) + if member.exists then target.typeSymbol else NoSymbol if sym.is(HasDefaultParams) then val getterDenot = val receiverName = explicitInfo.firstParamNames.head @@ -1199,8 +1174,10 @@ object RefChecks { sym.owner.info.member(getterName) if getterDenot.exists then report.warning(ExtensionHasDefault(sym), getterDenot.symbol.srcPos) - if !sym.nextOverriddenSymbol.exists && hidden - then report.warning(ExtensionNullifiedByMember(sym, target.typeSymbol), sym.srcPos) + if !sym.nextOverriddenSymbol.exists then + val target = targetOfHiddenExtension + if target.exists then + report.warning(ExtensionNullifiedByMember(sym, target), sym.srcPos) end checkExtensionMethods /** Verify that references in the user-defined `@implicitNotFound` message are valid. 
@@ -1277,9 +1254,9 @@ object RefChecks { val matches = referencePattern.findAllIn(s) for reference <- matches do val referenceOffset = matches.start - val prefixlessReference = reference.replaceFirst("""\$\{\s*""", "").nn + val prefixlessReference = reference.replaceFirst("""\$\{\s*""", "") val variableOffset = referenceOffset + reference.length - prefixlessReference.length - val variableName = prefixlessReference.replaceFirst("""\s*\}""", "").nn + val variableName = prefixlessReference.replaceFirst("""\s*\}""", "") f(variableName, variableOffset) end checkImplicitNotFoundAnnotation diff --git a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala index 5111a9517fab..9c1e4951a798 100644 --- a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala @@ -232,6 +232,8 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): withNoErrors(success(Literal(Constant(())))) case n: TermRef => withNoErrors(success(ref(n))) + case ts: ThisType => + withNoErrors(success(This(ts.cls))) case tp => EmptyTreeNoError case _ => diff --git a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala index 28af86344621..4d16a342f484 100644 --- a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala +++ b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala @@ -12,6 +12,7 @@ import collection.mutable import reporting.* import Checking.{checkNoPrivateLeaks, checkNoWildcard} import cc.CaptureSet +import transform.Splicer trait TypeAssigner { import tpd.* @@ -301,7 +302,10 @@ trait TypeAssigner { if fntpe.isResultDependent then safeSubstMethodParams(fntpe, args.tpes) else fntpe.resultType // fast path optimization else - errorType(em"wrong number of arguments at ${ctx.phase.prev} for $fntpe: ${fn.tpe}, expected: ${fntpe.paramInfos.length}, found: ${args.length}", tree.srcPos) + val erroringPhase = + if Splicer.inMacroExpansion then i"${ctx.phase} (while expanding macro)" + else ctx.phase.prev.toString + errorType(em"wrong number of arguments at $erroringPhase for $fntpe: ${fn.tpe}, expected: ${fntpe.paramInfos.length}, found: ${args.length}", tree.srcPos) case err: ErrorType => err case t => diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index be3186720fa1..30d3add2529f 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -670,7 +670,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val checkedType = checkNotShadowed(ownType) val tree1 = checkedType match case checkedType: NamedType if !prefixIsElidable(checkedType) => - ref(checkedType).withSpan(tree.span) + ref(checkedType).withSpan(tree.span).withAttachmentsFrom(tree) case _ => def isScalaModuleRef = checkedType match case moduleRef: TypeRef if moduleRef.symbol.is(ModuleClass, butNot = JavaDefined) => true @@ -715,6 +715,10 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer && ctx.owner.owner.unforcedDecls.lookup(tree.name).exists then // we are in the arguments of a this(...) 
constructor call errorTree(tree, em"$tree is not accessible from constructor arguments") + else if name.isTermName && ctx.mode.is(Mode.InCaptureSet) then + // If we are in a capture set and the identifier is not a term name, + // try to type it with the same name but as a type + typed(untpd.makeCapsOf(untpd.cpy.Ident(tree)(name.toTypeName)), pt) else errorTree(tree, MissingIdent(tree, kind, name, pt)) end typedIdent @@ -920,6 +924,13 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer typedCBSelect(tree0, pt, qual) else EmptyTree + // Otherwise, if we are in a capture set, try to type it as a capture variable + // reference (as selecting a type name). + def trySelectTypeInCaptureSet() = + if tree0.name.isTermName && ctx.mode.is(Mode.InCaptureSet) then + typedSelectWithAdapt(untpd.cpy.Select(tree0)(qual, tree0.name.toTypeName), pt, qual) + else EmptyTree + // Otherwise, report an error def reportAnError() = assignType(tree, @@ -941,6 +952,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer .orElse(tryDynamic()) .orElse(trySelectable()) .orElse(tryCBCompanion()) + .orElse(trySelectTypeInCaptureSet()) .orElse(reportAnError()) end typedSelectWithAdapt @@ -1393,7 +1405,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer cpy.Assign(tree)(lhsCore, typed(tree.rhs, lhs1.tpe.widen)).withType(defn.UnitType) def canAssign(sym: Symbol) = - sym.is(Mutable, butNot = Accessor) || + sym.isMutableVar || ctx.owner.isPrimaryConstructor && !sym.is(Method) && sym.maybeOwner == ctx.owner.owner || // allow assignments from the primary constructor to class fields ctx.owner.name.is(TraitSetterName) || ctx.owner.isStaticConstructor @@ -2018,7 +2030,8 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case mt: MethodType => pt.findFunctionType match { case SAMType(samMeth, samParent) - if !defn.isFunctionNType(samParent) && mt <:< samMeth => + if !ctx.erasedTypes && !defn.isFunctionNType(samParent) + && mt <:< samMeth && !mt.isImplicitMethod => if defn.isContextFunctionType(mt.resultType) then report.error( em"""Implementation restriction: cannot convert this expression to `$samParent` @@ -2512,6 +2525,8 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val tycon = typedType(tree.tycon) def spliced(tree: Tree) = untpd.TypedSplice(tree) val tparam = untpd.Ident(tree.paramName).withSpan(tree.span.withEnd(tree.span.point)) + if Feature.ccEnabled && typed(tparam).tpe.derivesFrom(defn.Caps_CapSet) then + report.error(em"Capture variable `${tree.paramName}` cannot have a context bound.", tycon.srcPos) if tycon.tpe.typeParams.nonEmpty then val tycon0 = tycon.withType(tycon.tpe.etaCollapse) typed(untpd.AppliedTypeTree(spliced(tycon0), tparam :: Nil)) @@ -2557,17 +2572,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer } def typedAppliedTypeTree(tree: untpd.AppliedTypeTree)(using Context): Tree = { - tree.args match - case arg :: _ if arg.isTerm => - if Feature.dependentEnabled then - return errorTree(tree, em"Not yet implemented: T(...)") - else - return errorTree(tree, dependentMsg) - case _ => - - val tpt1 = withoutMode(Mode.Pattern) { + val tpt1 = withoutMode(Mode.Pattern): typed(tree.tpt, AnyTypeConstructorProto) - } + val tparams = tpt1.tpe.typeParams if tpt1.tpe.isError then val args1 = tree.args.mapconserve(typedType(_)) @@ -2691,7 +2698,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer typeIndexedLambdaTypeTree(tree, tparams, body) def typedTermLambdaTypeTree(tree: 
untpd.TermLambdaTypeTree)(using Context): Tree = - if Feature.dependentEnabled then + if Feature.enabled(Feature.modularity) then errorTree(tree, em"Not yet implemented: (...) =>> ...") else errorTree(tree, dependentMsg) @@ -2730,13 +2737,28 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer assignType(cpy.ByNameTypeTree(tree)(result1), result1) def typedTypeBoundsTree(tree: untpd.TypeBoundsTree, pt: Type)(using Context): Tree = + lazy val CapSetBot = untpd.TypeTree(defn.Caps_CapSet.typeRef) + lazy val CapSetTop = untpd.makeRetaining(untpd.TypeTree(defn.Caps_CapSet.typeRef), Nil, tpnme.retainsCap).withSpan(tree.span) + val TypeBoundsTree(lo, hi, alias) = tree val lo1 = typed(lo) val hi1 = typed(hi) val alias1 = typed(alias) - val lo2 = if (lo1.isEmpty) typed(untpd.TypeTree(defn.NothingType)) else lo1 - val hi2 = if (hi1.isEmpty) typed(untpd.TypeTree(defn.AnyType)) else hi1 + val isCap = tree.hasAttachment(CaptureVar) + val lo2 = + if lo1.isEmpty then + if Feature.ccEnabled && (isCap || hi1.tpe.derivesFrom(defn.Caps_CapSet)) then + typed(CapSetBot) + else typed(untpd.TypeTree(defn.NothingType)) + else lo1 + val hi2 = + if hi1.isEmpty then + if Feature.ccEnabled && (isCap || lo1.tpe.derivesFrom(defn.Caps_CapSet)) then + typed(CapSetTop) + else typed(untpd.TypeTree(defn.AnyType)) + else hi1 assignType(cpy.TypeBoundsTree(tree)(lo2, hi2, alias1), lo2, hi2, alias1) + end typedTypeBoundsTree def typedBind(tree: untpd.Bind, pt: Type)(using Context): Tree = { if !isFullyDefined(pt, ForceDegree.all) then @@ -2805,8 +2827,20 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer else assert(ctx.reporter.errorsReported) tree.withType(defn.AnyType) - val trees1 = tree.trees.mapconserve(typed(_, pt)(using nestedCtx)) + val preGadt = nestedCtx.gadt + var gadtConstrs: mutable.ArrayBuffer[GadtConstraint] = mutable.ArrayBuffer.empty + val trees1 = tree.trees.mapconserve: t => + nestedCtx.gadtState.restore(preGadt) + val res = typed(t, pt)(using nestedCtx) + gadtConstrs += nestedCtx.gadt + res .mapconserve(ensureValueTypeOrWildcard) + // Look for the necessary constraint that is subsumed by all alternatives. + // Use that constraint as the outcome if possible, otherwise fallback to not using + // GADT reasoning for soundness. + TypeComparer.necessaryGadtConstraint(gadtConstrs.toList, preGadt) match + case Some(constr) => nestedCtx.gadtState.restore(constr) + case None => nestedCtx.gadtState.restore(preGadt) assignType(cpy.Alternative(tree)(trees1), trees1) } @@ -3030,7 +3064,25 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if sym.isOpaqueAlias then checkFullyAppliedType(rhs1, "Opaque type alias must be fully applied, but ") checkNoContextFunctionType(rhs1) - assignType(cpy.TypeDef(tdef)(name, rhs1), sym) + var attachCap = false + if Feature.ccEnabled then + val isCap = tdef.hasAttachment(CaptureVar) + rhs1 match + case TypeBoundsTree(lo, hi, _) => + val loIsCap = lo.tpe.derivesFrom(defn.Caps_CapSet) + val hiIsCap = hi.tpe.derivesFrom(defn.Caps_CapSet) + if !isCap && (loIsCap ^ hiIsCap) then + report.error(em"Illegal type bounds: >: $lo <: $hi. Capture-set bounds cannot be mixed with type bounds of other kinds", rhs.srcPos) + if isCap && !(loIsCap && hiIsCap) then + report.error(em"Illegal type bounds: >: $lo <: $hi. 
$name^ can only have capture sets as bounds", rhs.srcPos) + attachCap = !isCap && loIsCap && hiIsCap + case LambdaTypeTree(_, _) if isCap => + report.error(em"`$name` cannot have type parameters, because it ranges over capture sets", rhs.srcPos) + case _ => + val res = assignType(cpy.TypeDef(tdef)(name, rhs1), sym) + if Feature.ccEnabled && attachCap then + res.putAttachment(CaptureVar, ()) + res } def typedClassDef(cdef: untpd.TypeDef, cls: ClassSymbol)(using Context): Tree = ctx.profiler.onTypedDef(cls) { @@ -3164,10 +3216,26 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val usingParamAccessors = cls.paramAccessors.filter(_.is(Given)) val paramScope = newScopeWith(usingParamAccessors*) val searchCtx = ctx.outer.fresh.setScope(paramScope) + + // Before losing the reference to ctx.owner + // when calling implicitArgTree with searchCtx, + // let's store ctx.owner as the fallback "responsibleForImports" + // in DependencyRecorder. That way, if we end up recording any dependencies + // we use ctx.owner as the "fromClass" rather than emitting a warning + // (because ctx.compilationUnit.tpdTree is still EmptyTree during typer). + // For example, to record mirror dependencies, see i23049. + val depRecorder = ctx.compilationUnit.depRecorder + val responsibleForImports = depRecorder._responsibleForImports + if responsibleForImports == null then + depRecorder._responsibleForImports = ctx.owner + val rhs = implicitArgTree(target, cdef.span, where = i"inferring the implementation of the deferred ${dcl.showLocated}" )(using searchCtx) + if responsibleForImports == null then + depRecorder._responsibleForImports = null + val impl = dcl.copy(cls, flags = dcl.flags &~ (HasDefault | Deferred) | Final | Override, info = target, @@ -3204,7 +3272,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer checkEnumParent(cls, firstParent) - if defn.ScalaValueClasses()(cls) && ctx.settings.YcompileScala2Library.value then + if defn.ScalaValueClasses()(cls) && Feature.shouldBehaveAsScala2 then constr1.symbol.resetFlag(Private) val self1 = typed(self)(using ctx.outer).asInstanceOf[ValDef] // outer context where class members are not visible @@ -3241,7 +3309,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer checkNonCyclicInherited(cls.thisType, cls.info.parents, cls.info.decls, cdef.srcPos) // check value class constraints - checkDerivedValueClass(cls, body1) + checkDerivedValueClass(cdef, cls, body1) val effectiveOwner = cls.owner.skipWeakOwner if cls.is(ModuleClass) @@ -4524,7 +4592,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer tree } else TypeComparer.testSubType(tree.tpe.widenExpr, pt) match - case CompareResult.Fail => + case CompareResult.Fail(_) => wtp match case wtp: MethodType => missingArgs(wtp) case _ => @@ -4661,7 +4729,8 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer && !ctx.isAfterTyper && !tree.isInstanceOf[Inlined] && !isThisTypeResult(tree) - && !tree.hasAttachment(AscribedToUnit) then + && !isAscribedToUnit(tree) + then report.warning(ValueDiscarding(tree.tpe), tree.srcPos) return tpd.Block(tree1 :: Nil, unitLiteral) diff --git a/compiler/src/dotty/tools/dotc/typer/VarianceChecker.scala b/compiler/src/dotty/tools/dotc/typer/VarianceChecker.scala index 3699ca80d011..0c2929283ee3 100644 --- a/compiler/src/dotty/tools/dotc/typer/VarianceChecker.scala +++ b/compiler/src/dotty/tools/dotc/typer/VarianceChecker.scala @@ -157,7 +157,7 @@ class VarianceChecker(using Context) { def isLocal = 
base.isAllOf(PrivateLocal) || base.is(Private) && !base.hasAnnotation(defn.AssignedNonLocallyAnnot) - if base.is(Mutable, butNot = Method) && !isLocal then + if base.isMutableVar && !isLocal then base.removeAnnotation(defn.AssignedNonLocallyAnnot) variance = 0 try checkInfo(base.info) diff --git a/compiler/src/dotty/tools/dotc/util/Chars.scala b/compiler/src/dotty/tools/dotc/util/Chars.scala index 916bdfa9dca3..e68c48903a63 100644 --- a/compiler/src/dotty/tools/dotc/util/Chars.scala +++ b/compiler/src/dotty/tools/dotc/util/Chars.scala @@ -50,7 +50,7 @@ object Chars: } /** Is character a whitespace character (but not a new line)? */ - def isWhitespace(c: Char): Boolean = + inline def isWhitespace(c: Char): Boolean = c == ' ' || c == '\t' || c == CR /** Can character form part of a doc comment variable $xxx? */ diff --git a/compiler/src/dotty/tools/dotc/util/LRUCache.scala b/compiler/src/dotty/tools/dotc/util/LRUCache.scala index e124159575e7..4abe8962ae5b 100644 --- a/compiler/src/dotty/tools/dotc/util/LRUCache.scala +++ b/compiler/src/dotty/tools/dotc/util/LRUCache.scala @@ -1,6 +1,5 @@ package dotty.tools.dotc.util -import scala.language.unsafeNulls import reflect.ClassTag import annotation.tailrec @@ -17,7 +16,7 @@ import annotation.tailrec * get promoted to be first in the queue. Elements are evicted * at the `last` position. */ -class LRUCache[Key >: Null <: AnyRef : ClassTag, Value >: Null: ClassTag] { +class LRUCache[Key >: Null <: AnyRef | Null : ClassTag, Value >: Null: ClassTag] { import LRUCache.* val keys: Array[Key] = new Array[Key](Retained) val values: Array[Value] = new Array(Retained) diff --git a/compiler/src/dotty/tools/dotc/util/NameTransformer.scala b/compiler/src/dotty/tools/dotc/util/NameTransformer.scala index f641ea90dcdd..1d8ef6150e46 100644 --- a/compiler/src/dotty/tools/dotc/util/NameTransformer.scala +++ b/compiler/src/dotty/tools/dotc/util/NameTransformer.scala @@ -2,7 +2,6 @@ package dotty.tools package dotc package util -import scala.language.unsafeNulls import core.Names.* @@ -55,7 +54,7 @@ object NameTransformer { * the Scala spec as well as the Java spec. 
*/ def encode(name: SimpleName): SimpleName = { - var buf: StringBuilder = null + var buf: StringBuilder | Null = null val len = name.length var i = 0 while (i < len) { @@ -88,11 +87,11 @@ object NameTransformer { */ def decode(name: SimpleName): SimpleName = { //System.out.println("decode: " + name);//DEBUG - var buf: StringBuilder = null + var buf: StringBuilder | Null = null val len = name.length var i = 0 while (i < len) { - var ops: OpCodes = null + var ops: OpCodes | Null = null var unicode = false val c = name(i) if (c == '$' && i + 2 < len) { diff --git a/compiler/src/dotty/tools/dotc/util/ParsedComment.scala b/compiler/src/dotty/tools/dotc/util/ParsedComment.scala index ee56a74d5eb5..acced8dd857c 100644 --- a/compiler/src/dotty/tools/dotc/util/ParsedComment.scala +++ b/compiler/src/dotty/tools/dotc/util/ParsedComment.scala @@ -1,7 +1,5 @@ package dotty.tools.dotc.util -import scala.language.unsafeNulls - import dotty.tools.dotc.core.Comments.{Comment, docCtx} import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.core.Names.TermName diff --git a/compiler/src/dotty/tools/dotc/util/ShowPickled.scala b/compiler/src/dotty/tools/dotc/util/ShowPickled.scala index 8d5d0c27ab0e..15d553e703b3 100644 --- a/compiler/src/dotty/tools/dotc/util/ShowPickled.scala +++ b/compiler/src/dotty/tools/dotc/util/ShowPickled.scala @@ -113,7 +113,7 @@ object ShowPickled { result.toInt } - def printFile(buf: PickleBuffer, out: PrintStream = System.out.nn): Unit = { + def printFile(buf: PickleBuffer, out: PrintStream = System.out): Unit = { out.println("Version " + buf.readNat() + "." + buf.readNat()) val index = buf.createIndex val entryList = makeEntryList(buf, index) diff --git a/compiler/src/dotty/tools/dotc/util/SimpleIdentitySet.scala b/compiler/src/dotty/tools/dotc/util/SimpleIdentitySet.scala index b243145c9e5f..1d8dc484c989 100644 --- a/compiler/src/dotty/tools/dotc/util/SimpleIdentitySet.scala +++ b/compiler/src/dotty/tools/dotc/util/SimpleIdentitySet.scala @@ -1,6 +1,5 @@ package dotty.tools.dotc.util -import scala.language.unsafeNulls import collection.mutable @@ -18,12 +17,18 @@ abstract class SimpleIdentitySet[+Elem <: AnyRef] { var acc: SimpleIdentitySet[B] = SimpleIdentitySet.empty foreach(x => acc += f(x)) acc + def flatMap[B <: AnyRef](f: Elem => SimpleIdentitySet[B]): SimpleIdentitySet[B] = + var acc: SimpleIdentitySet[B] = SimpleIdentitySet.empty + foreach(x => acc ++= f(x)) + acc def /: [A, E >: Elem <: AnyRef](z: A)(f: (A, E) => A): A def toList: List[Elem] - def iterator: Iterator[Elem] + def nth(n: Int): Elem final def isEmpty: Boolean = size == 0 + final def iterator: Iterator[Elem] = Iterator.tabulate(size)(nth) + def forall[E >: Elem <: AnyRef](p: E => Boolean): Boolean = !exists(!p(_)) def filter(p: Elem => Boolean): SimpleIdentitySet[Elem] = @@ -42,8 +47,16 @@ abstract class SimpleIdentitySet[+Elem <: AnyRef] { if (that.contains(x)) s else s + x } + def ** [E >: Elem <: AnyRef](that: SimpleIdentitySet[E]): SimpleIdentitySet[E] = + if this.size == 0 then this + else if that.size == 0 then that + else this.filter(that.contains) + def == [E >: Elem <: AnyRef](that: SimpleIdentitySet[E]): Boolean = - this.size == that.size && forall(that.contains) + (this eq that) || this.size == that.size && forall(that.contains) + + def != [E >: Elem <: AnyRef](that: SimpleIdentitySet[E]): Boolean = + !(this == that) override def toString: String = toList.mkString("{", ", ", "}") } @@ -69,7 +82,7 @@ object SimpleIdentitySet { override def map[B <: AnyRef](f: Nothing => B): 
SimpleIdentitySet[B] = empty def /: [A, E <: AnyRef](z: A)(f: (A, E) => A): A = z def toList = Nil - def iterator = Iterator.empty + def nth(n: Int): Nothing = throw new IndexOutOfBoundsException(n.toString) } private class Set1[+Elem <: AnyRef](x0: AnyRef) extends SimpleIdentitySet[Elem] { @@ -87,7 +100,9 @@ object SimpleIdentitySet { def /: [A, E >: Elem <: AnyRef](z: A)(f: (A, E) => A): A = f(z, x0.asInstanceOf[E]) def toList = x0.asInstanceOf[Elem] :: Nil - def iterator = Iterator.single(x0.asInstanceOf[Elem]) + def nth(n: Int) = + if n == 0 then x0.asInstanceOf[Elem] + else throw new IndexOutOfBoundsException(n.toString) } private class Set2[+Elem <: AnyRef](x0: AnyRef, x1: AnyRef) extends SimpleIdentitySet[Elem] { @@ -109,10 +124,10 @@ object SimpleIdentitySet { def /: [A, E >: Elem <: AnyRef](z: A)(f: (A, E) => A): A = f(f(z, x0.asInstanceOf[E]), x1.asInstanceOf[E]) def toList = x0.asInstanceOf[Elem] :: x1.asInstanceOf[Elem] :: Nil - def iterator = Iterator.tabulate(2) { + def nth(n: Int) = n match case 0 => x0.asInstanceOf[Elem] case 1 => x1.asInstanceOf[Elem] - } + case _ => throw new IndexOutOfBoundsException(n.toString) } private class Set3[+Elem <: AnyRef](x0: AnyRef, x1: AnyRef, x2: AnyRef) extends SimpleIdentitySet[Elem] { @@ -149,11 +164,11 @@ object SimpleIdentitySet { def /: [A, E >: Elem <: AnyRef](z: A)(f: (A, E) => A): A = f(f(f(z, x0.asInstanceOf[E]), x1.asInstanceOf[E]), x2.asInstanceOf[E]) def toList = x0.asInstanceOf[Elem] :: x1.asInstanceOf[Elem] :: x2.asInstanceOf[Elem] :: Nil - def iterator = Iterator.tabulate(3) { + def nth(n: Int) = n match case 0 => x0.asInstanceOf[Elem] case 1 => x1.asInstanceOf[Elem] case 2 => x2.asInstanceOf[Elem] - } + case _ => throw new IndexOutOfBoundsException(n.toString) } private class SetN[+Elem <: AnyRef](val xs: Array[AnyRef]) extends SimpleIdentitySet[Elem] { @@ -200,11 +215,13 @@ object SimpleIdentitySet { foreach(buf += _) buf.toList } - def iterator = xs.iterator.asInstanceOf[Iterator[Elem]] + def nth(n: Int) = + if 0 <= n && n < size then xs(n).asInstanceOf[Elem] + else throw new IndexOutOfBoundsException(n.toString) override def ++ [E >: Elem <: AnyRef](that: SimpleIdentitySet[E]): SimpleIdentitySet[E] = that match { case that: SetN[?] => - var toAdd: mutable.ArrayBuffer[AnyRef] = null + var toAdd: mutable.ArrayBuffer[AnyRef] | Null = null var i = 0 val limit = that.xs.length while (i < limit) { @@ -234,7 +251,7 @@ object SimpleIdentitySet { case that: SetN[?] => // both sets are large, optimize assuming they are similar // by starting from empty set and adding elements - var toAdd: mutable.ArrayBuffer[AnyRef] = null + var toAdd: mutable.ArrayBuffer[AnyRef] | Null = null val thisSize = this.size val thatSize = that.size val thatElems = that.xs diff --git a/compiler/src/dotty/tools/dotc/util/SourceFile.scala b/compiler/src/dotty/tools/dotc/util/SourceFile.scala index 1c264b395689..eb99fe99d926 100644 --- a/compiler/src/dotty/tools/dotc/util/SourceFile.scala +++ b/compiler/src/dotty/tools/dotc/util/SourceFile.scala @@ -229,8 +229,7 @@ object SourceFile { * It relies on SourceFile#virtual implementation to create the virtual file. 
*/ def virtual(uri: URI, content: String): SourceFile = - val path = Paths.get(uri).toString - SourceFile.virtual(path, content) + SourceFile(new VirtualFile(Paths.get(uri), content.getBytes(StandardCharsets.UTF_8)), content.toCharArray) /** Returns the relative path of `source` within the `reference` path * diff --git a/compiler/src/dotty/tools/dotc/util/SourcePosition.scala b/compiler/src/dotty/tools/dotc/util/SourcePosition.scala index a7358755043c..e4f56bd85c0b 100644 --- a/compiler/src/dotty/tools/dotc/util/SourcePosition.scala +++ b/compiler/src/dotty/tools/dotc/util/SourcePosition.scala @@ -2,7 +2,6 @@ package dotty.tools package dotc package util -import scala.language.unsafeNulls import printing.{Showable, Printer} import printing.Texts.* @@ -67,7 +66,7 @@ extends SrcPos, interfaces.SourcePosition, Showable { def toSynthetic: SourcePosition = withSpan(span.toSynthetic) def outermost: SourcePosition = - if outer == null || outer == NoSourcePosition then this else outer.outermost + if (outer eq null) || outer == NoSourcePosition then this else outer.outermost /** Inner most position that is contained within the `outermost` position. * Most precise position that comes from the call site. @@ -86,7 +85,7 @@ extends SrcPos, interfaces.SourcePosition, Showable { } /** A sentinel for a non-existing source position */ -@sharable object NoSourcePosition extends SourcePosition(NoSource, NoSpan, null) { +@sharable object NoSourcePosition extends SourcePosition(NoSource, NoSpan) { override def line: Int = -1 override def column: Int = -1 override def toString: String = "?" diff --git a/compiler/src/dotty/tools/dotc/util/Spans.scala b/compiler/src/dotty/tools/dotc/util/Spans.scala index 7d4bbe0e8180..33346ad6da17 100644 --- a/compiler/src/dotty/tools/dotc/util/Spans.scala +++ b/compiler/src/dotty/tools/dotc/util/Spans.scala @@ -42,19 +42,19 @@ object Spans { /** The start of this span. */ def start: Int = { - assert(exists) + assert(exists, "start of NoSpan") (coords & StartEndMask).toInt } /** The end of this span */ def end: Int = { - assert(exists) + assert(exists, "end of NoSpan") ((coords >>> StartEndBits) & StartEndMask).toInt } /** The point of this span, returns start for synthetic spans */ def point: Int = { - assert(exists) + assert(exists, "point of NoSpan") val poff = pointDelta if (poff == SyntheticPointDelta) start else start + poff } diff --git a/compiler/src/dotty/tools/io/AbstractFile.scala b/compiler/src/dotty/tools/io/AbstractFile.scala index ee72297c2a4f..1bc9c051cdc6 100644 --- a/compiler/src/dotty/tools/io/AbstractFile.scala +++ b/compiler/src/dotty/tools/io/AbstractFile.scala @@ -115,7 +115,7 @@ abstract class AbstractFile extends Iterable[AbstractFile] { def container : AbstractFile /** Returns the underlying File if any and null otherwise. */ - def file: JFile = try { + def file: JFile | Null = try { if (jpath == null) null else jpath.toFile } catch { @@ -123,7 +123,7 @@ abstract class AbstractFile extends Iterable[AbstractFile] { } /** Returns the underlying Path if any and null otherwise. */ - def jpath: JPath + def jpath: JPath | Null /** An underlying source, if known. Mostly, a zip/jar file. */ def underlyingSource: Option[AbstractFile] = None @@ -196,12 +196,12 @@ abstract class AbstractFile extends Iterable[AbstractFile] { } /** Returns all abstract subfiles of this abstract directory. */ - def iterator(): Iterator[AbstractFile] + def iterator: Iterator[AbstractFile] /** Drill down through subdirs looking for the target, as in lookupName. 
* Ths target name is the last of parts. */ - final def lookupPath(parts: Seq[String], directory: Boolean): AbstractFile = + final def lookupPath(parts: Seq[String], directory: Boolean): AbstractFile | Null = var file: AbstractFile = this var i = 0 val n = parts.length - 1 diff --git a/compiler/src/dotty/tools/io/File.scala b/compiler/src/dotty/tools/io/File.scala index 59e4a2ee451b..ea48966de6fb 100644 --- a/compiler/src/dotty/tools/io/File.scala +++ b/compiler/src/dotty/tools/io/File.scala @@ -8,8 +8,6 @@ package dotty.tools.io -import scala.language.unsafeNulls - import java.io.{File => JavaIoFile, _} import java.nio.file.{Files, Paths} import java.nio.file.StandardOpenOption.* diff --git a/compiler/src/dotty/tools/io/FileWriters.scala b/compiler/src/dotty/tools/io/FileWriters.scala index 5fdf43cfe8e3..849b80c5e745 100644 --- a/compiler/src/dotty/tools/io/FileWriters.scala +++ b/compiler/src/dotty/tools/io/FileWriters.scala @@ -66,7 +66,7 @@ object FileWriters { def warning(message: Context ?=> Message): Unit = warning(message, NoSourcePosition) final def exception(reason: Context ?=> Message, throwable: Throwable): Unit = error({ - val trace = throwable.getStackTrace().nn.mkString("\n ") + val trace = throwable.getStackTrace().mkString("\n ") em"An unhandled exception was thrown in the compiler while\n ${reason.message}.\n${throwable}\n $trace" }, NoSourcePosition) } @@ -189,7 +189,7 @@ object FileWriters { def close(): Unit protected def classToRelativePath(className: InternalName): String = - className.replace('.', '/').nn + ".tasty" + className.replace('.', '/') + ".tasty" } object TastyWriter { @@ -234,7 +234,7 @@ object FileWriters { new JarEntryWriter(jarFile, jarManifestMainClass, jarCompressionLevel) } else if (file.isVirtual) new VirtualFileWriter(file) - else if (file.isDirectory) new DirEntryWriter(file.file.toPath.nn) + else if (file.isDirectory) new DirEntryWriter(file.file.toPath) else throw new IllegalStateException(s"don't know how to handle an output of $file [${file.getClass}]") } @@ -248,7 +248,7 @@ object FileWriters { val jarWriter: JarOutputStream = { import scala.util.Properties.* val manifest = new Manifest - val attrs = manifest.getMainAttributes.nn + val attrs = manifest.getMainAttributes attrs.put(MANIFEST_VERSION, "1.0") attrs.put(ScalaCompilerVersion, versionNumberString) mainClass.foreach(c => attrs.put(MAIN_CLASS, c)) diff --git a/compiler/src/dotty/tools/io/Path.scala b/compiler/src/dotty/tools/io/Path.scala index 6f97e03ca4d7..39665395c289 100644 --- a/compiler/src/dotty/tools/io/Path.scala +++ b/compiler/src/dotty/tools/io/Path.scala @@ -5,8 +5,6 @@ package dotty.tools.io -import scala.language.unsafeNulls - import java.io.RandomAccessFile import java.nio.file.* import java.net.{URI, URL} @@ -46,7 +44,7 @@ object Path { def fileName(name: String): String = { val i = name.lastIndexOf('.') if (i < 0) name - else name.substring(0, i).nn + else name.substring(0, i) } def onlyDirs(xs: Iterator[Path]): Iterator[Directory] = xs.filter(_.isDirectory).map(_.toDirectory) diff --git a/compiler/src/dotty/tools/io/PlainFile.scala b/compiler/src/dotty/tools/io/PlainFile.scala index a6a39d9ff3eb..ff637c913cd8 100644 --- a/compiler/src/dotty/tools/io/PlainFile.scala +++ b/compiler/src/dotty/tools/io/PlainFile.scala @@ -14,7 +14,7 @@ import java.nio.file.{InvalidPathException, Paths} /** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ class PlainDirectory(givenPath: Directory) extends PlainFile(givenPath) { 
override val isDirectory: Boolean = true - override def iterator(): Iterator[PlainFile] = givenPath.list.filter(_.exists).map(new PlainFile(_)) + override def iterator: Iterator[PlainFile] = givenPath.list.filter(_.exists).map(new PlainFile(_)) } /** This class implements an abstract file backed by a File. diff --git a/compiler/src/dotty/tools/io/Streamable.scala b/compiler/src/dotty/tools/io/Streamable.scala index 328ce03f7853..fd72854a9127 100644 --- a/compiler/src/dotty/tools/io/Streamable.scala +++ b/compiler/src/dotty/tools/io/Streamable.scala @@ -5,7 +5,6 @@ package dotty.tools.io -import scala.language.unsafeNulls import java.net.URL import java.io.{ BufferedInputStream, InputStream } diff --git a/compiler/src/dotty/tools/io/VirtualDirectory.scala b/compiler/src/dotty/tools/io/VirtualDirectory.scala index 949f2d0e61dd..28c4bc308cae 100644 --- a/compiler/src/dotty/tools/io/VirtualDirectory.scala +++ b/compiler/src/dotty/tools/io/VirtualDirectory.scala @@ -43,7 +43,7 @@ extends AbstractFile { // the toList is so that the directory may continue to be // modified while its elements are iterated - def iterator(): Iterator[AbstractFile] = files.values.toList.iterator + def iterator: Iterator[AbstractFile] = files.values.toList.iterator override def lookupName(name: String, directory: Boolean): AbstractFile = (files get name filter (_.isDirectory == directory)).orNull diff --git a/compiler/src/dotty/tools/io/VirtualFile.scala b/compiler/src/dotty/tools/io/VirtualFile.scala index 6fb9859503f2..93788990d368 100644 --- a/compiler/src/dotty/tools/io/VirtualFile.scala +++ b/compiler/src/dotty/tools/io/VirtualFile.scala @@ -40,15 +40,34 @@ class VirtualFile(val name: String, override val path: String) extends AbstractF this.content = content } + /** + * Initializes this instance with the specified path + * and a name taken from the last path element. + * + * @param path the path of the virtual file to be created + * @param content the initial contents of the virtual file + * @return the created virtual file + */ + def this(path: JPath, content: Array[Byte]) = { + this(path.getFileName().toString(), path.toString()) + this.content = content + this.jpath_ = path + } + private var content = Array.emptyByteArray + private var jpath_ : JPath = null + def absolute: AbstractFile = this - /** Returns null. */ - def jpath: JPath = null + /** Returns path, which might be a non-existing file or null. */ + def jpath: JPath = jpath_ override def sizeOption: Option[Int] = Some(content.length) + /** Always returns true, even if jpath is a non-existing file. 
*/ + override def exists: Boolean = true + def input : InputStream = new ByteArrayInputStream(content) override def output: OutputStream = { diff --git a/compiler/src/dotty/tools/io/ZipArchive.scala b/compiler/src/dotty/tools/io/ZipArchive.scala index a23bde8faaed..1ee79bd8036a 100644 --- a/compiler/src/dotty/tools/io/ZipArchive.scala +++ b/compiler/src/dotty/tools/io/ZipArchive.scala @@ -220,7 +220,7 @@ final class FileZipArchive(jpath: JPath, release: Option[String]) extends ZipArc } final class ManifestResources(val url: URL) extends ZipArchive(null, None) { - def iterator(): Iterator[AbstractFile] = { + def iterator: Iterator[AbstractFile] = { val root = new DirEntry("/", null) val dirs = mutable.HashMap[String, DirEntry]("/" -> root) val stream = input diff --git a/compiler/src/dotty/tools/runner/ScalaClassLoader.scala b/compiler/src/dotty/tools/runner/ScalaClassLoader.scala index a8cd36cba6bd..0cb31189be5f 100644 --- a/compiler/src/dotty/tools/runner/ScalaClassLoader.scala +++ b/compiler/src/dotty/tools/runner/ScalaClassLoader.scala @@ -1,7 +1,6 @@ package dotty.tools package runner -import scala.language.unsafeNulls import java.lang.ClassLoader import java.lang.invoke.{MethodHandles, MethodType} @@ -64,7 +63,7 @@ object ScalaClassLoader { def fromURLsParallelCapable(urls: Seq[URL], parent: ClassLoader | Null = null): URLClassLoader = new URLClassLoader(urls.toArray, if parent == null then bootClassLoader else parent) - @sharable private val bootClassLoader: ClassLoader = + @sharable private val bootClassLoader: ClassLoader | Null = if scala.util.Properties.isJavaAtLeast("9") then try ClassLoader.getSystemClassLoader.getParent diff --git a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala index 1c420dd93b29..88b73a776e58 100644 --- a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala +++ b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala @@ -25,6 +25,8 @@ import scala.quoted.runtime.impl.printers.* import scala.reflect.TypeTest import dotty.tools.dotc.core.NameKinds.ExceptionBinderName import dotty.tools.dotc.transform.TreeChecker +import dotty.tools.dotc.core.Names +import dotty.tools.dotc.util.Spans.NoCoord object QuotesImpl { @@ -241,9 +243,35 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler object ClassDef extends ClassDefModule: def apply(cls: Symbol, parents: List[Tree], body: List[Statement]): ClassDef = - val untpdCtr = untpd.DefDef(nme.CONSTRUCTOR, Nil, tpd.TypeTree(dotc.core.Symbols.defn.UnitClass.typeRef), tpd.EmptyTree) + val paramsDefs: List[untpd.ParamClause] = + cls.primaryConstructor.paramSymss.map { paramSym => + if paramSym.headOption.map(_.isType).getOrElse(false) then + paramSym.map(sym => TypeDef(sym)) + else + paramSym.map(ValDef(_, None)) + } + def throwError() = + throw new RuntimeException( + "Symbols necessary for creation of the ClassDef tree could not be found." 
+ ) + val paramsAccessDefs: List[untpd.ParamClause] = + cls.primaryConstructor.paramSymss.map { paramSym => + if paramSym.headOption.map(_.isType).getOrElse(false) then + paramSym.map { symm => + def isParamAccessor(memberSym: Symbol) = memberSym.flags.is(Flags.Param) && memberSym.name == symm.name + TypeDef(cls.typeMembers.find(isParamAccessor).getOrElse(throwError())) + } + else + paramSym.map { symm => + def isParam(memberSym: Symbol) = memberSym.flags.is(Flags.ParamAccessor) && memberSym.name == symm.name + ValDef(cls.fieldMembers.find(isParam).getOrElse(throwError()), None) + } + } + + val termSymbol: dotc.core.Symbols.TermSymbol = cls.primaryConstructor.asTerm + val untpdCtr = untpd.DefDef(nme.CONSTRUCTOR, paramsDefs, tpd.TypeTree(dotc.core.Symbols.defn.UnitClass.typeRef), tpd.EmptyTree) val ctr = ctx.typeAssigner.assignType(untpdCtr, cls.primaryConstructor) - tpd.ClassDefWithParents(cls.asClass, ctr, parents, body) + tpd.ClassDefWithParents(cls.asClass, ctr, parents, paramsAccessDefs.flatten ++ body) def copy(original: Tree)(name: String, constr: DefDef, parents: List[Tree], selfOpt: Option[ValDef], body: List[Statement]): ClassDef = { val dotc.ast.Trees.TypeDef(_, originalImpl: tpd.Template) = original: @unchecked @@ -2501,7 +2529,12 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler end StringConstantTypeTest object StringConstant extends StringConstantModule: - def apply(x: String): StringConstant = dotc.core.Constants.Constant(x) + def apply(x: String): StringConstant = + require(x != null, "value of StringConstant cannot be `null`") + // A `null` constant must be represented as a `NullConstant`, c.f. a + // constant with `tag == NullTag`, which is not a `StringConstant`. + // See issue 23008. + dotc.core.Constants.Constant(x) def unapply(constant: StringConstant): Some[String] = Some(constant.stringValue) end StringConstant @@ -2655,8 +2688,134 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler for sym <- decls(cls) do cls.enter(sym) cls - def newModule(owner: Symbol, name: String, modFlags: Flags, clsFlags: Flags, parents: List[TypeRepr], decls: Symbol => List[Symbol], privateWithin: Symbol): Symbol = - assert(parents.nonEmpty && !parents.head.typeSymbol.is(dotc.core.Flags.Trait), "First parent must be a class") + def newClass( + owner: Symbol, + name: String, + parents: Symbol => List[TypeRepr], + decls: Symbol => List[Symbol], + selfType: Option[TypeRepr], + clsFlags: Flags, + clsPrivateWithin: Symbol, + conParams: List[(String, TypeRepr)] + ): Symbol = + val (conParamNames, conParamTypes) = conParams.unzip + newClass( + owner, + name, + parents, + decls, + selfType, + clsFlags, + clsPrivateWithin, + Nil, + conMethodType = res => MethodType(conParamNames)(_ => conParamTypes, _ => res), + conFlags = Flags.EmptyFlags, + conPrivateWithin = Symbol.noSymbol, + conParamFlags = List(for i <- conParamNames yield Flags.EmptyFlags), + conParamPrivateWithins = List(for i <- conParamNames yield Symbol.noSymbol) + ) + + def newClass( + owner: Symbol, + name: String, + parents: Symbol => List[TypeRepr], + decls: Symbol => List[Symbol], + selfType: Option[TypeRepr], + clsFlags: Flags, + clsPrivateWithin: Symbol, + clsAnnotations: List[Term], + conMethodType: TypeRepr => MethodOrPoly, + conFlags: Flags, + conPrivateWithin: Symbol, + conParamFlags: List[List[Flags]], + conParamPrivateWithins: List[List[Symbol]] + ) = + assert(!clsPrivateWithin.exists || clsPrivateWithin.isType, "clsPrivateWithin must be a type symbol or 
`Symbol.noSymbol`") + assert(!conPrivateWithin.exists || conPrivateWithin.isType, "consPrivateWithin must be a type symbol or `Symbol.noSymbol`") + checkValidFlags(clsFlags.toTypeFlags, Flags.validClassFlags) + checkValidFlags(conFlags.toTermFlags, Flags.validClassConstructorFlags) + val cls = dotc.core.Symbols.newNormalizedClassSymbol( + owner, + name.toTypeName, + clsFlags, + parents, + selfType.getOrElse(Types.NoType), + clsPrivateWithin, + clsAnnotations, + NoCoord, + compUnitInfo = null + ) + val methodType: MethodOrPoly = conMethodType(cls.typeRef) + def throwShapeException() = throw new Exception("Shapes of conMethodType and conParamFlags differ.") + def checkMethodOrPolyShape(checkedMethodType: TypeRepr, clauseIdx: Int): Unit = + checkedMethodType match + case PolyType(params, _, res) if clauseIdx == 0 => + if (conParamFlags.length < clauseIdx) throwShapeException() + if (conParamFlags(clauseIdx).length != params.length) throwShapeException() + checkMethodOrPolyShape(res, clauseIdx + 1) + case PolyType(_, _, _) => throw new Exception("Clause interleaving not supported for constructors") + case MethodType(params, _, res) => + if (conParamFlags.length <= clauseIdx) throwShapeException() + if (conParamFlags(clauseIdx).length != params.length) throwShapeException() + checkMethodOrPolyShape(res, clauseIdx + 1) + case other => + xCheckMacroAssert( + other.typeSymbol == cls, + "Incorrect type returned from the innermost PolyOrMethod." + ) + (other, methodType) match + case (AppliedType(tycon, args), pt: PolyType) => + xCheckMacroAssert( + args.length == pt.typeParams.length && + args.zip(pt.typeParams).forall { + case (arg, param) => arg == param.paramRef + }, + "Constructor result type does not correspond to the declared type parameters" + ) + case _ => + xCheckMacroAssert( + !(other.isInstanceOf[AppliedType] || methodType.isInstanceOf[PolyType]), + "AppliedType has to be the innermost resultTypeExp result if and only if conMethodType returns a PolyType" + ) + checkMethodOrPolyShape(methodType, clauseIdx = 0) + + cls.enter(dotc.core.Symbols.newSymbol(cls, nme.CONSTRUCTOR, Flags.Synthetic | Flags.Method | conFlags, methodType, conPrivateWithin, dotty.tools.dotc.util.Spans.NoCoord)) + + case class ParamSymbolData(name: String, tpe: TypeRepr, isTypeParam: Boolean, clauseIdx: Int, elementIdx: Int) + def getParamSymbolsData(methodType: TypeRepr, clauseIdx: Int): List[ParamSymbolData] = + methodType match + case MethodType(paramInfosExp, resultTypeExp, res) => + paramInfosExp.zip(resultTypeExp).zipWithIndex.map { case ((name, tpe), elementIdx) => + ParamSymbolData(name, tpe, isTypeParam = false, clauseIdx, elementIdx) + } ++ getParamSymbolsData(res, clauseIdx + 1) + case pt @ PolyType(paramNames, paramBounds, res) => + paramNames.zip(paramBounds).zipWithIndex.map {case ((name, tpe), elementIdx) => + ParamSymbolData(name, tpe, isTypeParam = true, clauseIdx, elementIdx) + } ++ getParamSymbolsData(res, clauseIdx + 1) + case result => + List() + // Maps PolyType indexes to type parameter symbol typerefs + val paramRefMap = collection.mutable.HashMap[Int, Symbol]() + val paramRefRemapper = new Types.TypeMap { + def apply(tp: Types.Type) = tp match { + case pRef: ParamRef if pRef.binder == methodType => paramRefMap(pRef.paramNum).typeRef + case _ => mapOver(tp) + } + } + for case ParamSymbolData(name, tpe, isTypeParam, clauseIdx, elementIdx) <- getParamSymbolsData(methodType, 0) do + if isTypeParam then + checkValidFlags(conParamFlags(clauseIdx)(elementIdx).toTypeFlags, 
Flags.validClassTypeParamFlags) + val symbol = dotc.core.Symbols.newSymbol(cls, name.toTypeName, Flags.Param | Flags.Deferred | Flags.Private | Flags.PrivateLocal | Flags.Local | conParamFlags(clauseIdx)(elementIdx), tpe, conParamPrivateWithins(clauseIdx)(elementIdx)) + paramRefMap.addOne(elementIdx, symbol) + cls.enter(symbol) + else + checkValidFlags(conParamFlags(clauseIdx)(elementIdx).toTermFlags, Flags.validClassTermParamFlags) + val fixedType = paramRefRemapper(tpe) + cls.enter(dotc.core.Symbols.newSymbol(cls, name.toTermName, Flags.ParamAccessor | conParamFlags(clauseIdx)(elementIdx), fixedType, conParamPrivateWithins(clauseIdx)(elementIdx))) + for sym <- decls(cls) do cls.enter(sym) + cls + + def newModule(owner: Symbol, name: String, modFlags: Flags, clsFlags: Flags, parents: Symbol => List[TypeRepr], decls: Symbol => List[Symbol], privateWithin: Symbol): Symbol = assert(!privateWithin.exists || privateWithin.isType, "privateWithin must be a type symbol or `Symbol.noSymbol`") val mod = dotc.core.Symbols.newNormalizedModuleSymbol( owner, @@ -2665,7 +2824,10 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler clsFlags | dotc.core.Flags.ModuleClassCreationFlags, parents, dotc.core.Scopes.newScope, - privateWithin) + privateWithin, + NoCoord, + compUnitInfo = null + ) val cls = mod.moduleClass.asClass cls.enter(dotc.core.Symbols.newConstructor(cls, dotc.core.Flags.Synthetic, Nil, Nil)) for sym <- decls(cls) do cls.enter(sym) @@ -3021,6 +3183,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler def Implicit: Flags = dotc.core.Flags.Implicit def Infix: Flags = dotc.core.Flags.Infix def Inline: Flags = dotc.core.Flags.Inline + def Into: Flags = dotc.core.Flags.Into def Invisible: Flags = dotc.core.Flags.Invisible def JavaDefined: Flags = dotc.core.Flags.JavaDefined def JavaStatic: Flags = dotc.core.Flags.JavaStatic @@ -3046,6 +3209,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler def StableRealizable: Flags = dotc.core.Flags.StableRealizable @deprecated("Use JavaStatic instead", "3.3.0") def Static: Flags = dotc.core.Flags.JavaStatic def Synthetic: Flags = dotc.core.Flags.Synthetic + def Tracked: Flags = dotc.core.Flags.Tracked def Trait: Flags = dotc.core.Flags.Trait def Transparent: Flags = dotc.core.Flags.Transparent @@ -3063,6 +3227,18 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler // Keep: aligned with Quotes's `newTypeAlias` doc private[QuotesImpl] def validTypeAliasFlags: Flags = Private | Protected | Override | Final | Infix | Local + // Keep: aligned with Quotes's `newClass` + private[QuotesImpl] def validClassFlags: Flags = Private | Protected | PrivateLocal | Local | Final | Trait | Abstract | Open + + // Keep: aligned with Quote's 'newClass' + private[QuotesImpl] def validClassConstructorFlags: Flags = Synthetic | Method | Private | Protected | PrivateLocal | Local + + // Keep: aligned with Quotes's `newClass` + private[QuotesImpl] def validClassTypeParamFlags: Flags = Param | Deferred | Private | PrivateLocal | Local + + // Keep: aligned with Quotes's `newClass` + private[QuotesImpl] def validClassTermParamFlags: Flags = ParamAccessor | Private | Protected | PrivateLocal | Local + end Flags given FlagsMethods: FlagsMethods with diff --git a/compiler/src/scala/quoted/runtime/impl/printers/Extractors.scala b/compiler/src/scala/quoted/runtime/impl/printers/Extractors.scala index 82be54a9d793..b43b6e23e8ca 100644 --- 
a/compiler/src/scala/quoted/runtime/impl/printers/Extractors.scala +++ b/compiler/src/scala/quoted/runtime/impl/printers/Extractors.scala @@ -1,6 +1,7 @@ package scala.quoted package runtime.impl.printers +import scala.collection.mutable import scala.quoted.* object Extractors { @@ -67,6 +68,12 @@ import quotes.reflect.* private val sb: StringBuilder = new StringBuilder + private var recTypeCounter = 0 + private val recTypeIds = mutable.Map.empty[RecursiveType, Int] + + private def nextRecTypeId(): Int = + recTypeCounter += 1 + recTypeCounter def result(): String = sb.result() @@ -226,9 +233,14 @@ object Extractors { case SuperType(thistpe, supertpe) => this += "SuperType(" += thistpe += ", " += supertpe += ")" case RecursiveThis(binder) => - this += "RecursiveThis(" += binder += ")" - case RecursiveType(underlying) => - this += "RecursiveType(" += underlying += ")" + val id = recTypeIds.getOrElse(binder, -1) + if (id == -1) + this += "RecursiveThis(" += binder += ")" + else + this += "RecursiveThis(rec" += id += ")" + case rt @ RecursiveType(underlying) => + val id = recTypeIds.getOrElseUpdate(rt, nextRecTypeId()) + this += "RecursiveType(rec" += id += " => " += underlying += ")" case MethodType(argNames, argTypes, resType) => this += "MethodType(" ++= argNames += ", " ++= argTypes += ", " += resType += ")" case PolyType(argNames, argBounds, resType) => diff --git a/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala index 64a0ff9db9ec..6e0d65bc044e 100644 --- a/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala +++ b/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala @@ -1379,13 +1379,13 @@ object SourceCode { printTypeTree(bounds.low) else bounds.low match { - case Inferred() => + case Inferred() if bounds.low.tpe.typeSymbol == TypeRepr.of[Nothing].typeSymbol => case low => this += " >: " printTypeTree(low) } bounds.hi match { - case Inferred() => this + case Inferred() if bounds.hi.tpe.typeSymbol == TypeRepr.of[Any].typeSymbol => this case hi => this += " <: " printTypeTree(hi) @@ -1454,7 +1454,7 @@ object SourceCode { case '"' => "\\\"" case '\'' => "\\\'" case '\\' => "\\\\" - case _ => if ch.isControl then f"${"\\"}u${ch.toInt}%04x" else String.valueOf(ch).nn + case _ => if ch.isControl then f"${"\\"}u${ch.toInt}%04x" else String.valueOf(ch) } private def escapedString(str: String): String = str flatMap escapedChar @@ -1470,7 +1470,7 @@ object SourceCode { namesIndex(name0) = index + 1 val name = if index == 1 then name0 - else s"`$name0${index.toString.toCharArray.nn.map {x => (x - '0' + '₀').toChar}.mkString}`" + else s"`$name0${index.toString.toCharArray.map {x => (x - '0' + '₀').toChar}.mkString}`" names(sym) = name Some(name) } diff --git a/compiler/test-coursier/dotty/tools/coursier/CoursierScalaTests.scala index 86b22009d15a..f44774141947 100644 --- a/compiler/test-coursier/dotty/tools/coursier/CoursierScalaTests.scala +++ b/compiler/test-coursier/dotty/tools/coursier/CoursierScalaTests.scala @@ -159,7 +159,7 @@ object CoursierScalaTests: def csScalaCmd(options: String*): List[String] = csScalaCmdWithStdin(options, None) - + def csScalaCmdWithStdin(options: Seq[String], stdin: Option[String]): List[String] = csCmd("dotty.tools.MainGenericRunner", options, stdin) diff --git a/compiler/test-resources/repl/i13181 index 32f6c6e40c1e..c2c48f9d4214 100644 --- a/compiler/test-resources/repl/i13181 +++
b/compiler/test-resources/repl/i13181 @@ -1,2 +1,2 @@ scala> scala.compiletime.codeOf(1+2) -val res0: String = 1.+(2) +val res0: String = 1 + 2 diff --git a/compiler/test/dotc/neg-best-effort-pickling.excludelist b/compiler/test/dotc/neg-best-effort-pickling.excludelist index 13fd5669dd8a..52371ecf17a5 100644 --- a/compiler/test/dotc/neg-best-effort-pickling.excludelist +++ b/compiler/test/dotc/neg-best-effort-pickling.excludelist @@ -18,6 +18,7 @@ i20317a.scala # recursion limit exceeded i11226.scala # missing type i974.scala # cyclic reference i13864.scala # missing symbol in pickling +type-params.scala # recursion limit exceeded # semantic db generation fails in the first compilation i15158.scala # cyclic reference - stack overflow diff --git a/compiler/test/dotc/neg-best-effort-unpickling.excludelist b/compiler/test/dotc/neg-best-effort-unpickling.excludelist index d57f7e0176e8..9c20bf3ccc03 100644 --- a/compiler/test/dotc/neg-best-effort-unpickling.excludelist +++ b/compiler/test/dotc/neg-best-effort-unpickling.excludelist @@ -18,3 +18,6 @@ i18750.scala # Crash on invalid prefix ([A] =>> Int) i22357a.scala + +# `110 (of class java.lang.Integer)` +context-function-syntax.scala diff --git a/compiler/test/dotc/neg-init-global-scala2-library-tasty.excludelist b/compiler/test/dotc/neg-init-global-scala2-library-tasty.excludelist index 18a665e0119b..8c2a1d6ce5f4 100644 --- a/compiler/test/dotc/neg-init-global-scala2-library-tasty.excludelist +++ b/compiler/test/dotc/neg-init-global-scala2-library-tasty.excludelist @@ -15,4 +15,6 @@ global-list.scala t5366.scala mutable-read7.scala t9115.scala -Color.scala \ No newline at end of file +Color.scala +unapplySeq-implicit-arg2.scala +unapplySeq-implicit-arg3.scala \ No newline at end of file diff --git a/compiler/test/dotc/pos-test-pickling.excludelist b/compiler/test/dotc/pos-test-pickling.excludelist index 23c79affada0..28bce963bfd1 100644 --- a/compiler/test/dotc/pos-test-pickling.excludelist +++ b/compiler/test/dotc/pos-test-pickling.excludelist @@ -71,6 +71,7 @@ i18211.scala named-tuples1.scala i20897.scala i20512.scala +i22645b.scala # Opaque type i5720.scala diff --git a/compiler/test/dotc/run-test-pickling.excludelist b/compiler/test/dotc/run-test-pickling.excludelist index c880a4b78f23..3cc9bc5a4a9e 100644 --- a/compiler/test/dotc/run-test-pickling.excludelist +++ b/compiler/test/dotc/run-test-pickling.excludelist @@ -49,4 +49,7 @@ named-tuples-strawman-2.scala # typecheckErrors method unpickling typeCheckErrors.scala i18150.scala +i22968.scala +# Pickling differences with local parameters export forwarders of methods with into parameters. But their external type is the same +Parser.scala diff --git a/compiler/test/dotty/Properties.scala b/compiler/test/dotty/Properties.scala index 86e0788a3b8f..d937fff6242d 100644 --- a/compiler/test/dotty/Properties.scala +++ b/compiler/test/dotty/Properties.scala @@ -19,7 +19,6 @@ object Properties { /** Are we running on the CI? 
*/ val isRunByCI: Boolean = sys.env.isDefinedAt("DOTTY_CI_RUN") - || sys.env.isDefinedAt("DRONE") // TODO remove this when we drop Drone val testCache: Path = sys.env.get("DOTTY_TEST_CACHE").map(Paths.get(_)).getOrElse { diff --git a/compiler/test/dotty/tools/backend/jvm/StringConcatTest.scala b/compiler/test/dotty/tools/backend/jvm/StringConcatTest.scala index 14352106c5e8..613e72b32e52 100644 --- a/compiler/test/dotty/tools/backend/jvm/StringConcatTest.scala +++ b/compiler/test/dotty/tools/backend/jvm/StringConcatTest.scala @@ -122,7 +122,7 @@ class StringConcatTest extends DottyBytecodeTest { chsq: java.lang.CharSequence, chrs: Array[Char]) = { val s1 = str + obj + v + z + c + b + s + i + f + l + d + sbuf + chsq + chrs - val s2 = String.valueOf(obj).nn + str + v + z + c + b + s + i + f + l + d + sbuf + chsq + chrs + val s2 = String.valueOf(obj) + str + v + z + c + b + s + i + f + l + d + sbuf + chsq + chrs s1 + "//" + s2 } def sbuf = { val r = new java.lang.StringBuffer(); r.append("sbuf"); r } diff --git a/compiler/test/dotty/tools/debug/DebugTests.scala b/compiler/test/dotty/tools/debug/DebugTests.scala index 95bf5a2e52a6..e8f744286ba4 100644 --- a/compiler/test/dotty/tools/debug/DebugTests.scala +++ b/compiler/test/dotty/tools/debug/DebugTests.scala @@ -18,7 +18,8 @@ class DebugTests: implicit val testGroup: TestGroup = TestGroup("debug") CompilationTest.aggregateTests( compileFile("tests/debug-custom-args/eval-explicit-nulls.scala", TestConfiguration.explicitNullsOptions), - compileFilesInDir("tests/debug", TestConfiguration.defaultOptions) + compileFilesInDir("tests/debug", TestConfiguration.defaultOptions), + compileFilesInDir("tests/debug-preview", TestConfiguration.defaultOptions.and("-preview")) ).checkDebug() object DebugTests extends ParallelTesting: diff --git a/compiler/test/dotty/tools/dotc/CompilationTests.scala b/compiler/test/dotty/tools/dotc/CompilationTests.scala index e62c80d7bff7..2d2f01388374 100644 --- a/compiler/test/dotty/tools/dotc/CompilationTests.scala +++ b/compiler/test/dotty/tools/dotc/CompilationTests.scala @@ -37,7 +37,7 @@ class CompilationTests { compileFilesInDir("tests/pos-special/sourcepath/outer", defaultOptions.and("-sourcepath", "tests/pos-special/sourcepath")), compileFile("tests/pos-special/sourcepath/outer/nested/Test4.scala", defaultOptions.and("-sourcepath", "tests/pos-special/sourcepath")), compileFilesInDir("tests/pos-scala2", defaultOptions.and("-source", "3.0-migration")), - compileFilesInDir("tests/pos-custom-args/captures", defaultOptions.and("-language:experimental.captureChecking")), + compileFilesInDir("tests/pos-custom-args/captures", defaultOptions.and("-language:experimental.captureChecking", "-source", "3.8")), compileFile("tests/pos-special/utf8encoded.scala", defaultOptions.and("-encoding", "UTF8")), compileFile("tests/pos-special/utf16encoded.scala", defaultOptions.and("-encoding", "UTF16")), compileDir("tests/pos-special/i18589", defaultOptions.and("-Wsafe-init").without("-Ycheck:all")), @@ -83,7 +83,9 @@ class CompilationTests { compileFile("tests/rewrites/ambiguous-named-tuple-assignment.scala", defaultOptions.and("-rewrite", "-source:3.6-migration")), compileFile("tests/rewrites/i21382.scala", defaultOptions.and("-indent", "-rewrite")), compileFile("tests/rewrites/unused.scala", defaultOptions.and("-rewrite", "-Wunused:all")), - compileFile("tests/rewrites/i22440.scala", defaultOptions.and("-rewrite")) + compileFile("tests/rewrites/i22440.scala", defaultOptions.and("-rewrite")), + 
compileFile("tests/rewrites/i22731.scala", defaultOptions.and("-rewrite", "-source:3.7-migration")), + compileFile("tests/rewrites/i22731b.scala", defaultOptions.and("-rewrite", "-source:3.7-migration")), ).checkRewrites() } @@ -144,7 +146,7 @@ class CompilationTests { aggregateTests( compileFilesInDir("tests/neg", defaultOptions, FileFilter.exclude(TestSources.negScala2LibraryTastyExcludelisted)), compileFilesInDir("tests/neg-deep-subtype", allowDeepSubtypes), - compileFilesInDir("tests/neg-custom-args/captures", defaultOptions.and("-language:experimental.captureChecking")), + compileFilesInDir("tests/neg-custom-args/captures", defaultOptions.and("-language:experimental.captureChecking", "-source", "3.8")), compileFile("tests/neg-custom-args/sourcepath/outer/nested/Test1.scala", defaultOptions.and("-sourcepath", "tests/neg-custom-args/sourcepath")), compileDir("tests/neg-custom-args/sourcepath2/hi", defaultOptions.and("-sourcepath", "tests/neg-custom-args/sourcepath2", "-Xfatal-warnings")), compileList("duplicate source", List( @@ -167,7 +169,7 @@ class CompilationTests { aggregateTests( compileFilesInDir("tests/run", defaultOptions.and("-Wsafe-init")), compileFilesInDir("tests/run-deep-subtype", allowDeepSubtypes), - compileFilesInDir("tests/run-custom-args/captures", allowDeepSubtypes.and("-language:experimental.captureChecking")), + compileFilesInDir("tests/run-custom-args/captures", allowDeepSubtypes.and("-language:experimental.captureChecking", "-source", "3.8")), // Run tests for legacy lazy vals. compileFilesInDir("tests/run", defaultOptions.and("-Wsafe-init", "-Ylegacy-lazy-vals", "-Ycheck-constraint-deps"), FileFilter.include(TestSources.runLazyValsAllowlist)), ).checkRuns() @@ -273,20 +275,45 @@ class CompilationTests { * compatible, but (b) and (c) are not. If (b) and (c) are compiled together, there should be * an error when reading the files' TASTy trees. 
*/ locally { - val tastyErrorGroup = TestGroup("checkInit/tasty-error") + val tastyErrorGroup = TestGroup("checkInit/tasty-error/val-or-defdef") val tastyErrorOptions = options.without("-Xfatal-warnings") - val a0Dir = defaultOutputDir + tastyErrorGroup + "/A/v0/A" - val a1Dir = defaultOutputDir + tastyErrorGroup + "/A/v1/A" - val b1Dir = defaultOutputDir + tastyErrorGroup + "/B/v1/B" + val classA0 = defaultOutputDir + tastyErrorGroup + "/A/v0/A" + val classA1 = defaultOutputDir + tastyErrorGroup + "/A/v1/A" + val classB1 = defaultOutputDir + tastyErrorGroup + "/B/v1/B" val tests = List( - compileFile("tests/init/tasty-error/v1/A.scala", tastyErrorOptions)(tastyErrorGroup), - compileFile("tests/init/tasty-error/v1/B.scala", tastyErrorOptions.withClasspath(a1Dir))(tastyErrorGroup), - compileFile("tests/init/tasty-error/v0/A.scala", tastyErrorOptions)(tastyErrorGroup), + compileFile("tests/init/tasty-error/val-or-defdef/v1/A.scala", tastyErrorOptions)(tastyErrorGroup), + compileFile("tests/init/tasty-error/val-or-defdef/v1/B.scala", tastyErrorOptions.withClasspath(classA1))(tastyErrorGroup), + compileFile("tests/init/tasty-error/val-or-defdef/v0/A.scala", tastyErrorOptions)(tastyErrorGroup), ).map(_.keepOutput.checkCompile()) - compileFile("tests/init/tasty-error/Main.scala", tastyErrorOptions.withClasspath(a0Dir).withClasspath(b1Dir))(tastyErrorGroup).checkExpectedErrors() + compileFile("tests/init/tasty-error/val-or-defdef/Main.scala", tastyErrorOptions.withClasspath(classA0).withClasspath(classB1))(tastyErrorGroup).checkExpectedErrors() + + tests.foreach(_.delete()) + } + + /* This tests for errors in the program's TASTy trees. + * The test consists of five files: Main, C, v1/A, v1/B, and v0/A. The files v1/A, v1/B, and v0/A all depend on C. v1/A and v1/B are + * compatible, but v1/B and v0/A are not. If v1/B and v0/A are compiled together, there should be + * an error when reading the files' TASTy trees. This fact is demonstrated by the compilation of Main. 
*/ + locally { + val tastyErrorGroup = TestGroup("checkInit/tasty-error/typedef") + val tastyErrorOptions = options.without("-Xfatal-warnings").without("-Ycheck:all") + + val classC = defaultOutputDir + tastyErrorGroup + "/C/typedef/C" + val classA0 = defaultOutputDir + tastyErrorGroup + "/A/v0/A" + val classA1 = defaultOutputDir + tastyErrorGroup + "/A/v1/A" + val classB1 = defaultOutputDir + tastyErrorGroup + "/B/v1/B" + + val tests = List( + compileFile("tests/init/tasty-error/typedef/C.scala", tastyErrorOptions)(tastyErrorGroup), + compileFile("tests/init/tasty-error/typedef/v1/A.scala", tastyErrorOptions.withClasspath(classC))(tastyErrorGroup), + compileFile("tests/init/tasty-error/typedef/v1/B.scala", tastyErrorOptions.withClasspath(classC).withClasspath(classA1))(tastyErrorGroup), + compileFile("tests/init/tasty-error/typedef/v0/A.scala", tastyErrorOptions.withClasspath(classC))(tastyErrorGroup), + ).map(_.keepOutput.checkCompile()) + + compileFile("tests/init/tasty-error/typedef/Main.scala", tastyErrorOptions.withClasspath(classC).withClasspath(classA0).withClasspath(classB1))(tastyErrorGroup).checkExpectedErrors() tests.foreach(_.delete()) } diff --git a/compiler/test/dotty/tools/dotc/SettingsTests.scala b/compiler/test/dotty/tools/dotc/SettingsTests.scala index 996ab22f67b1..24549ade4d23 100644 --- a/compiler/test/dotty/tools/dotc/SettingsTests.scala +++ b/compiler/test/dotty/tools/dotc/SettingsTests.scala @@ -1,8 +1,6 @@ package dotty.tools package dotc -import scala.language.unsafeNulls - import reporting.StoreReporter import vulpix.TestConfiguration @@ -205,7 +203,7 @@ class SettingsTests { } @Test def `Output setting is overriding existing jar`: Unit = - val result = Using.resource(Files.createTempFile("myfile", ".jar").nn){ file => + val result = Using.resource(Files.createTempFile("myfile", ".jar")){ file => object Settings extends SettingGroup: val defaultDir = new PlainDirectory(Directory(".")) val testOutput = OutputSetting(RootSetting, "testOutput", "testOutput", "", defaultDir) @@ -224,7 +222,7 @@ class SettingsTests { @Test def `Output setting is respecting previous setting`: Unit = val result = Using.resources( - Files.createTempFile("myfile", ".jar").nn, Files.createTempFile("myfile2", ".jar").nn + Files.createTempFile("myfile", ".jar"), Files.createTempFile("myfile2", ".jar") ){ (file1, file2) => object Settings extends SettingGroup: val defaultDir = new PlainDirectory(Directory(".")) @@ -247,10 +245,10 @@ class SettingsTests { assertNotEquals(file1StateBefore, String(Files.readAllBytes(file1))) assertEquals(file2StateBefore, String(Files.readAllBytes(file2))) - }(Files.deleteIfExists(_), Files.deleteIfExists(_)) + }(using Files.deleteIfExists(_), Files.deleteIfExists(_)) @Test def `Output side effect is not present when setting is deprecated`: Unit = - val result = Using.resource(Files.createTempFile("myfile", ".jar").nn){ file => + val result = Using.resource(Files.createTempFile("myfile", ".jar")){ file => object Settings extends SettingGroup: val defaultDir = new PlainDirectory(Directory(".")) val testOutput = OutputSetting(RootSetting, "testOutput", "testOutput", "", defaultDir, preferPrevious = true, deprecation = Deprecation.renamed("XtestOutput")) diff --git a/compiler/test/dotty/tools/dotc/TupleShowTests.scala b/compiler/test/dotty/tools/dotc/TupleShowTests.scala index 2d76c480b001..88e0587d7d71 100644 --- a/compiler/test/dotty/tools/dotc/TupleShowTests.scala +++ b/compiler/test/dotty/tools/dotc/TupleShowTests.scala @@ -71,7 +71,7 @@ class TupleShowTests 
extends DottyTest: /** On Windows the string literal in this test source file will be read with `\n` (b/c of "-encoding UTF8") * but the compiler will correctly emit \r\n as the line separator. * So we align the expected result to faithfully compare test results. */ - extension (str: String) def normEOL = if EOL == "\n" then str else str.replace("\n", EOL).nn + extension (str: String) def normEOL = if EOL == "\n" then str else str.replace("\n", EOL) def diff(exp: String, obt: String) = val min = math.min(exp.length, obt.length) diff --git a/compiler/test/dotty/tools/dotc/config/ScalaSettingsTests.scala b/compiler/test/dotty/tools/dotc/config/ScalaSettingsTests.scala index 07834684d33b..c74be4901137 100644 --- a/compiler/test/dotty/tools/dotc/config/ScalaSettingsTests.scala +++ b/compiler/test/dotty/tools/dotc/config/ScalaSettingsTests.scala @@ -299,4 +299,11 @@ class ScalaSettingsTests: ) assertEquals(result, Right(reporting.Action.Error)) + @Test def `illegal source versions are not accepted when parsing the settings`: Unit = + for source <- SourceVersion.illegalInSettings do + val settings = ScalaSettings + val result = settings.processArguments(List("-source", source.toString()), true) + assertEquals(0, result.warnings.length) + assertEquals(1, result.errors.length) + end ScalaSettingsTests diff --git a/compiler/test/dotty/tools/dotc/core/NameOpsTest.scala b/compiler/test/dotty/tools/dotc/core/NameOpsTest.scala new file mode 100644 index 000000000000..947ca482fd6f --- /dev/null +++ b/compiler/test/dotty/tools/dotc/core/NameOpsTest.scala @@ -0,0 +1,15 @@ +package dotty.tools.dotc.core + +import dotty.tools.dotc.core.NameOps.isOperatorName +import dotty.tools.dotc.core.Names.{termName, SimpleName} + +import org.junit.Test + +class NameOpsTest: + @Test def isOperatorNamePos: Unit = + for name <- List("+", "::", "frozen_=:=", "$_+", "a2_+", "a_b_+") do + assert(isOperatorName(termName(name))) + + @Test def isOperatorNameNeg: Unit = + for name <- List("foo", "*_*", "", "$reserved", "a*", "2*") do + assert(!isOperatorName(termName(name))) diff --git a/compiler/test/dotty/tools/dotc/core/tasty/PathPicklingTest.scala b/compiler/test/dotty/tools/dotc/core/tasty/PathPicklingTest.scala index 326a2dc87b2a..12d8425a32d6 100644 --- a/compiler/test/dotty/tools/dotc/core/tasty/PathPicklingTest.scala +++ b/compiler/test/dotty/tools/dotc/core/tasty/PathPicklingTest.scala @@ -47,7 +47,7 @@ class PathPicklingTest { val sb = new StringBuffer val jar = JarArchive.open(Path(s"$out/out.jar"), create = false) try - for file <- jar.iterator() if file.name.endsWith(".tasty") do + for file <- jar.iterator if file.name.endsWith(".tasty") do sb.append(TastyPrinter.showContents(file.toByteArray, noColor = true, isBestEffortTasty = false)) finally jar.close() sb.toString() diff --git a/compiler/test/dotty/tools/dotc/core/tasty/TastyHeaderUnpicklerTest.scala b/compiler/test/dotty/tools/dotc/core/tasty/TastyHeaderUnpicklerTest.scala index bde4246ef0f0..ddc88803854f 100644 --- a/compiler/test/dotty/tools/dotc/core/tasty/TastyHeaderUnpicklerTest.scala +++ b/compiler/test/dotty/tools/dotc/core/tasty/TastyHeaderUnpicklerTest.scala @@ -265,7 +265,7 @@ class TastyHeaderUnpicklerTest { object TastyHeaderUnpicklerTest { def fillHeader(maj: Int, min: Int, exp: Int, compiler: String): TastyBuffer = { - val compilerBytes = compiler.getBytes(java.nio.charset.StandardCharsets.UTF_8).nn + val compilerBytes = compiler.getBytes(java.nio.charset.StandardCharsets.UTF_8) val buf = new TastyBuffer(header.length + 32 + compilerBytes.length) 
for (ch <- header) buf.writeByte(ch.toByte) buf.writeNat(maj) @@ -304,7 +304,7 @@ object TastyHeaderUnpicklerTest { fail() } catch { - case err: UnpickleException => assert(err.getMessage.nn.contains(message)) + case err: UnpickleException => assert(err.getMessage.contains(message)) } } diff --git a/compiler/test/dotty/tools/dotc/reporting/CodeActionTest.scala b/compiler/test/dotty/tools/dotc/reporting/CodeActionTest.scala index 870da08dcfba..ffc6762cc8c7 100644 --- a/compiler/test/dotty/tools/dotc/reporting/CodeActionTest.scala +++ b/compiler/test/dotty/tools/dotc/reporting/CodeActionTest.scala @@ -54,6 +54,19 @@ class CodeActionTest extends DottyTest: |""".stripMargin ) + @Test def addUsingClause = + checkCodeAction( + """|object Test: + | def foo(implicit a: Int) = a + | foo(123) + |""".stripMargin, + "Add `using` clause", + """|object Test: + | def foo(implicit a: Int) = a + | foo(using 123) + |""".stripMargin + ) + @Test def insertMissingCases = checkCodeAction( code = diff --git a/compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala b/compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala index 7c6a27ea7422..c0a609b226a8 100644 --- a/compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala +++ b/compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala @@ -35,7 +35,7 @@ class PatmatExhaustivityTest { e.printStackTrace(printWriter) } - stringBuffer.toString.trim.nn.replaceAll("\\s+\n", "\n") match { + stringBuffer.toString.trim.replaceAll("\\s+\n", "\n") match { case "" => Nil case s => s.linesIterator.toSeq } diff --git a/compiler/test/dotty/tools/repl/ReplCompilerTests.scala b/compiler/test/dotty/tools/repl/ReplCompilerTests.scala index d32b28647c32..0592cbbed1be 100644 --- a/compiler/test/dotty/tools/repl/ReplCompilerTests.scala +++ b/compiler/test/dotty/tools/repl/ReplCompilerTests.scala @@ -511,6 +511,16 @@ class ReplCompilerTests extends ReplTest: val all = lines() assertTrue(hints.forall(hint => all.exists(_.contains(hint)))) + @Test def `i22844 regression colon eol`: Unit = initially: + run: + """|println: + | "hello, world" + |""".stripMargin // outdent, but this test does not exercise the bug + assertEquals(List("hello, world"), lines()) + + @Test def `i22844b regression colon arrow eol`: Unit = contextually: + assertTrue(ParseResult.isIncomplete("List(42).map: x =>")) + object ReplCompilerTests: private val pattern = Pattern.compile("\\r[\\n]?|\\n"); diff --git a/compiler/test/dotty/tools/utils.scala b/compiler/test/dotty/tools/utils.scala index c33310acf06e..14981e001d38 100644 --- a/compiler/test/dotty/tools/utils.scala +++ b/compiler/test/dotty/tools/utils.scala @@ -41,7 +41,7 @@ extension (str: String) def dropExtension = private def withFile[T](file: File)(action: Source => T): T = resource(Source.fromFile(file, UTF_8.name))(action) -def readLines(f: File): List[String] = withFile(f)(_.getLines.toList) +def readLines(f: File): List[String] = withFile(f)(_.getLines().toList) def readFile(f: File): String = withFile(f)(_.mkString) private object Unthrown extends ControlThrowable @@ -124,6 +124,7 @@ private val toolArg = raw"(?://|/\*| \*) ?(?i:(${ToolName.values.mkString("|")}) private val directiveOptionsArg = raw"//> using options (.*)".r.unanchored private val directiveJavacOptions = raw"//> using javacOpt (.*)".r.unanchored private val directiveTargetOptions = raw"//> using target.platform (jvm|scala-js)".r.unanchored +private val directiveUnsupported = raw"//> using (scala) (.*)".r.unanchored private val 
directiveUnknown = raw"//> using (.*)".r.unanchored // Inspect the lines for compiler options of the form @@ -141,6 +142,7 @@ def toolArgsParse(lines: List[String], filename: Option[String]): List[(String,S case directiveOptionsArg(args) => List(("scalac", args)) case directiveJavacOptions(args) => List(("javac", args)) case directiveTargetOptions(platform) => List(("target", platform)) + case directiveUnsupported(name, args) => Nil case directiveUnknown(rest) => sys.error(s"Unknown directive: `//> using ${CommandLineParser.tokenize(rest).headOption.getOrElse("''")}`${filename.fold("")(f => s" in file $f")}") case _ => Nil } diff --git a/compiler/test/dotty/tools/vulpix/ParallelTesting.scala b/compiler/test/dotty/tools/vulpix/ParallelTesting.scala index 12a53a19931d..35fbb6e5fb14 100644 --- a/compiler/test/dotty/tools/vulpix/ParallelTesting.scala +++ b/compiler/test/dotty/tools/vulpix/ParallelTesting.scala @@ -838,7 +838,7 @@ trait ParallelTesting extends RunnerOrchestration { self => count += 1 for file <- files if isSourceFile(file) do Using.resource(Source.fromFile(file, StandardCharsets.UTF_8.name)) { source => - source.getLines.zipWithIndex.foreach: (line, lineNbr) => + source.getLines().zipWithIndex.foreach: (line, lineNbr) => comment.findAllMatchIn(line).foreach: case comment("nopos-") => bump("nopos") case _ => bump(s"${file.getPath}:${lineNbr+1}") @@ -980,7 +980,7 @@ trait ParallelTesting extends RunnerOrchestration { self => expectedErrors += 1 files.filter(isSourceFile).foreach { file => Using(Source.fromFile(file, StandardCharsets.UTF_8.name)) { source => - source.getLines.zipWithIndex.foreach { case (line, lineNbr) => + source.getLines().zipWithIndex.foreach { case (line, lineNbr) => comment.findAllMatchIn(line).foreach { m => m.group(2) match case prefix if m.group(1).isEmpty => diff --git a/compiler/test/dotty/tools/vulpix/VulpixUnitTests.scala b/compiler/test/dotty/tools/vulpix/VulpixUnitTests.scala index baf61c845d96..ab8a611caa33 100644 --- a/compiler/test/dotty/tools/vulpix/VulpixUnitTests.scala +++ b/compiler/test/dotty/tools/vulpix/VulpixUnitTests.scala @@ -105,7 +105,7 @@ object VulpixUnitTests extends ParallelTesting { def maxDuration = 3.seconds def numberOfSlaves = 5 def safeMode = sys.env.get("SAFEMODE").isDefined - def isInteractive = !sys.env.contains("DRONE") + def isInteractive = !sys.env.contains("DOTTY_CI_RUN") def testFilter = Nil def updateCheckFiles: Boolean = false def failedTests = None diff --git a/docs/_docs/contributing/setting-up-your-ide.md b/docs/_docs/contributing/setting-up-your-ide.md index 3779ce1c3403..690b42a37a51 100644 --- a/docs/_docs/contributing/setting-up-your-ide.md +++ b/docs/_docs/contributing/setting-up-your-ide.md @@ -42,7 +42,15 @@ want to make sure you do two things: + val enableBspAllProjects = true, ``` -2. Run `sbt publishLocal` to get the needed presentation compiler jars. +2. Run in sbt shell `sbt> scala3-bootstrapped/compile` and then `sbt> scala3-bootstrapped/publishLocalBin` + to get the required presentation compiler jars. + + If any step fails due to random errors, try removing `./out/` directory and running `sbt> clean` + + This step has to be repeated every time compiler version has been bumped. + + + By default Metals uses Bloop build server, however you can also use sbt directly. 
You can achieve this with the `Metals: Switch Build Server` command diff --git a/docs/_docs/internals/exclusive-capabilities.md b/docs/_docs/internals/exclusive-capabilities.md new file mode 100644 index 000000000000..97c6592ac693 --- /dev/null +++ b/docs/_docs/internals/exclusive-capabilities.md @@ -0,0 +1,551 @@ +# Exclusive Capabilities + +Language design draft + + +## Capability Kinds + +A capability is called + - _exclusive_ if it is `cap` or it has an exclusive capability in its capture set. + - _shared_ otherwise. + +There is a new top capability `shared` which can be used as a capability for deriving shared capture sets. Other shared capabilities are created as read-only versions of exclusive capabilities. + +## Update Methods + +We introduce a new trait +```scala +trait Mutable +``` +It is used as a base trait for types that define _update methods_ using +a new modifier `mut`. + +`mut` can only be used in classes or objects extending `Mutable`. An update method is allowed to access exclusive capabilities in the method's environment. By contrast, a normal method in a type extending `Mutable` may access exclusive capabilities only if they are defined locally or passed to it in parameters. + +**Example:** +```scala +class Ref(init: Int) extends Mutable: + private var current = init + def get: Int = current + mut def put(x: Int): Unit = current = x +``` +Here, `put` needs to be declared as an update method since it accesses the exclusive write capability of the variable `current` in its environment. +`mut` can also be used on an inner class of a class or object extending `Mutable`. It gives all code in the class the right +to access exclusive capabilities in the class environment. Normal classes +can only access exclusive capabilities defined in the class or passed to it in parameters. + +```scala +object Registry extends Mutable: + var count = 0 + mut class Counter: + mut def next: Int = + count += 1 + count +``` +Normal method members of `Mutable` classes cannot call update methods. This is indicated since accesses in the callee are recorded in the caller. So if the callee captures exclusive capabilities so does the caller. + +An update method cannot implement or override a normal method, whereas normal methods may implement or override update methods. Since methods such as `toString` or `==` inherited from Object are normal methods, it follows that none of these methods may be implemented as an update method. + +The `apply` method of a function type is also a normal method, hence `Mutable` classes may not implement a function type with an update method as the `apply` method. + +## Mutable Types + +A type is called a _mutable_ if it extends `Mutable` and it has an update method or an update class as non-private member or constructor. + +When we create an instance of a mutable type we always add `cap` to its capture set. For instance, if class `Ref` is declared as shown previously then `new Ref(1)` has type `Ref[Int]^{cap}`. + +**Restriction:** A non-mutable type cannot be downcast by a pattern match to a mutable type. + +**Definition:** A class is _read_only_ if the following conditions are met: + + 1. It does not extend any exclusive capabilities from its environment. + 2. It does not take parameters with exclusive capabilities. + 3. It does not contain mutable fields, or fields that take exclusive capabilities. + +**Restriction:** If a class or trait extends `Mutable` all its parent classes or traits must either extend `Mutable` or be read-only. 
+ +The idea is that when we upcast a reference to a type extending `Mutable` to a type that does not extend `Mutable`, we cannot possibly call a method on this reference that uses an exclusive capability. Indeed, by the previous restriction this class must be a read-only class, which means that none of the code implemented +in the class can access exclusive capabilities on its own. And we +also cannot override any of the methods of this class with a method +accessing exclusive capabilities, since such a method would have +to be an update method and update methods are not allowed to override regular methods. + + + +**Example:** + +Consider trait `IterableOnce` from the standard library. + +```scala +trait IterableOnce[+T] extends Mutable: + def iterator: Iterator[T]^{this} + mut def foreach(op: T => Unit): Unit + mut def exists(op: T => Boolean): Boolean + ... +``` +The trait is a mutable type with many update methods, among them `foreach` and `exists`. These need to be classified as `mut` because their implementation in the subtrait `Iterator` uses the update method `next`. +```scala +trait Iterator[T] extends IterableOnce[T]: + def iterator = this + def hasNext: Boolean + mut def next(): T + mut def foreach(op: T => Unit): Unit = ... + mut def exists(op; T => Boolean): Boolean = ... + ... +``` +But there are other implementations of `IterableOnce` that are not mutable types (even though they do indirectly extend the `Mutable` trait). Notably, collection classes implement `IterableOnce` by creating a fresh +`iterator` each time one is required. The mutation via `next()` is then restricted to the state of that iterator, whereas the underlying collection is unaffected. These implementations would implement each `mut` method in `IterableOnce` by a normal method without the `mut` modifier. + +```scala +trait Iterable[T] extends IterableOnce[T]: + def iterator = new Iterator[T] { ... } + def foreach(op: T => Unit) = iterator.foreach(op) + def exists(op: T => Boolean) = iterator.exists(op) +``` +Here, `Iterable` is not a mutable type since it has no update method as member. +All inherited update methods are (re-)implemented by normal methods. + +**Note:** One might think that we don't need a base trait `Mutable` since in any case +a mutable type is defined by the presence of update methods, not by what it extends. In fact the importance of `Mutable` is that it defines _the other methods_ as read-only methods that _cannot_ access exclusive capabilities. For types not extending `Mutable`, this is not the case. For instance, the `apply` method of a function type is not an update method and the type itself does not extend `Mutable`. But `apply` may well be implemented by +a method that accesses exclusive capabilities. + + + +## Read-only Capabilities + +If `x` is an exclusive capability of a type extending `Mutable`, `x.rd` is its associated, shared _read-only_ capability. + +`shared` can be understood as the read-only capability corresponding to `cap`. +```scala + shared = cap.rd +``` + +A _top capability_ is either `cap` or `shared`. + + +## Shorthands + +**Meaning of `^`:** + +The meaning of `^` and `=>` is the same as before: + + - `C^` means `C^{cap}`. + - `A => B` means `(A -> B)^{cap}`. + +**Implicitly added capture sets** + +A reference to a type extending any of the traits `Capability` or `Mutable` gets an implicit capture set `{shared}` in case no explicit capture set is given. 
+ +For instance, a matrix multiplication method can be expressed as follows: + +```scala +class Matrix(nrows: Int, ncols: Int) extends Mutable: + mut def update(i: Int, j: Int, x: Double): Unit = ... + def apply(i: Int, j: Int): Double = ... + +def mul(a: Matrix, b: Matrix, c: Matrix^): Unit = + // multiply a and b, storing the result in c +``` +Here, `a` and `b` are implicitly read-only, and `c`'s type has capture set `cap`. I.e. with explicit capture sets this would read: +```scala +def mul(a: Matrix^{shared}, b: Matrix^{shared}, c: Matrix^{cap}): Unit +``` +Separation checking will then make sure that `a` and `b` must be different from `c`. + + +## Capture Sets + +As the previous example showed, we would like to use a `Mutable` type such as `Array` or `Matrix` in two permission levels: read-only and unrestricted. A standard technique is to invent a type qualifier such as "read-only" or "mutable" to indicate access permissions. What we would like to do instead is to combine the qualifier with the capture set of a type. So we +distinguish two kinds of capture sets: regular and read-only. Read-only sets can contain only shared capabilities. + +Internally, in the discussion that follows we use a label after the set to indicate its mode. `{...}_` is regular and `{...}rd` is read-only. We could envisage source language to specify read-only sets, e.g. something like + +```scala +{io, async}.rd +``` + +But in almost all cases we don't need an explicit mode in source code to indicate the kind of capture set, since the contents of the set itself tell us what kind it is. A capture set is assumed to be read-only if it is on a +type extending `Mutable` and it contains only shared capabilities, otherwise it is assumed to be regular. + +The read-only function `ro` maps capture sets to read-only capture sets. It is defined pointwise on capabilities as follows: + + - `ro ({ x1, ..., xn } _) = { ro(x1), ..., ro(xn) }` + - `ro(x) = x` if `x` is shared + - `ro(x) = x.rd` if `x` is exclusive + + + +## Subcapturing + +Subcapturing has to take the mode of capture sets into account. We let `m` stand for arbitrary modes. + +1. Rule (sc-var) comes in two variants. If `x` is defined as `S^C` then + + - `{x, xs} m <: (C u {xs}) m` + - `{x.rd, xs} m <: (ro(C) u {xs}) m` + +3. The subset rule works only between sets of the same kind: + + - `C _ <: C _ u {x}` + - `C rd <: C rd u {x}` if `x` is a shared capability. + +4. We can map regular capture sets to read-only sets: + + - `C _ <: ro(C) rd` + +5. Read-only capabilities in regular capture sets can be widened to exclusive capabilities: + + - `{x.rd, xs} _ <: {x, xs} _` + +One case where an explicit capture set mode would be useful concerns +refinements of type variable bounds, as in the following example. +```scala +class A: + type T <: Object^{x.rd, y} +class B extends A: + type T <: Object^{x.rd} +class C extends B: + type T = Matrix^{x.rd} +``` +We assume that `x` and `y` are exclusive capabilities. +The capture set of type `T` in class `C` is a read-only set since `Matrix` extends `Mutable`. But the capture sets of the occurrences of +`T` in `A` and `B` are regular. This leads to an error in bounds checking +the definition of `T` in `C` against the one in `B` +since read-only sets do not subcapture regular sets. 
We can fix the +problem by declaring the capture set in class `B` as read-only: +```scala +class B extends A: + type T <: Object^{x.rd}.rd +``` +But now a different problem arises since the capture set of `T` in `B` is +read-only but the capture set of `T` in `A` is regular. The capture set of +`T` in `A` cannot be made read-only since it contains an exclusive capability `y`. So we'd have to drop `y` and declare class `A` like this: +```scala +class A: + type T <: Object^{x.rd}.rd +``` + + + +## Accesses to Mutable Types + +A _read-only access_ is a reference `x` to a type extending `Mutable` with a regular capture set if the expected type is one of the following: + + - a value type that is not a mutable type, or + - a select prototype with a member that is a normal method or class (not an update method or class). + +A read-only access contributes the read-only capability `x.rd` to its environment (as formalized by _cv_). Other accesses contribute the full capability `x`. + +A reference `p.m` to an update method or class `m` of a mutable type is allowed only if `p`'s capture set is regular. + +If `e` is an expression of a type `T^cs` extending `Mutable` and the expected type is a value type that is not a mutable type, then the type of `e` is mapped to `T^ro(cs)`. + + +## Expression Typing + +An expression's type should never contain a top capability in its deep capture set. This is achieved by the following rules: + + - On var access `x`: + + - replace all direct capture sets with `x` + - replace all boxed caps with `x*` + + _Variant_: If the type of the typevar corresponding to a boxed cap can be uniquely reached by a path `this.p`, replace the `cap` with `x.p*`. + + - On select `t.foo` where `C` is the capture set of `t`: apply the SELECT rule, which amounts to: + + - replace all direct caps with `C` + - replace all boxed caps with `C*` + + - On applications: `t(args)`, `new C(args)` if the result type `T` contains `cap` (deeply): + + - create a fresh skolem `val sk: T` + - set result type to `sk.type` + + Skolem symbols are eliminated before they reach the type of the enclosing val or def. + + - When avoiding a variable in a local block, as in: + ```scala + { val x: T^ = ...; ... r: List[T^{x}] } + ``` + where the capture set of `x` contains a top capability, + replace `x` by a fresh skolem `val sk: T`. Alternatively: keep it as is, but don't widen it. + + +## Post Processing Right Hand Sides + +The type of the right hand sides of `val`s or `def`s is post-processed before it becomes the inferred type or is compared with the declared type. Post processing +means that all local skolems in the type are avoided, which might mean `cap` can now occur in the type. + +However, if a local skolem `sk` has `cap` as underlying type, but is only used +in its read-only form `sk.rd` in the result type, we can drop the skolem instead of widening to `shared`. + +**Example:** + +```scala + def f(x: Int): Double = ... + + def precomputed(n: Int)(f: Int -> Double): Int -> Double = + val a: Array[Double]^ = Array.tabulate(n)(f) + a(_) +``` +Here, `Array.tabulate(n)(f)` returns a value of type `Array[Double]^{cap}`. +The last expression `a(_)` expands to the closure `idx => a(idx)`, which +has type `Int ->{a.rd} Double`, since `a` appears only in the context of a +selection with the `apply` method of `Array`, which is not an update method. The type of the enclosing block then has type `Int ->{sk.rd} Double` for a fresh skolem `sk`, +since `a` is no longer visible.
After post processing, this type becomes +`Int -> Double`. + +This pattern allows to use mutation in the construction of a local data structure, returning a pure result when the construction is done. Such +data structures are said to have _transient mutability_. + +## Separation checking + +Separation checking checks that we don't have hidden aliases. A hidden alias arises when we have two definitions `x` and `y` with overlapping transitive capture sets that are not manifest in the types of `x` and `y` because one of these types has widened the alias to a top capability. + +Since expression types can't mention cap, widening happens only + - when passing an argument to a parameter + - when widening to a declared (result) type of a val or def + +**Definitions:** + + - The _transitive capture set_ `tcs(c)` of a capability `c` with underlying capture set `C` is `c` itself, plus the transitive capture set of `C`, but excluding `cap` or `shared`. + + - The _transitive capture set_ `tcs(C)` of a capture set C is the union + of `tcs(c)` for all elements `c` of `C`. + + - Two capture sets _interfere_ if one contains an exclusive capability `x` and the other + also contains `x` or contains the read-only capability `x.rd`. + + - If `C1 <: C2` and `C2` contains a top capability, then let `C2a` be `C2` without top capabilities. The hidden set `hidden(C1, C2)` of `C1` relative to `C2` is the smallest subset `C1h` of `C1` such that `C1 \ C1h <: C2a`. + + - If `T1 <: T2` then let the hidden set `hidden(T1, T2)` of `T1` relative to `T2` be the + union of all hidden sets of corresponding capture sets in `T1` and `T2`. + + +**Algorithm outline:** + + - Associate _shadowed sets_ with blocks, template statement sequences, applications, and val symbols. The idea is that a shadowed set gets populated when a capture reference is widened to cap. In that case the original references that were widened get added to the set. + + - After processing a `val x: T2 = t` with `t: T1` after post-processing: + + - If `T2` is declared, add `tcs(hidden(T1, T2))` to the shadowed set + of the enclosing statement sequence and remember it as `shadowed(x)`. + - If`T2` is inferred, add `tcs(T1)` to the shadowed set + of the enclosing statement sequence and remember it as `shadowed(x)`. + + - When processing the right hand side of a `def f(params): T2 = t` with `t: T1` after post-processing + + - If `T2` is declared, check that `shadowed*(hidden(T1, T2))` contains only local values (including skolems). + - If `T2` is inferred, check that `shadowed*(tcs(T1))` contains only local values (including skolems). + + Here, `shadowed*` is the transitive closure of `shadowed`. + + - When processing an application `p.f(arg1, ..., arg_n)`, after processing `p`, add its transitive capture set to the shadowed set of the call. Then, in sequence, process each argument by adding `tcs(hidden(T1, T2))` to the shadowed set of the call, where `T1` is the argument type and `T2` is the type of the formal parameter. + + - When adding a reference `r` or capture set `C` in `markFree` to enclosing environments, check that `tcs(r)` (respectively, `tcs(C)`) does not interfere with an enclosing shadowed set. + + +This requires, first, a linear processing of the program in evaluation order, and, second, that all capture sets are known. Normal rechecking violates both of these requirements. First, definitions +without declared result types are lazily rechecked using completers. Second, capture sets are constructed +incrementally. 
So we probably need a second scan after rechecking proper. In order not to duplicate work, we need to record during rechecking all additions to environments via `markFree`. + +**Notes:** + + - Mutable variables are not allowed to have top capabilities in their deep capture sets, so separation checking is not needed for checking var definitions or assignments. + + - A lazy val can be thought of conceptually as a value with possibly a capturing type and as a method computing that value. A reference to a lazy val is interpreted as a call to that method. Its use set is the reference to the lazy val itself as well as the use set of the called method. + +## Escape Checking + +The rules for separation checking also check that capabilities do not escape. Separate +rules for explicitly preventing `cap` from being boxed or unboxed are not needed anymore. Consider the canonical `withFile` example: +```scala +def withFile[T](body: File^ => T): T = + ... + +withFile: f => + () => f.write("too late") +``` +Here, the argument to `withFile` has the dependent function type +```scala +(f: File^) -> () ->{f} Unit +``` +A non-dependent type is required so the expected result type of the closure is +``` +() ->{cap} Unit +``` +When typing a closure, we type an anonymous function. The result type of that function is determined by type inference. That means the generated closure looks like this +```scala +{ def $anon(f: File^): () ->{cap} Unit = + () => f.write("too late") + $anon +} +``` +By the rules of separation checking the hidden set of the body of `$anon` is `f`, which refers +to a value outside the rhs of `$anon`. This is illegal according to separation checking. + +In the last example, `f: File^` was an exclusive capability. But it could equally have been a shared capability, i.e. `withFile` could be formulated as follows: +```scala +def withFile[T](body: File^{shared} => T): T = +``` +The same reasoning as before would enforce that there are no leaks. + + +## Mutable Variables + +Local mutable variables are tracked by default. It is essentially as if a mutable variable `x` was decomposed into a new private field of class `Ref` together with a getter and setter. I.e. instead of +```scala +var x: T = init +``` +we'd deal with +```scala +val x$ = Ref[T](init) +def x = x$.get +mut def x_=(y: T) = x$.put(y) +``` + +There should be a way to exclude a mutable variable or field from tracking. Maybe an annotation or modifier such as `transparent` or `untracked`? + +The expansion outlined above justifies the following rules for handling mutable variables directly: + + - A type with non-private tracked mutable fields is classified as mutable. + It has to extend the `Mutable` class. + - A read access to a local mutable variable `x` charges the capability `x.rd` to the environment. + - An assignment to a local mutable variable `x` charges the capability `x` to the environment. + - A read access to a mutable field `this.x` charges the capability `this.rd` to the environment. + - A write access to a mutable field `this.x` charges the capability `this` to the environment. + +## Mutable Scopes + +We sometimes want to make separation checking coarser. For instance when constructing a doubly linked list we want to create `Mutable` objects and +store them in mutable variables. Since a variable's type cannot contain `cap`, +we must know beforehand what mutable objects it can refer to. This is impossible if the other objects are created later.
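For concreteness, here is a minimal sketch of the situation just described, written in the draft syntax of this document (the `Node` class is hypothetical):

```scala
// A doubly linked list of Mutable nodes. Each `new Node` gets `cap` in its
// capture set, but the types of the mutable fields `prev` and `next` may not
// mention `cap`, and the neighbouring nodes they will eventually point to do
// not exist yet when this node is created.
class Node[T](elem: T) extends Mutable:
  var prev: Node[T] = null   // which concrete capture set could we write here?
  var next: Node[T] = null
```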
+ +Mutable scopes provide a solution to this: they permit deriving a set of variables from a common exclusive reference. We define a new class: +```scala +class MutableScope extends Mutable +``` +To make mutable scopes useful, we need a small tweak +of the rule governing `new` in the _Mutable Types_ section. The previous rule was: + +> When we create an instance of a mutable type we always add `cap` to its capture set. + +The new rule is: + +> When we create an instance of a mutable type we search for a given value of type `MutableScope`. If such a value is found (say it is `ms`) then we use +`ms` as the capture set of the created instance. Otherwise we use `cap`. + +We could envisage using mutable scopes like this: +``` +object enclave: + private given ms: MutableScope() + + ... +``` +Within `enclave` all mutable objects have `ms` as their capture set. So they can contain variables that also have `ms` as the capture set of their values. + +Mutable scopes should count as mutable types (this can be done either by decree or by adding an update method to `MutableScope`). Hence, mutable scopes can themselves be nested inside other mutable scopes. + +## Consumed Capabilities + +We allow `consume` as a modifier on parameters and methods. Example: + +```scala +class C extends Capability + +class Channel[T]: + def send(consume x: T) + + + +class Buffer[+T] extends Mutable: + consume def append(x: T): Buffer[T]^ + +b.append(x) +b1.append(y) + +def concat[T](consume buf1: Buffer[T]^, buf2: Buffer[T]): Buffer[T]^ + +A ->{x.consume} B + + +A + + C , Gamma, x: S |- t; T + --------------------------- + , Gamma |- (x -> t): S ->C T + + + C, Gamma |- let x = s in t: T + + +class Iterator[T]: + consume def filter(p: T => Boolean): Iterator[T]^ + consume def exists(p: T => Boolean): Boolean +``` + +As a parameter, `consume` implies `^` as the capture set of the parameter type. The `^` can be given, but is redundant. + +When a method with a `consume` parameter of type `T2^` is called with an argument of type `T1`, we add the elements of `tcs(hidden(T1, T2^))` not just to the enclosing shadowed set but to all enclosing shadowed sets where elements are visible. This makes these elements permanently inaccessible. + + + +val f = Future { ... } +val g = Future { ... } + + +A parameter is implicitly @unbox if it contains a boxed cap.
Example: + +def apply[T](f: Box[T => T], y: T): T = + xs.head(y) + +def compose[T](fs: @unbox List[T => T]) = + xs.foldRight(identity)((f: T => T, g: T => T) => x => g(f(x))) + + + +compose(List(f, g)) + +f :: g :: Nil + +def compose[T](fs: List[Unbox[T => T]], x: T) = + val combined = (xs.foldRight(identity)((f: T => T, g: T => T) => x => g(f(x)))): T->{fs*} T + combined(x) + + +With explicit diff --git a/docs/_docs/internals/syntax.md b/docs/_docs/internals/syntax.md index 665b4f5144ba..899b7f5d3c0b 100644 --- a/docs/_docs/internals/syntax.md +++ b/docs/_docs/internals/syntax.md @@ -141,7 +141,7 @@ type val var while with yield ### Soft keywords ``` -as derives end erased extension infix inline opaque open throws tracked transparent using | * + - +as derives end erased extension infix inline mut opaque open throws tracked transparent using | * + - ``` See the [separate section on soft keywords](../reference/soft-modifier.md) for additional @@ -182,7 +182,9 @@ Type ::= FunType | MatchType | InfixType FunType ::= FunTypeArgs (‘=>’ | ‘?=>’) Type Function(ts, t) | FunctionWithMods(ts, t, mods, erasedParams) - | TypTypeParamClause '=>' Type PolyFunction(ps, t) + | FunTypeArgs (‘->’ | ‘?->’) [CaptureSet] Type -- under pureFunctions and captureChecking + | TypTypeParamClause ‘=>’ Type PolyFunction(ps, t) + | TypTypeParamClause ‘->’ [CaptureSet] Type -- under pureFunctions and captureChecking FunTypeArgs ::= InfixType | ‘(’ [ FunArgTypes ] ‘)’ | FunParamClause @@ -190,13 +192,15 @@ FunParamClause ::= ‘(’ TypedFunParam {‘,’ TypedFunParam } ‘)’ TypedFunParam ::= [`erased`] id ‘:’ Type MatchType ::= InfixType `match` <<< TypeCaseClauses >>> InfixType ::= RefinedType {id [nl] RefinedType} InfixOp(t1, op, t2) + | RefinedType ‘^’ -- under captureChecking RefinedType ::= AnnotType {[nl] Refinement} RefinedTypeTree(t, ds) + | AnnotType {[nl] Refinement} ‘^’ CaptureSet -- under captureChecking AnnotType ::= SimpleType {Annotation} Annotated(t, annot) AnnotType1 ::= SimpleType1 {Annotation} Annotated(t, annot) SimpleType ::= SimpleLiteral SingletonTypeTree(l) | ‘?’ TypeBounds - | SimpleType1 + | SimpleType1 {ParArgumentExprs} SimpleType1 ::= id Ident(name) | Singleton ‘.’ id Select(t, name) | Singleton ‘.’ ‘type’ SingletonTypeTree(p) @@ -210,26 +214,28 @@ Singleton ::= SimpleRef | Singleton ‘.’ id FunArgType ::= Type | ‘=>’ Type PrefixOp(=>, t) + | ‘->’ [CaptureSet] Type -- under captureChecking FunArgTypes ::= FunArgType { ‘,’ FunArgType } ParamType ::= [‘=>’] ParamValueType + | ‘->’ [CaptureSet] ParamValueType -- under captureChecking ParamValueType ::= Type [‘*’] PostfixOp(t, "*") - | IntoType - | ‘(’ IntoType ‘)’ ‘*’ PostfixOp(t, "*") -IntoType ::= [‘into’] IntoTargetType Into(t) - | ‘(’ IntoType ‘)’ -IntoTargetType ::= Type - | FunTypeArgs (‘=>’ | ‘?=>’) IntoType -TypeArgs ::= ‘[’ Types ‘]’ ts +TypeArgs ::= ‘[’ TypeArg {‘,’ TypeArg} ‘]’ ts Refinement ::= :<<< [RefineDcl] {semi [RefineDcl]} >>> ds -TypeBounds ::= [‘>:’ Type] [‘<:’ Type] TypeBoundsTree(lo, hi) +TypeBounds ::= [‘>:’ TypeBound] [‘<:’ TypeBound] TypeBoundsTree(lo, hi) TypeAndCtxBounds ::= TypeBounds [‘:’ ContextBounds] ContextBounds(typeBounds, tps) ContextBounds ::= ContextBound | ContextBound `:` ContextBounds -- to be deprecated | '{' ContextBound {',' ContextBound} '}' ContextBound ::= Type ['as' id] Types ::= Type {‘,’ Type} +TypeArg ::= Type + | CaptureSet -- under captureChecking +TypeBound ::= Type + | CaptureSet -- under captureChecking NamesAndTypes ::= NameAndType {‘,’ NameAndType} NameAndType ::= id ':' Type +CaptureSet ::= 
‘{’ CaptureRef {‘,’ CaptureRef} ‘}’ -- under captureChecking +CaptureRef ::= { SimpleRef ‘.’ } SimpleRef [‘*’] [‘.’ ‘rd’] -- under captureChecking ``` ### Expressions @@ -365,16 +371,20 @@ ArgumentPatterns ::= ‘(’ [Patterns] ‘)’ ClsTypeParamClause::= ‘[’ ClsTypeParam {‘,’ ClsTypeParam} ‘]’ ClsTypeParam ::= {Annotation} [‘+’ | ‘-’] TypeDef(Modifiers, name, tparams, bounds) id [HkTypeParamClause] TypeAndCtxBounds Bound(below, above, context) + | {Annotation} [‘+’ | ‘-’] id `^` TypeAndCtxBounds -- under captureChecking DefTypeParamClause::= [nl] ‘[’ DefTypeParam {‘,’ DefTypeParam} ‘]’ DefTypeParam ::= {Annotation} id [HkTypeParamClause] TypeAndCtxBounds + | {Annotation} id `^` TypeAndCtxBounds -- under captureChecking TypTypeParamClause::= ‘[’ TypTypeParam {‘,’ TypTypeParam} ‘]’ -TypTypeParam ::= {Annotation} (id | ‘_’) [HkTypeParamClause] TypeBounds +TypTypeParam ::= {Annotation} (id | ‘_’) [HkTypeParamClause] TypeAndCtxBounds + | {Annotation} id `^` TypeAndCtxBounds -- under captureChecking HkTypeParamClause ::= ‘[’ HkTypeParam {‘,’ HkTypeParam} ‘]’ HkTypeParam ::= {Annotation} [‘+’ | ‘-’] (id | ‘_’) [HkTypeParamClause] TypeBounds + | {Annotation} [‘+’ | ‘-’] id `^` TypeBounds -- under captureChecking ClsParamClauses ::= {ClsParamClause} [[nl] ‘(’ [‘implicit’] ClsParams ‘)’] ClsParamClause ::= [nl] ‘(’ ClsParams ‘)’ @@ -387,8 +397,8 @@ DefParamClauses ::= DefParamClause { DefParamClause } -- and two DefTypeParam DefParamClause ::= DefTypeParamClause | DefTermParamClause | UsingParamClause -TypelessClauses ::= TypelessClause {TypelessClause} -TypelessClause ::= DefTermParamClause +ConstrParamClauses::= ConstrParamClause {ConstrParamClause} +ConstrParamClause ::= DefTermParamClause | UsingParamClause DefTermParamClause::= [nl] ‘(’ [DefTermParams] ‘)’ UsingParamClause ::= [nl] ‘(’ ‘using’ (DefTermParams | FunArgTypes) ‘)’ @@ -419,6 +429,7 @@ LocalModifier ::= ‘abstract’ | ‘infix’ | ‘erased’ | ‘tracked’ + | ‘mut’ -- under captureChecking AccessModifier ::= (‘private’ | ‘protected’) [AccessQualifier] AccessQualifier ::= ‘[’ id ‘]’ @@ -459,10 +470,13 @@ Def ::= ‘val’ PatDef PatDef ::= ids [‘:’ Type] [‘=’ Expr] | Pattern2 [‘:’ Type] [‘=’ Expr] PatDef(_, pats, tpe?, expr) DefDef ::= DefSig [‘:’ Type] [‘=’ Expr] DefDef(_, name, paramss, tpe, expr) - | ‘this’ TypelessClauses [DefImplicitClause] ‘=’ ConstrExpr DefDef(_, , vparamss, EmptyTree, expr | Block) + | ‘this’ ConstrParamClauses [DefImplicitClause] ‘=’ ConstrExpr DefDef(_, , vparamss, EmptyTree, expr | Block) DefSig ::= id [DefParamClauses] [DefImplicitClause] TypeDef ::= id [HkTypeParamClause] {FunParamClause} TypeAndCtxBounds TypeDefTree(_, name, tparams, bound - [‘=’ Type] + [‘=’ TypeDefRHS] + | id `^` TypeAndCtxBounds [‘=’ TypeDefRHS] -- under captureChecking +TypeDefRHS ::= Type + | CaptureSet -- under captureChecking TmplDef ::= ([‘case’] ‘class’ | ‘trait’) ClassDef | [‘case’] ‘object’ ObjectDef diff --git a/docs/_docs/reference/contextual/context-bounds.md b/docs/_docs/reference/contextual/context-bounds.md index 60357b3f098d..c0891aa1800e 100644 --- a/docs/_docs/reference/contextual/context-bounds.md +++ b/docs/_docs/reference/contextual/context-bounds.md @@ -47,7 +47,7 @@ Since the context parameter now has a name, it can be referred to in the body of `reduce`. An example is the `m.unit` reference in the definition above. If the context bound does not carry an `as` clause, the generated witness parameter gets a compiler-synthesized name. 
However, a [currently experimental -language extension](../experimental/default-names-context-bounds.md) would in this case give the context parameter the same name as the bound type parameter. +language extension](../experimental/typeclasses.md#better-default-names-for-context-bounds) would in this case give the context parameter the same name as the bound type parameter. Named context bounds were introduced in Scala 3.6. diff --git a/docs/_docs/reference/dropped-features/type-projection.md b/docs/_docs/reference/dropped-features/type-projection.md index 2c3e82ce99b8..9b9f643ceb6e 100644 --- a/docs/_docs/reference/dropped-features/type-projection.md +++ b/docs/_docs/reference/dropped-features/type-projection.md @@ -4,15 +4,18 @@ title: "Dropped: General Type Projection" nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features/type-projection.html --- -Scala so far allowed general type projection `T#A` where `T` is an arbitrary type -and `A` names a type member of `T`. +Scala 2 allowed general type projection `T#A` where `T` is an arbitrary type and `A` names a type member of `T`. +This turns out to be [unsound](https://github.com/scala/scala3/issues/1050) (at least when combined with other Scala 3 features). -Scala 3 disallows this if `T` is an abstract type (class types and type aliases -are fine). This change was made because unrestricted type projection -is [unsound](https://github.com/scala/scala3/issues/1050). - -This restriction rules out the [type-level encoding of a combinator -calculus](https://michid.wordpress.com/2010/01/29/scala-type-level-encoding-of-the-ski-calculus/). +To remedy this, Scala 3 only allows type projection if `T` is a concrete type (any type which is not abstract), an example for such a type would be a class type (`class T`). +A type is abstract if it is: +* An abstract type member (`type T` without `= SomeType`) +* A type parameter (`[T]`) +* An alias to an abstract type (`type T = SomeAbstractType`). +There are no restriction on `A` apart from the fact it has to be a member type of `T`, for example a subclass (`class T { class A }`). To rewrite code using type projections on abstract types, consider using path-dependent types or implicit parameters. + +This restriction rules out the [type-level encoding of a combinator +calculus](https://michid.wordpress.com/2010/01/29/scala-type-level-encoding-of-the-ski-calculus/). \ No newline at end of file diff --git a/docs/_docs/reference/experimental/cc-advanced.md b/docs/_docs/reference/experimental/cc-advanced.md new file mode 100644 index 000000000000..52d1956caf49 --- /dev/null +++ b/docs/_docs/reference/experimental/cc-advanced.md @@ -0,0 +1,78 @@ +--- +layout: doc-page +title: "Capture Checking -- Advanced Use Cases" +nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/cc-advanced.html +--- + + +## Access Control +Analogously to type parameters, we can lower- and upper-bound capability parameters where the bounds consist of concrete capture sets: +```scala +def main() = + // We can close over anything branded by the 'trusted' capability, but nothing else + def runSecure[C^ >: {trusted} <: {trusted}](block: () ->{C} Unit): Unit = ... 
+ + // This is a 'brand" capability to mark what can be mentioned in trusted code + object trusted extends caps.Capability + + // These capabilities are trusted: + val trustedLogger: Logger^{trusted} + val trustedChannel: Channel[String]^{trusted} + // These aren't: + val untrustedLogger: Logger^ + val untrustedChannel: Channel[String]^ + + runSecure: () => + trustedLogger.log("Hello from trusted code") // ok + + runSecure: () => + trustedChannel.send("I can send") // ok + trustedLogger.log(trustedChannel.recv()) // ok + + runSecure: () => "I am pure and that's ok" // ok + + runSecure: () => + untrustedLogger.log("I can't be used") // error + untrustedChannel.send("I can't be used") // error +``` +The idea is that every capability derived from the marker capability `trusted` (and only those) are eligible to be used in the `block` closure +passed to `runSecure`. We can enforce this by an explicit capability parameter `C` constraining the possible captures of `block` to the interval `>: {trusted} <: {trusted}`. + +Note that since capabilities of function types are covariant, we could have equivalently specified `runSecure`'s signature using implicit capture polymorphism to achieve the same behavior: +```scala +def runSecure(block: () ->{trusted} Unit): Unit +``` + +## Capture-safe Lexical Control + +Capability members and paths to these members can prevent leakage +of labels for lexically-delimited control operators: +```scala +trait Label extends Capability: + type Fv^ // the capability set occurring freely in the `block` passed to `boundary` below. + +def boundary[T, C^](block: Label{type Fv = {C} } ->{C} T): T = ??? // ensure free caps of label and block match +def suspend[U](label: Label)[D^ <: {label.Fv}](handler: () ->{D} U): U = ??? // may only capture the free capabilities of label + +def test = + val x = 1 + boundary: outer => + val y = 2 + boundary: inner => + val z = 3 + val w = suspend(outer) {() => z} // ok + val v = suspend(inner) {() => y} // ok + val u = suspend(inner): () => + suspend(outer) {() => w + v} // ok + y + suspend(outer): () => + println(inner) // error (would leak the inner label) + x + y + z +``` +A key property is that `suspend` (think `shift` from delimited continuations) targeting a specific label (such as `outer`) should not accidentally close over labels from a nested `boundary` (such as `inner`), because they would escape their defining scope this way. +By leveraging capability polymorphism, capability members, and path-dependent capabilities, we can prevent such leaks from occurring at compile time: + +* `Label`s store the free capabilities `C` of the `block` passed to `boundary` in their capability member `Fv`. +* When suspending on a given label, the suspension handler can capture at most the capabilities that occur freely at the `boundary` that introduced the label. That prevents mentioning nested bound labels. + +[Back to Capture Checking](cc.md) \ No newline at end of file diff --git a/docs/_docs/reference/experimental/cc.md b/docs/_docs/reference/experimental/cc.md index ff480ffb638b..542094b05c96 100644 --- a/docs/_docs/reference/experimental/cc.md +++ b/docs/_docs/reference/experimental/cc.md @@ -726,34 +726,45 @@ Reach capabilities take the form `x*` where `x` is syntactically a regular capab It is sometimes convenient to write operations that are parameterized with a capture set of capabilities. For instance consider a type of event sources `Source` on which `Listener`s can be registered. 
Listeners can hold certain capabilities, which show up as a parameter to `Source`: ```scala - class Source[X^]: - private var listeners: Set[Listener^{X^}] = Set.empty - def register(x: Listener^{X^}): Unit = - listeners += x +class Source[X^]: + private var listeners: Set[Listener^{X}] = Set.empty + def register(x: Listener^{X}): Unit = + listeners += x - def allListeners: Set[Listener^{X^}] = listeners + def allListeners: Set[Listener^{X}] = listeners ``` The type variable `X^` can be instantiated with a set of capabilities. It can occur in capture sets in its scope. For instance, in the example above -we see a variable `listeners` that has as type a `Set` of `Listeners` capturing `X^`. The `register` method takes a listener of this type +we see a variable `listeners` that has as type a `Set` of `Listeners` capturing `X`. The `register` method takes a listener of this type and assigns it to the variable. -Capture set variables `X^` are represented as regular type variables with a -special upper bound `CapSet`. For instance, `Source` could be equivalently +Capture-set variables `X^` without user-annotated bounds by default range over the interval `>: {} <: {caps.cap}` which is the universe of capture sets instead of regular types. + +Under the hood, such capture-set variables are represented as regular type variables within the special interval + `>: CapSet <: CapSet^`. +For instance, `Source` from above could be equivalently defined as follows: ```scala - class Source[X <: CapSet^]: - ... +class Source[X >: CapSet <: CapSet^]: + ... ``` -`CapSet` is a sealed trait in the `caps` object. It cannot be instantiated or inherited, so its only purpose is to identify capture set type variables and types. Capture set variables can be inferred like regular type variables. When they should be instantiated explicitly one uses a capturing -type `CapSet`. For instance: +`CapSet` is a sealed trait in the `caps` object. It cannot be instantiated or inherited, so its only +purpose is to identify type variables which are capture sets. In non-capture-checked +usage contexts, the type system will treat `CapSet^{a}` and `CapSet^{a,b}` as the type `CapSet`, whereas +with capture checking enabled, it will take the annotated capture sets into account, +so that `CapSet^{a}` and `CapSet^{a,b}` are distinct. +This representation based on `CapSet` is subject to change and +its direct use is discouraged. + +Capture-set variables can be inferred like regular type variables. When they should be instantiated +explicitly one supplies a concrete capture set. For instance: ```scala - class Async extends caps.Capability +class Async extends caps.Capability - def listener(async: Async): Listener^{async} = ??? +def listener(async: Async): Listener^{async} = ??? - def test1(async1: Async, others: List[Async]) = - val src = Source[CapSet^{async1, others*}] - ... +def test1(async1: Async, others: List[Async]) = + val src = Source[{async1, others*}] + ... ``` Here, `src` is created as a `Source` on which listeners can be registered that refer to the `async` capability or to any of the capabilities in list `others`. 
So we can continue the example code above as follows: ```scala @@ -761,6 +772,44 @@ Here, `src` is created as a `Source` on which listeners can be registered that r others.map(listener).foreach(src.register) val ls: Set[Listener^{async, others*}] = src.allListeners ``` +A common use-case for explicit capture parameters is describing changes to the captures of mutable fields, such as concatenating +effectful iterators: +```scala +class ConcatIterator[A, C^](var iterators: mutable.List[IterableOnce[A]^{C}]): + def concat(it: IterableOnce[A]^): ConcatIterator[A, {C, it}]^{this, it} = + iterators ++= it // ^ + this // track contents of `it` in the result +``` +In such a scenario, we should also ensure that any pre-existing alias of a `ConcatIterator` object becomes +inaccessible after invoking its `concat` method. This is achieved with mutation and separation tracking which are +currently in development. + + +## Capability Members + +Just as parametrization by types can be equally expressed with type members, we could +also define the `Source[X^]` class above using a _capability member_: +```scala +class Source: + type X^ + private var listeners: Set[Listener^{this.X}] = Set.empty + ... // as before +``` +Here, we can refer to capability members using paths in capture sets (such as `{this.X}`). Similarly to type members, +capability members can be upper- and lower-bounded with capture sets: +```scala +trait Thread: + type Cap^ + def run(block: () ->{this.Cap} Unit): Unit + +trait GPUThread extends Thread: + type Cap^ >: {cudaMalloc, cudaFree} <: {caps.cap} +``` +Since `caps.cap` is the top element for subcapturing, we could have also left out the +upper bound: `type Cap^ >: {cudaMalloc, cudaFree}`. + + +[More Advanced Use Cases](cc-advanced.md) ## Compilation Options diff --git a/docs/_docs/reference/experimental/into-modifier.md b/docs/_docs/reference/experimental/into-modifier.md deleted file mode 100644 index 54da5f976320..000000000000 --- a/docs/_docs/reference/experimental/into-modifier.md +++ /dev/null @@ -1,120 +0,0 @@ ---- -layout: doc-page -title: "The `into` Type Modifier" -redirectFrom: /docs/reference/other-new-features/into-modifier.html -nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/into-modifier.html ---- - -Scala 3's implicit conversions of the `scala.Conversion` class require a language import -``` -import scala.language.implicitConversions -``` -in any code that uses them as implicit conversions (code that calls conversions explicitly is not affected). If the import is missing, a feature warning is currently issued, and this will become an error in a future version of Scala 3. The motivation for this restriction is that code with hidden implicit conversions is hard to understand and might have correctness or performance problems that go undetected. - -There is one broad use case, however, where implicit conversions are very hard to replace. This is the case where an implicit conversion is used to adapt a method argument to its formal parameter type. An example from the standard library: -```scala -scala> val xs = List(0, 1) -scala> val ys = Array(2, 3) -scala> xs ++ ys -val res0: List[Int] = List(0, 1, 2, 3) -``` -The last input made use of an implicit conversion from `Array[Int]` to `IterableOnce[Int]` which is defined as a Scala 2 style implicit conversion in the standard library. Once the standard library is rewritten with Scala 3 conversions, this will -require a language import at the use site, which is clearly unacceptable.
It is possible to avoid the need for implicit conversions using method overloading or type classes, but this often leads to longer and more complicated code, and neither of these alternatives work for vararg parameters. - -This is where the `into` modifier on parameter types comes in. Here is a signature of the `++` method on `List[A]` that uses it: -```scala - def ++ (elems: into IterableOnce[A]): List[A] -``` -The `into` modifier on the type of `elems` means that implicit conversions can be applied to convert the actual argument to an `IterableOnce` value, and this without needing a language import. - -## Function arguments - -`into` also allows conversions on the results of function arguments. For instance, consider the new proposed signature of the `flatMap` method on `List[A]`: - -```scala - def flatMap[B](f: A => into IterableOnce[B]): List[B] -``` -This accepts all actual arguments `f` that, when applied to an `A`, give a result -that is convertible to `IterableOnce[B]`. So the following would work: -```scala -scala> val xs = List(1, 2, 3) -scala> xs.flatMap(x => x.toString * x) -val res2: List[Char] = List(1, 2, 2, 3, 3, 3) -``` -Here, the conversion from `String` to `Iterable[Char]` is applied on the results of `flatMap`'s function argument when it is applied to the elements of `xs`. - -## Vararg arguments - -When applied to a vararg parameter, `into` allows a conversion on each argument value individually. For example, consider a method `concatAll` that concatenates a variable -number of `IterableOnce[Char]` arguments, and also allows implicit conversions into `IterableOnce[Char]`: - -```scala -def concatAll(xss: (into IterableOnce[Char])*): List[Char] = - xss.foldLeft(List[Char]())(_ ++ _) -``` -Here, the call -```scala -concatAll(List('a'), "bc", Array('d', 'e')) -``` -would apply two _different_ implicit conversions: the conversion from `String` to `Iterable[Char]` gets applied to the second argument and the conversion from `Array[Char]` to `Iterable[Char]` gets applied to the third argument. - -Note that a vararg parameter type with into modifiers needs to be put in parentheses, as is shown in the example above. This is to make the precedence clear: each element of the argument sequence is converted by itself. - -## Retrofitting Scala 2 libraries - -There is also an annotation `@into` in the `scala.annotation` package that has -the same effect as an `into` modifier. It is intended to be used for retrofitting Scala 2 library code so that Scala 3 conversions can be applied to arguments without language imports. For instance, the definitions of -`++` and `flatMap` in the Scala 2.13 `List` class could be retrofitted as follows. -```scala - def ++ (elems: IterableOnce[A] @into): List[A] - def flatMap[B](f: A => IterableOnce[B] @into): List[B] -``` -For Scala 3 code, the `into` modifier is preferred, because it adheres to the principle that annotations should not influence typing and type inference in Scala. - -## Restrictions - -The `into` modifier is only allowed in the types of method parameters. It can be given either for the whole type, or some result type of a top-level function type, but not anywhere else. The `into` modifier does not propagate outside the method. In particular, a partially applied method does not propagate `into` modifiers to its result. - -**Example:** - -Say we have -```scala -def f(x: Int)(y: into Text): Unit -``` -then -```scala -f(3) : Text => Unit -``` -Note the `into` modifier is not longer present on the type of `f(3)`. 
Therefore, follow-on arguments to `f(3)` do not allow implicit conversions. Generally it is not possible to -define function types that allow implicit conversions on their arguments, but it is possible to define SAM types that allow conversions. E.g. -```scala -trait ConvArg: - def apply(x: into Text): Unit - -val x: ConvArg = f(3)(_) -``` - -Note this is similar to the way vararg parameters are handled in Scala. If we have -```scala -def g(x: Int)(y: Int*): Unit -``` -then -```scala -g(4) : Seq[Int] => Unit -``` -Observe that the vararg annotation also got dropped in the result type of `g(4)`. - -## Syntax changes - -The addition to the grammar is: -``` -ParamType ::= [‘=>’] ParamValueType -ParamValueType ::= Type [‘*’] - | IntoType - | ‘(’ IntoType ‘)’ ‘*’ -IntoType ::= [‘into’] IntoTargetType - | ‘(’ IntoType ‘)’ -IntoTargetType ::= Type - | FunTypeArgs (‘=>’ | ‘?=>’) IntoType -``` -As the grammar shows, `into` can only applied in the type of a parameter; it is illegal in other positions. Also, `into` modifiers in vararg types have to be enclosed in parentheses. diff --git a/docs/_docs/reference/experimental/into.md b/docs/_docs/reference/experimental/into.md new file mode 100644 index 000000000000..8b54865c2cba --- /dev/null +++ b/docs/_docs/reference/experimental/into.md @@ -0,0 +1,285 @@ +--- +layout: doc-page +title: The `into` Type and Modifier +redirectFrom: /docs/reference/other-new-features/into.html +nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/into.html +--- + +This feature is not yet part of the Scala 3 language definition. It can be made available by a language import: + +```scala +import scala.language.experimental.into +``` + + +## Summary + +Scala 3 offers two alternative schemes to allow implicit conversions using Scala-3's `Conversion` +class without requiring a language import. + +The first scheme is +to have a special type `into[T]` which serves as a marker that conversions into that type are allowed. These types are typically used in parameters of methods that are designed to work with implicit conversions of their arguments. This allows fine-grained control over where implicit conversions should be allowed. We call this scheme "_into as a type constructor_". + +The second scheme allows `into` as a soft modifier on traits, classes, and opaque type aliases. If a type definition is declared with this modifier, conversions to that type are allowed. The second scheme requires that one has control over the conversion target types so that an `into` can be added to their declaration. It is appropriate where there are a few designated types that are meant to be conversion targets. If that's the case, migration from Scala 2 to Scala 3 +becomes easier since no function signatures need to be rewritten. We call this scheme "_into as a modifier_". + + +## Motivation + +Scala 3's implicit conversions of the `scala.Conversion` class require a language import +``` +import scala.language.implicitConversions +``` +in any code that uses them as implicit conversions (code that calls conversions explicitly is not affected). If the import is missing, a feature warning is currently issued, and this will become an error in future versions of Scala 3. The motivation for this restriction is two-fold: + + - Code with hidden implicit conversions is hard to understand and might have correctness or performance issues that go undetected. 
+ - If we require explicit user opt-in for implicit conversions, we can significantly improve type inference by propagating expected type information more widely in those parts of the program where there is no opt-in. + +There is one broad use case, however, where implicit conversions are very hard to replace. This is the case where an implicit conversion is used to adapt a method argument to its formal parameter type. An example from the standard library: +```scala +scala> val xs = List(0, 1) +scala> val ys = Array(2, 3) +scala> xs ++ ys +val res0: List[Int] = List(0, 1, 2, 3) +``` +The input line `xs ++ ys` makes use of an implicit conversion from `Array[Int]` to `IterableOnce[Int]`. This conversion is defined in the standard library as an `implicit def`. Once the standard library is rewritten with Scala 3 conversions, this will require a language import at the use site, which is clearly unacceptable. It is possible to avoid the need for implicit conversions using method overloading or type classes, but this often leads to longer and more complicated code, and neither of these alternatives work for vararg parameters. + +## First Scheme: `into` as a Type Constructor + +This is where the `into` type constructor comes in. Here is a signature of a `++` method on `List[A]` that uses it: + +```scala + def ++ (elems: into[IterableOnce[A]]): List[A] +``` +The `into` wrapper on the type of `elems` means that implicit conversions can be applied to convert the actual argument to an `IterableOnce` value, and this without needing a language import. + +`into` is defined as follows in the companion object of the `scala.Conversion` class: +```scala +opaque type into[T] >: T = T +``` +Types of the form `into[T]` are treated specially during type checking. If the expected type of an expression is `into[T]` then an implicit conversion to that type can be inserted without the need for a language import. + +Note: Unlike other types, `into` starts with a lower-case letter. This emphasizes the fact that `into` is treated specially by the compiler, by making `into` look more like a keyword than a regular type. + +### Example 1 + +```scala +given Conversion[Array[Int], IterableOnce[Int]] = wrapIntArray +val xs: List[Int] = List(1) +val ys: Array[Int] = Array(2, 3) +xs ++ ys +``` +This inserts the given conversion on the `ys` argument in `xs ++ ys`. It typechecks without a feature warning since the formal parameter of `++` is of type `into[IterableOnce]`, which is also the expected type of `ys`. + +### Example 2 + +Consider a simple expression AST type: +```scala +enum Expr: + case Neg(e: Expr) + case Add(e1: Expr, e2: Expr) + case Const(n: Int) +import Expr.* +``` +Say we'd like to build `Expr` trees without explicit `Const` wrapping, as in `Add(1, Neg(2))`. The usual way to achieve this is with an implicit conversion from `Int` to `Const`: +```scala +given Conversion[Int, Const] = Const(_) +``` +Normally, that would require a language import in all source modules that construct `Expr` trees. We can avoid this requirement on user code by declaring `Neg` and `Add` with `into` parameters: +```scala +enum Expr: + case Neg(e: into[Expr]) + case Add(e1: into[Expr], e2: into[Expr]) + case Const(n: Int) +``` +This would allow conversions from `Int` to `Const` when constructing trees but not elsewhere. + +### `into` in Function Results + +`into` allows conversions everywhere it appears as expected type, including in the results of function arguments. 
For instance, consider the new proposed signature of the `flatMap` method on `List[A]`: + +```scala + def flatMap[B](f: A => into[IterableOnce[B]]): List[B] +``` +This accepts all actual arguments `f` that, when applied to an `A`, give a result +that is convertible to `IterableOnce[B]`. So the following would work: +```scala +scala> val xs = List(1, 2, 3) +scala> xs.flatMap(x => x.toString * x) +val res2: List[Char] = List(1, 2, 2, 3, 3, 3) +``` +Here, the conversion from `String` to `Iterable[Char]` is applied on the results of `flatMap`'s function argument when it is applied to the elements of `xs`. + +### Vararg arguments + +When applied to a vararg parameter, `into` allows a conversion on each argument value individually. For example, consider a method `concatAll` that concatenates a variable +number of `IterableOnce[Char]` arguments, and also allows implicit conversions into `IterableOnce[Char]`: + +```scala +def concatAll(xss: into[IterableOnce[Char]]*): List[Char] = + xss.foldRight(Nil)(_ ++: _) +``` +Here, the call +```scala +concatAll(List('a'), "bc", Array('d', 'e')) +``` +would apply two _different_ implicit conversions: the conversion from `String` to `Iterable[Char]` gets applied to the second argument and the conversion from `Array[Char]` to `Iterable[Char]` gets applied to the third argument. + + +### Unwrapping `into` + +Since `into[T]` is an opaque type, its run-time representation is just `T`. +At compile time, the type `into[T]` is a known supertype of the type `T`. So if `t: T`, then +```scala + val x: into[T] = t +``` +typechecks but +```scala +val y: T = x // error +``` +is ill-typed. We can recover the underlying type `T` using the `underlying` extension method which is also defined in object `Conversion`: +```scala +import Conversion.underlying + +val y: T = x.underlying // ok +``` +However, the next section shows that unwrapping with `.underlying` is not needed for parameters, which is the most common use case. So explicit unwrapping should be quite rare. + + + +### Dropping `into` for Parameters in Method Bodies + +The typical use cases for `into` wrappers are for parameters. Here, they specify that the +corresponding arguments can be converted to the formal parameter types. On the other hand, inside a method, a parameter type can be assumed to be of the underlying type since the conversion already took place when the enclosing method was called. This is reflected in the type system which erases `into` wrappers in the local types of parameters +as they are seen in a method body. Here is an example: +```scala + def ++ (elems: into[IterableOnce[A]]): List[A] = + val buf = ListBuffer[A]() + for elem <- elems.iterator do // no `.underlying` needed here + buf += elem + buf.toList +``` +Inside the `++` method, the `elems` parameter is of type `IterableOnce[A]`, not `into[IterableOnce[A]]`. Hence, we can simply write `elems.iterator` to get at the `iterator` method of the `IterableOnce` class. + +Specifically, we erase all `into` wrappers in the local types of parameter types that appear in covariant or invariant position. Contravariant `into` wrappers are kept since these typically are on the parameters of function arguments. + +### Into Constructors in Type Aliases + +Since `into` is a regular type constructor, it can be used anywhere, including in type aliases and type parameters.
For instance, in the Scala standard library we could define +```scala +type ToIterator[T] = into[IterableOnce[T]] +``` +and then `++`, `flatMap` and other functions could use this alias in their parameter types. The effect would be the same as when `into` is written out explicitly. + +## Second Scheme: `into` as a Modifier + +The `into` scheme discussed so far strikes a nice balance between explicitness and convenience. But migrating to it from Scala 2 implicits does require major changes since possibly a large number of function signatures have to be changed to allow conversions on the arguments. This might ultimately hold back migration to Scala 3 implicits. + +To facilitate migration, we also introduce an alternative way to specify target types of implicit conversions. We allow `into` as a soft modifier on +classes, traits, and opaque type aliases. If a type definition is declared with `into`, then implicit conversions into that type don't need a language import. + +For instance, the Laminar framework +defines a trait `Modifier` that should support implicit conversions into it. +`Modifier` is commonly used as a parameter type in both Laminar framework functions and in application-level functions that use Laminar. + +We can support implicit conversions to `Modifier`s simply by making `Modifier` an `into` trait: +```scala +into trait Modifier ... +``` +This means implicit `Conversion` instances with `Modifier` results can be inserted without requiring a language import. + +Here is a simplified example: +```scala +into trait Modifier +given Conversion[Option[Node], Modifier] = ... +given Conversion[Seq[Node], Modifier] = ... + +def f(x: Source, m: Modifier) = ... +f(source, Some(node)) // inserts conversion +``` + +The `into`-as-a-modifier scheme is handy in codebases that have a small set of specific types that are intended as the targets of implicit conversions defined in the same codebase. Laminar's `Modifier` is a typical example. But the scheme can be easily abused by making the number of `into` types too large. One should restrict the number of `into`-declared types to the absolute minimum. In particular, never make a type `into` just to cater for the possibility that someone might want to later add an implicit conversion to it. + + +## Details: Conversion target types + +To make the preceding descriptions more precise: An implicit conversion is permitted without an `implicitConversions` language import if the target type is a valid conversion target type. A valid conversion target type is one of the following: + + - A type of the form `into[T]`. + - A reference `p.C` to a class, trait, or opaque type alias `C` that is declared with an `into` modifier. The reference can be followed by type arguments. + - A type alias of a valid conversion target type. + - A match type that reduces to a valid conversion target type. + - An annotated type `T @ann` where `T` is a valid conversion target type. + - A refined type `T {...}` where `T` is a valid conversion target type. + - A union `T | U` of two valid conversion target types `T` and `U`. + - An intersection `T & U` of two valid conversion target types `T` and `U`. + - An instance of a type parameter that is explicitly instantiated to a valid conversion target type. + +Type parameters that are not fully instantiated do not count as valid conversion target types.
For instance, consider: + +```scala + trait Token + class Keyword(str: String) + given Conversion[String, Keyword] = Keyword(_) + + List[into[Keyword]]("if", "then", "else") +``` +This type-checks since the target type of the list elements is the type parameter of the `List.apply` method which is explicitly instantiated to `into[Keyword]`. On the other hand, if we continue the example as follows we get an error: +```scala + val ifKW: into[Keyword] = "if" + val ys: List[into[Keyword]] = List(ifKW, "then", "else") +``` +Here, the type variable of `List.apply` is not explicitly instantiated +when we check the `List(...)` arguments (it is just upper-bounded by the target type `into[Keyword]`). This is not enough to allow +implicit conversions on the second and third arguments. + +Subclasses of `into` classes or traits do not count as valid conversion target types. For instance, consider: + +```scala +into trait T +class C(x: Int) extends T +given Conversion[Int, C] = C(_) + +def f(x: T) = () +def g(x: C) = () +f(1) // ok +g(1) // error +``` +The call `f(1)` type-checks since `f`'s parameter type `T` is `into`. +But the call `g(1)` does not type-check since `g`'s parameter type `C` is not `into`. It does not matter that `C` extends a trait `T` that is `into`. + + +## Why Two Different Schemes? + +Can we make do with just one scheme instead of two? In practice this would be difficult. + +Let's first take a look at the `Expr` example, which uses into-as-a-constructor. Could it be rewritten to use into-as-a-modifier? +This would mean we have to add `into` to the whole `Expr` enum. Adding it to just `Const` is not enough, since `Add` and `Neg` take `Expr` arguments, not `Const` arguments. + +But we might not always have permission to change the `Expr` enum. For instance, `Expr` could be defined in a lower level library without implicit conversions, but later we want to make `Expr` construction convenient by eliding `Const` wrappers in some higher-level library or application. With `into` constructors, this is easy: Define the implicit conversion and facade methods that construct `Expr` trees while taking `into[Expr]` parameters. +With `into` modifiers there is no way to achieve the same. + +A possibly more important objection is that even if we could add the `into` modifier to `Expr`, it would be bad style to do so! We want to allow implicit conversions in the very specific case where we build an `Expr` tree using the `Add` and `Neg` constructors. Our applications could have lots of other methods that take `Expr` trees, for instance to analyze them or evaluate them. +We probably do not want to allow implicit conversions for the arguments of all these other methods. The `into` modifier is too unspecific to distinguish the good use case from the problematic ones. + +On the other hand, there are also situations where into-as-a-modifier is the practical choice. To see this, consider again the `Modifier` use case in Laminar. +We could avoid the `into` modifier by wrapping all `Modifier` parameters +with the `into` constructor. This would be a lot more work than adding just the single `into` modifier. Worse, functions taking `Modifier` parameters are found both in the Laminar framework code and in many applications using it. The framework and the applications would have to be upgraded in lockstep. When Laminar upgrades to Scala 3 implicits, all applications would have to be rewritten, which would make such a migration very cumbersome.
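+
+As a rough sketch (the names `div`, `userWidget`, and `Element` are invented here for illustration and are not taken from Laminar), the two options compare as follows:
+```scala
+// Option 1: `into` as a type constructor. Every signature that accepts
+// modifiers has to be rewritten, in the framework and in applications alike.
+def div(mods: into[Modifier]*): Element = ...
+def userWidget(mods: into[Modifier]*): Element = ...
+
+// Option 2: `into` as a modifier. A single change at the declaration site;
+// all existing signatures can stay exactly as they are.
+into trait Modifier
+def div(mods: Modifier*): Element = ...
+def userWidget(mods: Modifier*): Element = ...
+```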
+ +One can try to mitigate the effort by playing with type aliases. For instance, a hypothetical future Laminar using Scala 3 conversions could rename the +trait `Modifier` to `ModifierTrait` and define an alias +```scala +type Modifier = into[ModifierTrait] +``` +Then the source code of applications would not have to change (unless these applications define classes directly extending `Modifier`). But that future Laminar would not be binary compatible with the current one, since the name +of the original `Modifier` trait has changed. In summary, upgrading Laminar to use Scala 3 conversions could keep either source compatibility or binary compatibility but not both at the same time. + + +## Syntax Changes + +``` +LocalModifier ::= ... | ‘into’ +``` + +`into` is a soft modifier. It is only allowed on classes, traits, and opaque type aliases. + diff --git a/docs/_docs/reference/experimental/modularity.md b/docs/_docs/reference/experimental/modularity.md index 1a3d47695861..580044ce4d66 100644 --- a/docs/_docs/reference/experimental/modularity.md +++ b/docs/_docs/reference/experimental/modularity.md @@ -196,6 +196,43 @@ LocalModifier ::= ‘tracked’ The (soft) `tracked` modifier is allowed as a local modifier. +## Applied constructor types + +A new syntax is also introduced to make classes with `tracked` parameters +easier to use. The new syntax is essentially the ability to use an application +of a class constructor as a type; we call such types applied constructor types. + +With this new feature, the following example compiles correctly, and the type in +the comment is the resulting type of the applied constructor type. + +```scala +import scala.language.experimental.modularity + +class C(tracked val v: Any) + +val c: C(42) /* C { val v: 42 } */ = C(42) +``` + +### Syntax change + +``` +SimpleType ::= SimpleLiteral + | ‘?’ TypeBounds +--- | SimpleType1 ++++ | SimpleType1 {ParArgumentExprs} +``` + +A `SimpleType` can now optionally be followed by `ParArgumentExprs`. + +The arguments are used to typecheck the whole type, as if it was a normal +constructor application. For classes with `tracked` parameters this will mean +that the resulting type will have a refinement for each `tracked` parameter. + +For example, given the following class definition: +```scala +class Person(tracked val name: String, tracked val age: Int) +``` +The type `Person("Kasia", 27)` will be translated to `Person { val name: "Kasia"; val age: 27 }`. ## Allow Class Parents to be Refined Types diff --git a/docs/_docs/reference/experimental/typeclasses.md b/docs/_docs/reference/experimental/typeclasses.md index add5853e10ba..0fc4fa043940 100644 --- a/docs/_docs/reference/experimental/typeclasses.md +++ b/docs/_docs/reference/experimental/typeclasses.md @@ -25,7 +25,7 @@ in source version `future` if the additional experimental language import `modul ``` It is intended to turn features described here into proposals under the Scala improvement process. A first installment is SIP 64, which covers some syntactic changes, names for context bounds, multiple context bounds and deferred givens. This SIP has been accepted for inclusion in the language and will be released in Scala 3.6. The remaining elements -that concern type classes are described in the following. There is also a separate [page on modularity improvements](../modularity.md) that describes proposed additions not directly related to type classes. +that concern type classes are described in the following.
There is also a separate [page on modularity improvements](modularity.md) that describes proposed additions not directly related to type classes. ## Generalizing Context Bounds diff --git a/docs/_docs/reference/features-classification.md b/docs/_docs/reference/features-classification.md index 550130780b44..ce5318859ce0 100644 --- a/docs/_docs/reference/features-classification.md +++ b/docs/_docs/reference/features-classification.md @@ -40,7 +40,7 @@ These constructs replace existing constructs with the aim of making the language - [Extension methods](contextual/extension-methods.md) replace implicit classes with a clearer and simpler mechanism. - [Opaque type aliases](other-new-features/opaques.md) replace most uses of value classes while guaranteeing absence of boxing. - - [Top-level definitions](dropped-features/package-objects.md) replace package objects, dropping syntactic boilerplate. + - [Top-level definitions](other-new-features/toplevel-definitions.md) replace package objects, dropping syntactic boilerplate. - [Export clauses](other-new-features/export.md) provide a simple and general way to express aggregation, which can replace the previous facade pattern of package objects inheriting from classes. diff --git a/docs/_docs/reference/other-new-features/kind-polymorphism.md b/docs/_docs/reference/other-new-features/kind-polymorphism.md index e452ee8384f9..4bb1e659dfe9 100644 --- a/docs/_docs/reference/other-new-features/kind-polymorphism.md +++ b/docs/_docs/reference/other-new-features/kind-polymorphism.md @@ -43,5 +43,4 @@ It is declared `abstract` and `final`, so it can be neither instantiated nor ext `AnyKind` plays a special role in Scala's subtype system: It is a supertype of all other types no matter what their kind is. It is also assumed to be kind-compatible with all other types. Furthermore, `AnyKind` is treated as a higher-kinded type (so it cannot be used as a type of values), but at the same time it has no type parameters (so it cannot be instantiated). -**Note:** This feature is considered experimental but stable and it can be disabled under compiler flag -(i.e. `-Yno-kind-polymorphism`). +**Note:** This feature is now stable. The compiler flag `-Yno-kind-polymorphism` is deprecated as of 3.7.0, has no effect (is ignored), and will be removed in a future version. diff --git a/docs/_docs/reference/other-new-features/toplevel-definitions.md b/docs/_docs/reference/other-new-features/toplevel-definitions.md index b1793bd1941c..29da270729cb 100644 --- a/docs/_docs/reference/other-new-features/toplevel-definitions.md +++ b/docs/_docs/reference/other-new-features/toplevel-definitions.md @@ -1,7 +1,7 @@ --- layout: doc-page title: "Toplevel Definitions" -nightlyOf: https://docs.scala-lang.org/scala3/reference/dropped-features/toplevel-definitions.html +nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/toplevel-definitions.html --- All kind of definitions can now be written at the top-level. 
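+
+For example, a single source file may now consist entirely of top-level definitions (an illustrative sketch; the names are made up):
+
+```scala
+package mylib
+
+val defaultGreeting: String = "hello"
+
+def greet(name: String): String = s"$defaultGreeting, $name"
+
+type Result[A] = Either[String, A]
+
+given Ordering[Result[Int]] = Ordering.by(_.getOrElse(0))
+
+@main def demo(): Unit = println(greet("world"))
+```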
diff --git a/docs/_docs/reference/other-new-features/type-test.md b/docs/_docs/reference/other-new-features/type-test.md index ec7a87230753..fb2a2e584711 100644 --- a/docs/_docs/reference/other-new-features/type-test.md +++ b/docs/_docs/reference/other-new-features/type-test.md @@ -63,7 +63,7 @@ We could create a type test at call site where the type test can be performed wi val tt: TypeTest[Any, String] = new TypeTest[Any, String]: def unapply(s: Any): Option[s.type & String] = s match - case s: String => Some(s) + case q: (s.type & String) => Some(q) case _ => None f[AnyRef, String]("acb")(using tt) diff --git a/docs/_docs/reference/overview.md b/docs/_docs/reference/overview.md index bdb8aa74c1aa..35a3a58733f1 100644 --- a/docs/_docs/reference/overview.md +++ b/docs/_docs/reference/overview.md @@ -46,7 +46,7 @@ These constructs replace existing constructs with the aim of making the language replace implicit classes with a clearer and simpler mechanism. - [Opaque type aliases](other-new-features/opaques.md) replace most uses of value classes while guaranteeing the absence of boxing. -- [Top-level definitions](dropped-features/package-objects.md) +- [Top-level definitions](other-new-features/toplevel-definitions.md) replace package objects, dropping syntactic boilerplate. - [Export clauses](other-new-features/export.md) provide a simple and general way to express aggregation, which can replace diff --git a/docs/_docs/reference/changed-features/better-fors.md b/docs/_docs/reference/preview/better-fors.md similarity index 93% rename from docs/_docs/reference/changed-features/better-fors.md rename to docs/_docs/reference/preview/better-fors.md index 36355f0faa88..d5fd32da9a1e 100644 --- a/docs/_docs/reference/changed-features/better-fors.md +++ b/docs/_docs/reference/preview/better-fors.md @@ -1,10 +1,10 @@ --- layout: doc-page title: "Better fors" -nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/better-fors.html +nightlyOf: https://docs.scala-lang.org/scala3/reference/preview/better-fors.html --- -Starting in Scala `3.7`, the usability of `for`-comprehensions is improved. +Starting in Scala `3.7` under `-preview` mode, the usability of `for`-comprehensions is improved. The biggest user facing change is the new ability to start `for`-comprehensions with aliases. This means that the following previously invalid code is now valid: diff --git a/docs/_docs/reference/preview/overview.md b/docs/_docs/reference/preview/overview.md new file mode 100644 index 000000000000..62fc499487a1 --- /dev/null +++ b/docs/_docs/reference/preview/overview.md @@ -0,0 +1,25 @@ +--- +layout: doc-page +title: "Preview" +nightlyOf: https://docs.scala-lang.org/scala3/reference/preview/overview.html +redirectFrom: overview.html +--- + +## Preview language features + +New Scala language features or standard library APIs are initially introduced as experimental, but once they are fully implemented and accepted through the [SIP](https://docs.scala-lang.org/sips/) process, they can become preview features. + +Preview language features and APIs are guaranteed to be standardized in an upcoming Scala minor release, but they still allow the compiler team to introduce small, possibly binary-incompatible, changes based on community feedback. +These can be used by early adopters who can accept the possibility of binary compatibility breakage. For instance, preview features could be used in some internal tool or application. On the other hand, preview features are discouraged in publicly available libraries.
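+
+For example, an internal application built with Scala CLI could opt in with the `-preview` compiler flag (described below); the exact version and directives shown here are illustrative:
+
+```scala
+//> using scala 3.7.0
+//> using options -preview
+
+// SIP-62 ("better fors"): a `for`-comprehension may now start with an alias.
+@main def demo(): Unit =
+  val sums =
+    for
+      base = 10
+      x <- List(1, 2, 3)
+    yield base + x
+  println(sums) // List(11, 12, 13)
+```
+
+In an sbt build, the equivalent would be adding `-preview` to `scalacOptions`.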
+ +More information about preview features can be found in the [preview definitions guide](../other-new-features/preview-defs.md). + +### `-preview` compiler flag + +This flag enables the use of all preview language features in the project. + + +## List of available preview features + +* [`better-fors`](./better-fors.md): Enables the new for-comprehension behaviour specified by SIP-62 (requires `-source:3.7` or later) + diff --git a/docs/_docs/reference/syntax.md b/docs/_docs/reference/syntax.md index 0f78ff03583e..ccba2ec9578a 100644 --- a/docs/_docs/reference/syntax.md +++ b/docs/_docs/reference/syntax.md @@ -363,8 +363,8 @@ DefParamClauses ::= DefParamClause { DefParamClause } -- and two DefTypeParam DefParamClause ::= DefTypeParamClause | DefTermParamClause | UsingParamClause -TypelessClauses ::= TypelessClause {TypelessClause} -TypelessClause ::= DefTermParamClause +ConstrParamClauses::= ConstrParamClause {ConstrParamClause} +ConstrParamClause ::= DefTermParamClause | UsingParamClause DefTermParamClause::= [nl] ‘(’ [DefTermParams] ‘)’ @@ -433,7 +433,7 @@ Def ::= ‘val’ PatDef PatDef ::= ids [‘:’ Type] [‘=’ Expr] | Pattern2 [‘:’ Type] [‘=’ Expr] PatDef(_, pats, tpe?, expr) DefDef ::= DefSig [‘:’ Type] [‘=’ Expr] DefDef(_, name, paramss, tpe, expr) - | ‘this’ TypelessClauses [DefImplicitClause] ‘=’ ConstrExpr DefDef(_, , vparamss, EmptyTree, expr | Block) + | ‘this’ ConstrParamClauses [DefImplicitClause] ‘=’ ConstrExpr DefDef(_, , vparamss, EmptyTree, expr | Block) DefSig ::= id [DefParamClauses] [DefImplicitClause] TypeDef ::= id [HkTypeParamClause] {FunParamClause}TypeBounds TypeDefTree(_, name, tparams, bound [‘=’ Type] diff --git a/docs/_spec/TODOreference/other-new-features/type-test.md b/docs/_spec/TODOreference/other-new-features/type-test.md index ec7a87230753..fb2a2e584711 100644 --- a/docs/_spec/TODOreference/other-new-features/type-test.md +++ b/docs/_spec/TODOreference/other-new-features/type-test.md @@ -63,7 +63,7 @@ We could create a type test at call site where the type test can be performed wi val tt: TypeTest[Any, String] = new TypeTest[Any, String]: def unapply(s: Any): Option[s.type & String] = s match - case s: String => Some(s) + case q: (s.type & String) => Some(q) case _ => None f[AnyRef, String]("acb")(using tt) diff --git a/docs/sidebar.yml b/docs/sidebar.yml index ca58e21587eb..f0ca5433d649 100644 --- a/docs/sidebar.yml +++ b/docs/sidebar.yml @@ -88,6 +88,7 @@ subsection: - page: reference/other-new-features/experimental-defs.md - page: reference/other-new-features/preview-defs.md - page: reference/other-new-features/binary-literals.md + - page: reference/other-new-features/toplevel-definitions.md - title: Other Changed Features directory: changed-features index: reference/changed-features/changed-features.md @@ -116,7 +117,6 @@ subsection: - page: reference/changed-features/lazy-vals-init.md - page: reference/changed-features/main-functions.md - page: reference/changed-features/interpolation-escapes.md - - page: reference/changed-features/better-fors.md - title: Dropped Features index: reference/dropped-features/dropped-features.md subsection: @@ -140,6 +140,11 @@ - page: reference/dropped-features/nonlocal-returns.md - page: reference/dropped-features/this-qualifier.md - page: reference/dropped-features/wildcard-init.md + - title: Preview Features + directory: preview + index: reference/preview/overview.md + subsection: + - page: reference/preview/better-fors.md - title: Experimental Features directory: experimental index: reference/experimental/overview.md @@ -156,8
+161,10 @@ subsection: - page: reference/experimental/numeric-literals.md - page: reference/experimental/explicit-nulls.md - page: reference/experimental/main-annotation.md - - page: reference/experimental/into-modifier.md + - page: reference/experimental/into.md - page: reference/experimental/cc.md + - page: reference/experimental/cc-advanced.md + hidden: true - page: reference/experimental/purefuns.md - page: reference/experimental/tupled-function.md - page: reference/experimental/modularity.md diff --git a/language-server/test/dotty/tools/languageserver/CompletionTest.scala b/language-server/test/dotty/tools/languageserver/CompletionTest.scala index 8f05d6ad11da..528aa1055c6f 100644 --- a/language-server/test/dotty/tools/languageserver/CompletionTest.scala +++ b/language-server/test/dotty/tools/languageserver/CompletionTest.scala @@ -32,7 +32,9 @@ class CompletionTest { @Test def completionFromScalaPackage: Unit = { code"class Foo { val foo: Conv${m1} }" - .completion(("Conversion", Class, "Conversion")) + .completion( + ("Conversion", Class, "Conversion"), + ("Conversion", Module, "Conversion")) } @Test def implicitSearchCrash: Unit = diff --git a/library-js/src/scala/scalajs/runtime/AnonFunctionXXL.scala b/library-js/src/scala/scalajs/runtime/AnonFunctionXXL.scala index 87208573eff9..aa08afdce323 100644 --- a/library-js/src/scala/scalajs/runtime/AnonFunctionXXL.scala +++ b/library-js/src/scala/scalajs/runtime/AnonFunctionXXL.scala @@ -1,8 +1,25 @@ package scala.scalajs.runtime -import scala.scalajs.js - -@inline -final class AnonFunctionXXL(f: js.Function1[IArray[Object], Object]) extends scala.runtime.FunctionXXL { - override def apply(xs: IArray[Object]): Object = f(xs) -} +/* Before Scala.js 1.19, this class was concrete. It had a 1-argument + * constructor taking a js.Function[Array[Object], Object], and its `apply()` + * method called that function. This was similar to the `AnonFunctionN` classes + * of the Scala.js library (shared between Scala 2 and 3). + * + * In Scala.js 1.19, we introduced `NewLambda` nodes, which superseded these + * specialized classes with a compilation mode that is more efficient on Wasm. + * However, libraries compiled with earlier versions still contain references + * to `AnonFunctionXXL`. + * + * The IR deserializer patches allocations of the form + * New(AnonFunctionXXL, ctor, closure :: Nil) + * into + * NewLambda(AnonFunctionXXL, ..., (xs: Array[Object]) => closure(xs)) + * + * When the `closure` is directly a JS `Closure` with the right signature + * (which is supposed to be always, as far as our codegens were concerned), + * it rewrites that as + * NewLambda(AnonFunctionXXL, ..., (closureParam: Array[Object]) => closureBody) + * which provides the best performance for old code. 
+ */ +@deprecated("used by the codegen before Scala.js 1.19", since = "3.7.0") +sealed abstract class AnonFunctionXXL extends scala.runtime.FunctionXXL diff --git a/library/src/scala/CanThrow.scala b/library/src/scala/CanThrow.scala index 91c94229c43c..485dcecb37df 100644 --- a/library/src/scala/CanThrow.scala +++ b/library/src/scala/CanThrow.scala @@ -8,7 +8,7 @@ import annotation.{implicitNotFound, experimental, capability} */ @experimental @implicitNotFound("The capability to throw exception ${E} is missing.\nThe capability can be provided by one of the following:\n - Adding a using clause `(using CanThrow[${E}])` to the definition of the enclosing method\n - Adding `throws ${E}` clause after the result type of the enclosing method\n - Wrapping this piece of code with a `try` block that catches ${E}") -erased class CanThrow[-E <: Exception] extends caps.Capability +erased class CanThrow[-E <: Exception] extends caps.SharedCapability @experimental object unsafeExceptions: diff --git a/library/src/scala/Conversion.scala b/library/src/scala/Conversion.scala index f6267dc79fb2..cbae3448dab9 100644 --- a/library/src/scala/Conversion.scala +++ b/library/src/scala/Conversion.scala @@ -29,3 +29,20 @@ abstract class Conversion[-T, +U] extends Function1[T, U]: extension (x: T) /** `x.convert` converts a value `x` of type `T` to type `U` */ def convert = this(x) + +object Conversion: + import annotation.experimental + + /** An opaque type alias to declare "into" parameter types that allow implicit conversions + * on corresponding arguments. If the expected type of an expression t is into[T], implicit + * conversions are tried from the type of `t` to `T`. `into[T]` types are erased to `T` + * in all covariant positions of the types of parameter symbols. + */ + @experimental + opaque type into[+T] >: T = T + + /** Unwrap an `into` */ + extension [T](x: into[T]) + @experimental def underlying: T = x + +end Conversion \ No newline at end of file diff --git a/library/src/scala/annotation/internal/$into.scala b/library/src/scala/annotation/internal/$into.scala index 4d8788724e25..bad0e399d389 100644 --- a/library/src/scala/annotation/internal/$into.scala +++ b/library/src/scala/annotation/internal/$into.scala @@ -1,15 +1,12 @@ package scala.annotation.internal import annotation.experimental -/** An internal annotation on (part of) a parameter type that allows implicit conversions - * for its arguments. The publicly visible `into` annotation in the parent package - * `annotation` gets mapped to `$into` by the compiler in all places where - * conversions should be allowed. The reason for the split into two annotations - * is that `annotation.into` is given in source code and may propagate in unspecified - * ways through type inference. By contrast `$into` is constrained to occur only - * on parameters of method types. This makes implicit conversion insertion - * predictable and independent of the un-specified aspects of type inference. +/** An internal annotation on (part of) a parameter type that serves as a marker where + * the original type was of the form `into[T]`. These annotated types are mapped back + * to `into[T]` types when forming a method types from the parameter types. The idea is + * that `T @$into` is equivalent to `T`, whereas `into[T]` is only a known supertype of + * `T`. Hence, we don't need to use `.underlying` to go from an into type to its + * underlying type in the types of local parameters. 
*/ @experimental -class $into() extends annotation.StaticAnnotation - +class $into extends annotation.StaticAnnotation \ No newline at end of file diff --git a/library/src/scala/annotation/internal/readOnlyCapability.scala b/library/src/scala/annotation/internal/readOnlyCapability.scala new file mode 100644 index 000000000000..8e939aea6bb9 --- /dev/null +++ b/library/src/scala/annotation/internal/readOnlyCapability.scala @@ -0,0 +1,7 @@ +package scala.annotation +package internal + +/** An annotation that marks a capture ref as a read-only capability. + * `x.rd` is encoded as `x.type @readOnlyCapability` + */ +class readOnlyCapability extends StaticAnnotation diff --git a/library/src/scala/annotation/into.scala b/library/src/scala/annotation/into.scala deleted file mode 100644 index 70a53ff9478d..000000000000 --- a/library/src/scala/annotation/into.scala +++ /dev/null @@ -1,10 +0,0 @@ -package scala.annotation -import annotation.experimental - -/** An annotation on (part of) a parameter type that allows implicit conversions - * for its arguments. The `into` modifier on parameter types in Scala 3 is - * mapped to this annotation. The annotation is intended to be used directly in - * Scala 2 sources only. For Scala 3, the `into` modifier should be preferred. - */ -@experimental -class into() extends annotation.StaticAnnotation diff --git a/library/src/scala/annotation/retains.scala b/library/src/scala/annotation/retains.scala index 909adc13a1c2..9c4af7f2336d 100644 --- a/library/src/scala/annotation/retains.scala +++ b/library/src/scala/annotation/retains.scala @@ -1,12 +1,12 @@ package scala.annotation -/** An annotation that indicates capture of a set of references under -Ycc. +/** An annotation that indicates capture of a set of references under capture checking. * * T @retains(x, y, z) * * is the internal representation used for the capturing type * - * {x, y, z} T + * T ^ {x, y, z} * * The annotation can also be written explicitly if one wants to avoid the * non-standard capturing type syntax. diff --git a/library/src/scala/caps.scala b/library/src/scala/caps.scala deleted file mode 100644 index c35b3b55e813..000000000000 --- a/library/src/scala/caps.scala +++ /dev/null @@ -1,69 +0,0 @@ -package scala - -import annotation.{experimental, compileTimeOnly, retainsCap} - -@experimental object caps: - - trait Capability extends Any - - /** The universal capture reference */ - val cap: Capability = new Capability() {} - - /** The universal capture reference (deprecated) */ - @deprecated("Use `cap` instead") - val `*`: Capability = cap - - @deprecated("Use `Capability` instead") - type Cap = Capability - - /** Carrier trait for capture set type parameters */ - trait CapSet extends Any - - /** A type constraint expressing that the capture set `C` needs to contain - * the capability `R` - */ - sealed trait Contains[+C >: CapSet <: CapSet @retainsCap, R <: Singleton] - - /** The only implementation of `Contains`. The constraint that `{R} <: C` is - * added separately by the capture checker. - */ - given containsImpl[C >: CapSet <: CapSet @retainsCap, R <: Singleton]: Contains[C, R]() - - /** A wrapper indicating a type variable in a capture argument list of a - * @retains annotation. E.g. `^{x, Y^}` is represented as `@retains(x, capsOf[Y])`. - */ - @compileTimeOnly("Should be be used only internally by the Scala compiler") - def capsOf[CS >: CapSet <: CapSet @retainsCap]: Any = ??? 
- - /** Reach capabilities x* which appear as terms in @retains annotations are encoded - * as `caps.reachCapability(x)`. When converted to CaptureRef types in capture sets - * they are represented as `x.type @annotation.internal.reachCapability`. - */ - extension (x: Any) def reachCapability: Any = x - - /** A trait to allow expressing existential types such as - * - * (x: Exists) => A ->{x} B - */ - sealed trait Exists extends Capability - - /** This should go into annotations. For now it is here, so that we - * can experiment with it quickly between minor releases - */ - final class untrackedCaptures extends annotation.StaticAnnotation - - /** This should go into annotations. For now it is here, so that we - * can experiment with it quickly between minor releases - */ - final class use extends annotation.StaticAnnotation - - object unsafe: - - extension [T](x: T) - /** A specific cast operation to remove a capture set. - * If argument is of type `T^C`, assume it is of type `T` instead. - * Calls to this method are treated specially by the capture checker. - */ - def unsafeAssumePure: T = x - - end unsafe diff --git a/library/src/scala/caps/package.scala b/library/src/scala/caps/package.scala new file mode 100644 index 000000000000..3016cb4ebe14 --- /dev/null +++ b/library/src/scala/caps/package.scala @@ -0,0 +1,156 @@ +package scala +package caps + +import annotation.{experimental, compileTimeOnly, retainsCap} + +/** + * Base trait for classes that represent capabilities in the + * [object-capability model](https://en.wikipedia.org/wiki/Object-capability_model). + * + * A capability is a value representing a permission, access right, resource or effect. + * Capabilities are typically passed to code as parameters; they should not be global objects. + * Often, they come with access restrictions such as scoped lifetimes or limited sharing. + * + * An example is the [[scala.util.boundary.Label Label]] class in [[scala.util.boundary]]. + * It represents a capability in the sense that it gives permission to [[scala.util.boundary.break break]] + * to the enclosing boundary represented by the `Label`. It has a scoped lifetime, since breaking to + * a `Label` after the associated `boundary` was exited gives a runtime exception. + * + * [[Capability]] has a formal meaning when + * [[scala.language.experimental.captureChecking Capture Checking]] + * is turned on. + * But even without capture checking, extending this trait can be useful for documenting the intended purpose + * of a class. + */ +@experimental +trait Capability extends Any + +/** The universal capture reference. */ +@experimental +object cap extends Capability + +/** Marker trait for classes with methods that requires an exclusive reference. */ +@experimental +trait Mutable extends Capability + +/** Marker trait for capabilities that can be safely shared in a concurrent context. + * During separation checking, shared capabilities are not taken into account. + */ +@experimental +trait SharedCapability extends Capability + +/** Carrier trait for capture set type parameters */ +@experimental +trait CapSet extends Any + +/** A type constraint expressing that the capture set `C` needs to contain + * the capability `R` + */ +@experimental +sealed trait Contains[+C >: CapSet <: CapSet @retainsCap, R <: Singleton] + +@experimental +object Contains: + /** The only implementation of `Contains`. The constraint that `{R} <: C` is + * added separately by the capture checker. 
+ */ + @experimental + given containsImpl[C >: CapSet <: CapSet @retainsCap, R <: Singleton]: Contains[C, R]() + +/** An annotation on parameters `x` stating that the method's body makes + * use of the reach capability `x*`. Consequently, when calling the method + * we need to charge the deep capture set of the actual argument to the + * environment. + * + * Note: This should go into annotations. For now it is here, so that we + * can experiment with it quickly between minor releases + */ +@experimental +final class use extends annotation.StaticAnnotation + +/** An annotation on parameters and update methods. + * On a parameter it states that any capabilities passed in the argument + * are no longer available afterwards, unless they are of class `SharedCapability`. + * On an update method, it states that the `this` of the enclosing class is + * consumed, which means that any capabilities of the method prefix are + * no longer available afterwards. + */ +@experimental +final class consume extends annotation.StaticAnnotation + +/** A trait that was used to allow expressing existential types. Replaced by +* root.Result instances. +*/ +@experimental +@deprecated +sealed trait Exists extends Capability + +@experimental +object internal: + + /** A wrapper indicating a type variable in a capture argument list of a + * @retains annotation. E.g. `^{x, Y^}` is represented as `@retains(x, capsOf[Y])`. + */ + @compileTimeOnly("Should be used only internally by the Scala compiler") + def capsOf[CS >: CapSet <: CapSet @retainsCap]: Any = ??? + + /** Reach capabilities x* which appear as terms in @retains annotations are encoded + * as `caps.reachCapability(x)`. When converted to CaptureRef types in capture sets + * they are represented as `x.type @annotation.internal.reachCapability`. + */ + extension (x: Any) def reachCapability: Any = x + + /** Read-only capabilities x.rd which appear as terms in @retains annotations are encoded + * as `caps.readOnlyCapability(x)`. When converted to CaptureRef types in capture sets + * they are represented as `x.type @annotation.internal.readOnlyCapability`. + */ + extension (x: Any) def readOnlyCapability: Any = x + + /** An internal annotation placed on a refinement created by capture checking. + * Refinements with this annotation unconditionally override any + * info from the parent type, so no intersection needs to be formed. + * This could be useful for tracked parameters as well. + */ + final class refineOverride extends annotation.StaticAnnotation + + /** An annotation used internally for root capability wrappers of `cap` that + * represent either Fresh or Result capabilities. + * A capability is encoded as `caps.cap @rootCapability(...)` where + * `rootCapability(...)` is a special kind of annotation of type `root.Annot` + * that contains either a hidden set for Fresh instances or a method type binder + * for Result instances. + */ + final class rootCapability extends annotation.StaticAnnotation + + /** An annotation used internally to mark a function type that was + * converted to a dependent function type during setup of inferred types. + * Such function types should not map roots to result variables. + */ + final class inferredDepFun extends annotation.StaticAnnotation + +end internal + +@experimental +object unsafe: + /** + * Marks the constructor parameter as untracked. + * The capture set of this parameter will not be included in + * the capture set of the constructed object. + * + * @note This should go into annotations.
For now it is here, so that we + * can experiment with it quickly between minor releases + */ + final class untrackedCaptures extends annotation.StaticAnnotation + + extension [T](x: T) + /** A specific cast operation to remove a capture set. + * If argument is of type `T^C`, assume it is of type `T` instead. + * Calls to this method are treated specially by the capture checker. + */ + def unsafeAssumePure: T = x + + /** A wrapper around code for which separation checks are suppressed. + */ + def unsafeAssumeSeparate(op: Any): op.type = op + +end unsafe diff --git a/library/src/scala/quoted/Quotes.scala b/library/src/scala/quoted/Quotes.scala index 8f7a069eaff5..009f1c28fbd9 100644 --- a/library/src/scala/quoted/Quotes.scala +++ b/library/src/scala/quoted/Quotes.scala @@ -3640,7 +3640,10 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => /** Methods of the module object `val StringConstant` */ trait StringConstantModule { this: StringConstant.type => - /** Create a constant String value */ + /** Create a constant String value + * + * @throw `IllegalArgumentException` if the argument is `null` + */ def apply(x: String): StringConstant /** Match String value constant and extract its value */ def unapply(constant: StringConstant): Some[String] @@ -3815,7 +3818,8 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => /** The class Symbol of a global class definition */ def classSymbol(fullName: String): Symbol - /** Generates a new class symbol for a class with a parameterless constructor. + /** Generates a new class symbol for a class with a public parameterless constructor. + * For more settings, look to the other newClass methods. * * Example usage: * ``` @@ -3843,7 +3847,7 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => * } * ``` * - * @param parent The owner of the class + * @param owner The owner of the class * @param name The name of the class * @param parents The parent classes of the class. The first parent must not be a trait. * @param decls The member declarations of the class provided the symbol of this class @@ -3856,8 +3860,181 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => * @note As a macro can only splice code into the point at which it is expanded, all generated symbols must be * direct or indirect children of the reflection context's owner. */ - // TODO: add flags and privateWithin - @experimental def newClass(parent: Symbol, name: String, parents: List[TypeRepr], decls: Symbol => List[Symbol], selfType: Option[TypeRepr]): Symbol + @experimental def newClass(owner: Symbol, name: String, parents: List[TypeRepr], decls: Symbol => List[Symbol], selfType: Option[TypeRepr]): Symbol + + /** Generates a new class symbol for a class with a public single term clause constructor. 
+ * + * Example usage: + * ``` + * val name = "myClass" + * def decls(cls: Symbol): List[Symbol] = + * List(Symbol.newMethod(cls, "foo", MethodType(Nil)(_ => Nil, _ => TypeRepr.of[Unit]))) + * val parents = List(TypeTree.of[Object]) + * val cls = Symbol.newClass( + * Symbol.spliceOwner, + * name, + * parents = _ => parents.map(_.tpe), + * decls, + * selfType = None, + * clsFlags = Flags.EmptyFlags, + * Symbol.noSymbol, + * List(("idx", TypeRepr.of[Int]), ("str", TypeRepr.of[String])) + * ) + * + * val fooSym = cls.declaredMethod("foo").head + * val idxSym = cls.fieldMember("idx") + * val strSym = cls.fieldMember("str") + * val fooDef = DefDef(fooSym, argss => + * Some('{println(s"Foo method call with (${${Ref(idxSym).asExpr}}, ${${Ref(strSym).asExpr}})")}.asTerm) + * ) + * val clsDef = ClassDef(cls, parents, body = List(fooDef)) + * val newCls = Apply(Select(New(TypeIdent(cls)), cls.primaryConstructor), List('{0}.asTerm, '{string}.asTerm)) + * + * Block(List(clsDef), Apply(Select(newCls, cls.methodMember("foo")(0)), Nil)).asExprOf[Unit] + * ``` + * construct the equivalent to + * ``` + * '{ + * class myClass(idx: Int, str: String) extends Object { + * def foo() = + * println(s"Foo method call with $idx, $str") + * } + * new myClass(0, "string").foo() + * } + * ``` + * @param owner The owner of the class + * @param name The name of the class + * @param parents Function returning the parent classes of the class. The first parent must not be a trait. + * Takes the constructed class symbol as an argument. Calling `cls.typeRef.asType` as part of this function will lead to cyclic reference errors. + * @param clsFlags extra flags with which the class symbol should be constructed. + * @param clsPrivateWithin the symbol within which this new class symbol should be private. May be noSymbol. + * @param conParams constructor parameter pairs of names and types. + * + * Parameters assigned by the constructor can be obtained via `classSymbol.memberField`. + * This symbol starts without an accompanying definition. + * It is the meta-programmer's responsibility to provide exactly one corresponding definition by passing + * this symbol to the ClassDef constructor. + * + * @note As a macro can only splice code into the point at which it is expanded, all generated symbols must be + * direct or indirect children of the reflection context's owner. + */ + @experimental def newClass( + owner: Symbol, + name: String, + parents: Symbol => List[TypeRepr], + decls: Symbol => List[Symbol], + selfType: Option[TypeRepr], + clsFlags: Flags, + clsPrivateWithin: Symbol, + conParams: List[(String, TypeRepr)] + ): Symbol + + /** Generates a new class symbol with a constructor of the shape signified by a passed PolyOrMethod parameter. 
+ * + * Example usage: + * ``` + * val name = "myClass" + * def decls(cls: Symbol): List[Symbol] = + * List(Symbol.newMethod(cls, "getParam", MethodType(Nil)(_ => Nil, _ => cls.typeMember("T").typeRef))) + * val conMethodType = + * (classType: TypeRepr) => PolyType(List("T"))(_ => List(TypeBounds.empty), polyType => + * MethodType(List("param"))((_: MethodType) => List(polyType.param(0)), (_: MethodType) => + * AppliedType(classType, List(polyType.param(0))) + * ) + * ) + * val cls = Symbol.newClass( + * Symbol.spliceOwner, + * name, + * parents = _ => List(TypeRepr.of[Object]), + * decls, + * selfType = None, + * clsFlags = Flags.EmptyFlags, + * clsPrivateWithin = Symbol.noSymbol, + * clsAnnotations = Nil, + * conMethodType, + * conFlags = Flags.EmptyFlags, + * conPrivateWithin = Symbol.noSymbol, + * conParamFlags = List(List(Flags.EmptyFlags), List(Flags.EmptyFlags)), + * conParamPrivateWithins = List(List(Symbol.noSymbol), List(Symbol.noSymbol)) + * ) + * + * val getParamSym = cls.declaredMethod("getParam").head + * def getParamRhs(): Option[Term] = + * val paramValue = This(cls).select(cls.fieldMember("param")).asExpr + * Some('{ println("Calling getParam"); $paramValue }.asTerm) + * val getParamDef = DefDef(getParamSym, _ => getParamRhs()) + * + * val clsDef = ClassDef(cls, List(TypeTree.of[Object]), body = List(getParamDef)) + * val newCls = + * Apply( + * Select( + * Apply( + * TypeApply(Select(New(TypeIdent(cls)), cls.primaryConstructor), List(TypeTree.of[String])), + * List(Expr("test").asTerm) + * ), + * cls.methodMember("getParam").head + * ), + * Nil + * ) + * + * Block(List(clsDef), newCls).asExpr + * ``` + * constructs the equivalent to + * ``` + * '{ + * class myClass[T](val param: T) extends Object { + * def getParam: T = + * println("Calling getParam") + * param + * } + * new myClass[String]("test").getParam() + * } + * ``` + * + * @param owner The owner of the class + * @param name The name of the class + * @param parents Function returning the parent classes of the class. The first parent must not be a trait + * Takes the constructed class symbol as an argument. Calling `cls.typeRef.asType` as part of this function will lead to cyclic reference errors. + * @param decls The member declarations of the class provided the symbol of this class + * @param selfType The self type of the class if it has one + * @param clsFlags extra flags with which the class symbol should be constructed. Can be `Private` | `Protected` | `PrivateLocal` | `Local` | `Final` | `Trait` | `Abstract` | `Open` + * @param clsPrivateWithin the symbol within which this new class symbol should be private. May be noSymbol + * @param clsAnnotations annotations of the class + * @param conMethodType Function returning MethodOrPoly type representing the type of the constructor. + * Takes the result type as parameter which must be returned from the innermost MethodOrPoly and have type parameters applied if those are used. + * PolyType may only represent the first clause of the constructor. + * @param conFlags extra flags with which the constructor symbol should be constructed. Can be `Synthetic` | `Method` | `Private` | `Protected` | `PrivateLocal` | `Local` + * @param conPrivateWithin the symbol within which the constructor for this new class symbol should be private. May be noSymbol. + * @param conParamFlags extra flags with which the constructor parameter symbols should be constructed. Must match the shape of `conMethodType`. 
+ * For type parameters those can be `Param` | `Deferred` | `Private` | `PrivateLocal` | `Local`. + * For term parameters those can be `ParamAccessor` | `Private` | `Protected` | `PrivateLocal` | `Local` + * @param conParamPrivateWithins the symbols within which the constructor parameters should be private. Must match the shape of `conMethodType`. Can consist of noSymbol. + * + * Term and type parameters assigned by the constructor can be obtained via `classSymbol.memberField`/`classSymbol.memberType`. + * This symbol starts without an accompanying definition. + * It is the meta-programmer's responsibility to provide exactly one corresponding definition by passing + * this symbol to the ClassDef constructor. + * + * @note As a macro can only splice code into the point at which it is expanded, all generated symbols must be + * direct or indirect children of the reflection context's owner. + */ + // Keep doc aligned with QuotesImpl's validFlags: `clsFlags` with `validClassFlags`, `conFlags` with `validClassConstructorFlags`, + // conParamFlags with `validClassTypeParamFlags` and `validClassTermParamFlags` + @experimental def newClass( + owner: Symbol, + name: String, + parents: Symbol => List[TypeRepr], + decls: Symbol => List[Symbol], + selfType: Option[TypeRepr], + clsFlags: Flags, + clsPrivateWithin: Symbol, + clsAnnotations: List[Term], + conMethodType: TypeRepr => MethodOrPoly, + conFlags: Flags, + conPrivateWithin: Symbol, + conParamFlags: List[List[Flags]], + conParamPrivateWithins: List[List[Symbol]] + ): Symbol /** Generates a new module symbol with an associated module class symbol, * this is equivalent to an `object` declaration in source code. @@ -3874,7 +4051,7 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => * def decls(cls: Symbol): List[Symbol] = * List(Symbol.newMethod(cls, "run", MethodType(Nil)(_ => Nil, _ => TypeRepr.of[Unit]), Flags.EmptyFlags, Symbol.noSymbol)) * - * val mod = Symbol.newModule(Symbol.spliceOwner, moduleName, Flags.EmptyFlags, Flags.EmptyFlags, parents.map(_.tpe), decls, Symbol.noSymbol) + * val mod = Symbol.newModule(Symbol.spliceOwner, moduleName, Flags.EmptyFlags, Flags.EmptyFlags, _ => parents.map(_.tpe), decls, Symbol.noSymbol) * val cls = mod.moduleClass * val runSym = cls.declaredMethod("run").head * @@ -3902,7 +4079,7 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => * @param name The name of the class * @param modFlags extra flags with which the module symbol should be constructed * @param clsFlags extra flags with which the module class symbol should be constructed - * @param parents The parent classes of the class. The first parent must not be a trait. + * @param parents A function that takes the symbol of the module class as input and returns the parent classes of the class. The first parent must not be a trait. * @param decls A function that takes the symbol of the module class as input and return the symbols of its declared members * @param privateWithin the symbol within which this new method symbol should be private. May be noSymbol. 
* @@ -3915,7 +4092,7 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => * * @syntax markdown */ - @experimental def newModule(owner: Symbol, name: String, modFlags: Flags, clsFlags: Flags, parents: List[TypeRepr], decls: Symbol => List[Symbol], privateWithin: Symbol): Symbol + @experimental def newModule(owner: Symbol, name: String, modFlags: Flags, clsFlags: Flags, parents: Symbol => List[TypeRepr], decls: Symbol => List[Symbol], privateWithin: Symbol): Symbol /** Generates a new method symbol with the given parent, name and type. * diff --git a/library/src/scala/runtime/LazyVals.scala b/library/src/scala/runtime/LazyVals.scala index 9959f99f6e17..deea2201414a 100644 --- a/library/src/scala/runtime/LazyVals.scala +++ b/library/src/scala/runtime/LazyVals.scala @@ -26,7 +26,7 @@ object LazyVals { } private val base: Int = { - val processors = java.lang.Runtime.getRuntime.nn.availableProcessors() + val processors = java.lang.Runtime.getRuntime.availableProcessors() 8 * processors * processors } diff --git a/library/src/scala/runtime/coverage/Invoker.scala b/library/src/scala/runtime/coverage/Invoker.scala index b3216ec37c67..98f71076b2f6 100644 --- a/library/src/scala/runtime/coverage/Invoker.scala +++ b/library/src/scala/runtime/coverage/Invoker.scala @@ -51,6 +51,6 @@ object Invoker { @nowarn("cat=deprecation") def measurementFile(dataDir: String): File = new File( dataDir, - MeasurementsPrefix + runtimeUUID + "." + Thread.currentThread.nn.getId + MeasurementsPrefix + runtimeUUID + "." + Thread.currentThread.getId ) } diff --git a/library/src/scala/runtime/stdLibPatches/Predef.scala b/library/src/scala/runtime/stdLibPatches/Predef.scala index 996f68d4e122..bfe554b3e735 100644 --- a/library/src/scala/runtime/stdLibPatches/Predef.scala +++ b/library/src/scala/runtime/stdLibPatches/Predef.scala @@ -56,12 +56,12 @@ object Predef: /** Enables an expression of type `T|Null`, where `T` is a subtype of `AnyRef`, to be checked for `null` * using `eq` rather than only `==`. This is needed because `Null` no longer has * `eq` or `ne` methods, only `==` and `!=` inherited from `Any`. */ - inline def eq(inline y: AnyRef | Null): Boolean = + inline infix def eq(inline y: AnyRef | Null): Boolean = x.asInstanceOf[AnyRef] eq y.asInstanceOf[AnyRef] /** Enables an expression of type `T|Null`, where `T` is a subtype of `AnyRef`, to be checked for `null` * using `ne` rather than only `!=`. This is needed because `Null` no longer has * `eq` or `ne` methods, only `==` and `!=` inherited from `Any`. 
*/ - inline def ne(inline y: AnyRef | Null): Boolean = + inline infix def ne(inline y: AnyRef | Null): Boolean = !(x eq y) extension (opt: Option.type) diff --git a/library/src/scala/runtime/stdLibPatches/language.scala b/library/src/scala/runtime/stdLibPatches/language.scala index 556df0e2759a..0f5e904e29bb 100644 --- a/library/src/scala/runtime/stdLibPatches/language.scala +++ b/library/src/scala/runtime/stdLibPatches/language.scala @@ -140,7 +140,7 @@ object language: * @see [[https://github.com/scala/improvement-proposals/pull/79]] */ @compileTimeOnly("`betterFors` can only be used at compile time in import statements") - @deprecated("The `experimental.betterFors` language import is no longer needed since the feature is now standard", since = "3.7") + @deprecated("The `experimental.betterFors` language import no longer has any effect; the feature is being stabilised and can be enabled using the `-preview` flag", since = "3.7") object betterFors /** Experimental support for package object values @@ -222,6 +222,12 @@ object language: @compileTimeOnly("`future-migration` can only be used at compile time in import statements") object `future-migration` + /** Set source version to 2.13. Effectively, this doesn't change the source language, + * but rather adapts the generated code as if it was compiled with Scala 2.13 + */ + @compileTimeOnly("`2.13` can only be used at compile time in import statements") + private[scala] object `2.13` + /** Set source version to 3.0-migration. * * @see [[https://docs.scala-lang.org/scala3/guides/migration/compatibility-intro.html]] diff --git a/library/src/scala/util/FromDigits.scala b/library/src/scala/util/FromDigits.scala index cb73782829ff..44532649868d 100644 --- a/library/src/scala/util/FromDigits.scala +++ b/library/src/scala/util/FromDigits.scala @@ -135,7 +135,7 @@ object FromDigits { case ex: NumberFormatException => throw MalformedNumber() } if (x.isInfinite) throw NumberTooLarge() - if (x == 0.0f && !zeroFloat.pattern.matcher(digits).nn.matches) throw NumberTooSmall() + if (x == 0.0f && !zeroFloat.pattern.matcher(digits).matches) throw NumberTooSmall() x } diff --git a/presentation-compiler-testcases/src/tests/macros/metals7460.scala b/presentation-compiler-testcases/src/tests/macros/metals7460.scala new file mode 100644 index 000000000000..0b9b1ca494b2 --- /dev/null +++ b/presentation-compiler-testcases/src/tests/macros/metals7460.scala @@ -0,0 +1,20 @@ +package tests.macros + +import scala.quoted.* + +object Macros7460 { + + transparent inline def foo: String = + ${ fooImpl } + + private def fooImpl(using Quotes): Expr[String] = + Expr("foo...") + + transparent inline def bar: String = + ${ barImpl } + + private def barImpl(using Quotes): Expr[String] = + quotes.reflect.Position.ofMacroExpansion.sourceFile.getJPath.get // this line is the culprit + Expr("bar...") + +} diff --git a/presentation-compiler/src/main/dotty/tools/pc/ApplyArgsExtractor.scala b/presentation-compiler/src/main/dotty/tools/pc/ApplyArgsExtractor.scala new file mode 100644 index 000000000000..9384a0b43e8b --- /dev/null +++ b/presentation-compiler/src/main/dotty/tools/pc/ApplyArgsExtractor.scala @@ -0,0 +1,269 @@ +package dotty.tools.pc + +import scala.util.Try + +import dotty.tools.dotc.ast.Trees.ValDef +import dotty.tools.dotc.ast.tpd.* +import dotty.tools.dotc.core.Contexts.Context +import dotty.tools.dotc.core.Flags +import dotty.tools.dotc.core.Flags.Method +import dotty.tools.dotc.core.Names.Name +import dotty.tools.dotc.core.StdNames.* +import
dotty.tools.dotc.core.SymDenotations.NoDenotation +import dotty.tools.dotc.core.Symbols.defn +import dotty.tools.dotc.core.Symbols.NoSymbol +import dotty.tools.dotc.core.Symbols.Symbol +import dotty.tools.dotc.core.Types.* +import dotty.tools.pc.IndexedContext +import dotty.tools.pc.utils.InteractiveEnrichments.* +import scala.annotation.tailrec +import dotty.tools.dotc.core.Denotations.SingleDenotation +import dotty.tools.dotc.core.Denotations.MultiDenotation +import dotty.tools.dotc.util.Spans.Span + +object ApplyExtractor: + def unapply(path: List[Tree])(using Context): Option[Apply] = + path match + case ValDef(_, _, _) :: Block(_, app: Apply) :: _ + if !app.fun.isInfix => Some(app) + case rest => + def getApplyForContextFunctionParam(path: List[Tree]): Option[Apply] = + path match + // fun(arg@@) + case (app: Apply) :: _ => Some(app) + // fun(arg@@), where fun(argn: Context ?=> SomeType) + // recursively matched for multiple context arguments, e.g. Context1 ?=> Context2 ?=> SomeType + case (_: DefDef) :: Block(List(_), _: Closure) :: rest => + getApplyForContextFunctionParam(rest) + case _ => None + for + app <- getApplyForContextFunctionParam(rest) + if !app.fun.isInfix + yield app + end match + + +object ApplyArgsExtractor: + def getArgsAndParams( + optIndexedContext: Option[IndexedContext], + apply: Apply, + span: Span + )(using Context): List[(List[Tree], List[ParamSymbol])] = + def collectArgss(a: Apply): List[List[Tree]] = + def stripContextFuntionArgument(argument: Tree): List[Tree] = + argument match + case Block(List(d: DefDef), _: Closure) => + d.rhs match + case app: Apply => + app.args + case b @ Block(List(_: DefDef), _: Closure) => + stripContextFuntionArgument(b) + case _ => Nil + case v => List(v) + + val args = a.args.flatMap(stripContextFuntionArgument) + a.fun match + case app: Apply => collectArgss(app) :+ args + case _ => List(args) + end collectArgss + + val method = apply.fun + + val argss = collectArgss(apply) + + def fallbackFindApply(sym: Symbol) = + sym.info.member(nme.apply) match + case NoDenotation => Nil + case den => List(den.symbol) + + // fallback for when multiple overloaded methods match the supplied args + def fallbackFindMatchingMethods() = + def matchingMethodsSymbols( + indexedContext: IndexedContext, + method: Tree + ): List[Symbol] = + method match + case Ident(name) => indexedContext.findSymbol(name).getOrElse(Nil) + case Select(This(_), name) => indexedContext.findSymbol(name).getOrElse(Nil) + case sel @ Select(from, name) => + val symbol = from.symbol + val ownerSymbol = + if symbol.is(Method) && symbol.owner.isClass then + Some(symbol.owner) + else Try(symbol.info.classSymbol).toOption + ownerSymbol.map(sym => sym.info.member(name)).collect{ + case single: SingleDenotation => List(single.symbol) + case multi: MultiDenotation => multi.allSymbols + }.getOrElse(Nil) + case Apply(fun, _) => matchingMethodsSymbols(indexedContext, fun) + case _ => Nil + val matchingMethods = + for + indexedContext <- optIndexedContext.toList + potentialMatch <- matchingMethodsSymbols(indexedContext, method) + if potentialMatch.is(Flags.Method) && + potentialMatch.vparamss.length >= argss.length && + Try(potentialMatch.isAccessibleFrom(apply.symbol.info)).toOption + .getOrElse(false) && + potentialMatch.vparamss + .zip(argss) + .reverse + .zipWithIndex + .forall { case (pair, index) => + FuzzyArgMatcher(potentialMatch.tparams) + .doMatch(allArgsProvided = index != 0, span) + .tupled(pair) + } + yield potentialMatch + matchingMethods + end 
fallbackFindMatchingMethods + + val matchingMethods: List[Symbol] = + if method.symbol.paramSymss.nonEmpty then + val allArgsAreSupplied = + val vparamss = method.symbol.vparamss + vparamss.length == argss.length && vparamss + .zip(argss) + .lastOption + .exists { case (baseParams, baseArgs) => + baseArgs.length == baseParams.length + } + // ``` + // m(arg : Int) + // m(arg : Int, anotherArg : Int) + // m(a@@) + // ``` + // compiler will choose the first `m`, so we need to manually look for the other one + if allArgsAreSupplied then + val foundPotential = fallbackFindMatchingMethods() + if foundPotential.contains(method.symbol) then foundPotential + else method.symbol :: foundPotential + else List(method.symbol) + else if method.symbol.is(Method) || method.symbol == NoSymbol then + fallbackFindMatchingMethods() + else fallbackFindApply(method.symbol) + end if + end matchingMethods + + matchingMethods.map { methodSym => + val vparamss = methodSym.vparamss + + // get params and args we are interested in + // e.g. + // in the following case, the interesting args and params are + // - params: [apple, banana] + // - args: [apple, b] + // ``` + // def curry(x: Int)(apple: String, banana: String) = ??? + // curry(1)(apple = "test", b@@) + // ``` + val (baseParams0, baseArgs) = + vparamss.zip(argss).lastOption.getOrElse((Nil, Nil)) + + val baseParams: List[ParamSymbol] = + def defaultBaseParams = baseParams0.map(JustSymbol(_)) + @tailrec + def getRefinedParams(refinedType: Type, level: Int): List[ParamSymbol] = + if level > 0 then + val resultTypeOpt = + refinedType match + case RefinedType(AppliedType(_, args), _, _) => args.lastOption + case AppliedType(_, args) => args.lastOption + case _ => None + resultTypeOpt match + case Some(resultType) => getRefinedParams(resultType, level - 1) + case _ => defaultBaseParams + else + refinedType match + case RefinedType(AppliedType(_, args), _, MethodType(ri)) => + baseParams0.zip(ri).zip(args).map { case ((sym, name), arg) => + RefinedSymbol(sym, name, arg) + } + case _ => defaultBaseParams + // finds param refinements for lambda expressions + // val hello: (x: Int, y: Int) => Unit = (x, _) => println(x) + @tailrec + def refineParams(method: Tree, level: Int): List[ParamSymbol] = + method match + case Select(Apply(f, _), _) => refineParams(f, level + 1) + case Select(h, name) => + // for Select(foo, name = apply) we want `foo.symbol` + if name == nme.apply then getRefinedParams(h.symbol.info, level) + else getRefinedParams(method.symbol.info, level) + case Apply(f, _) => + refineParams(f, level + 1) + case _ => getRefinedParams(method.symbol.info, level) + refineParams(method, 0) + end baseParams + (baseArgs, baseParams) + } + + extension (method: Symbol) + def vparamss(using Context) = method.filteredParamss(_.isTerm) + def tparams(using Context) = method.filteredParamss(_.isType).flatten + def filteredParamss(f: Symbol => Boolean)(using Context) = + method.paramSymss.filter(params => params.forall(f)) +sealed trait ParamSymbol: + def name: Name + def info: Type + def symbol: Symbol + def nameBackticked(using Context) = name.decoded.backticked + +case class JustSymbol(symbol: Symbol)(using Context) extends ParamSymbol: + def name: Name = symbol.name + def info: Type = symbol.info + +case class RefinedSymbol(symbol: Symbol, name: Name, info: Type) + extends ParamSymbol + + +class FuzzyArgMatcher(tparams: List[Symbol])(using Context): + + /** + * A heuristic for checking whether the passed arguments match the method's parameter types.
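+   * (e.g. it decides whether a partially typed call like `f(1, @@)` could still resolve to an assumed overload `def f(x: Int, s: String)`)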
+ * For non-polymorphic methods we use the subtype relation (`<:<`) + * and for polymorphic methods we use a heuristic. + * We check the args types not the result type. + */ + def doMatch( + allArgsProvided: Boolean, + span: Span + )(expectedArgs: List[Symbol], actualArgs: List[Tree]) = + (expectedArgs.length == actualArgs.length || + (!allArgsProvided && expectedArgs.length >= actualArgs.length)) && + actualArgs.zipWithIndex.forall { + case (arg: Ident, _) if arg.span.contains(span) => true + case (NamedArg(name, arg), _) => + expectedArgs.exists { expected => + expected.name == name && (!arg.hasType || arg.typeOpt.unfold + .fuzzyArg_<:<(expected.info)) + } + case (arg, i) => + !arg.hasType || arg.typeOpt.unfold.fuzzyArg_<:<(expectedArgs(i).info) + } + + extension (arg: Type) + def fuzzyArg_<:<(expected: Type) = + if tparams.isEmpty then arg <:< expected + else arg <:< substituteTypeParams(expected) + def unfold = + arg match + case arg: TermRef => arg.underlying + case e => e + + private def substituteTypeParams(t: Type): Type = + t match + case e if tparams.exists(_ == e.typeSymbol) => + val matchingParam = tparams.find(_ == e.typeSymbol).get + matchingParam.info match + case b @ TypeBounds(_, _) => WildcardType(b) + case _ => WildcardType + case o @ OrType(e1, e2) => + OrType(substituteTypeParams(e1), substituteTypeParams(e2), o.isSoft) + case AndType(e1, e2) => + AndType(substituteTypeParams(e1), substituteTypeParams(e2)) + case AppliedType(et, eparams) => + AppliedType(et, eparams.map(substituteTypeParams)) + case _ => t + +end FuzzyArgMatcher diff --git a/presentation-compiler/src/main/dotty/tools/pc/AutoImports.scala b/presentation-compiler/src/main/dotty/tools/pc/AutoImports.scala index 1b44dce8c642..7b30c745e3ed 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/AutoImports.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/AutoImports.scala @@ -40,7 +40,7 @@ object AutoImports: case class Select(qual: SymbolIdent, name: String) extends SymbolIdent: def value: String = s"${qual.value}.$name" - def direct(name: String): SymbolIdent = Direct(name) + def direct(name: String)(using Context): SymbolIdent = Direct(name) def fullIdent(symbol: Symbol)(using Context): SymbolIdent = val symbols = symbol.ownersIterator.toList @@ -70,7 +70,7 @@ object AutoImports: importSel: Option[ImportSel] ): - def name: String = ident.value + def name(using Context): String = ident.value object SymbolImport: @@ -189,10 +189,13 @@ object AutoImports: ownerImport.importSel, ) else - ( - SymbolIdent.direct(symbol.nameBackticked), - Some(ImportSel.Direct(symbol)), - ) + renames(symbol) match + case Some(rename) => (SymbolIdent.direct(rename), None) + case None => + ( + SymbolIdent.direct(symbol.nameBackticked), + Some(ImportSel.Direct(symbol)), + ) end val SymbolImport( @@ -223,9 +226,13 @@ object AutoImports: importSel ) case None => + val reverse = symbol.ownersIterator.toList.reverse + val fullName = reverse.drop(1).foldLeft(SymbolIdent.direct(reverse.head.nameBackticked)){ + case (acc, sym) => SymbolIdent.Select(acc, sym.nameBackticked(false)) + } SymbolImport( symbol, - SymbolIdent.direct(symbol.fullNameBackticked), + SymbolIdent.Direct(symbol.fullNameBackticked), None ) end match @@ -252,7 +259,6 @@ object AutoImports: val topPadding = if importPosition.padTop then "\n" else "" - val formatted = imports .map { case ImportSel.Direct(sym) => importName(sym) @@ -267,15 +273,16 @@ object AutoImports: end renderImports private def importName(sym: Symbol): String = - if 
indexedContext.importContext.toplevelClashes(sym) then + if indexedContext.toplevelClashes(sym, inImportScope = true) then s"_root_.${sym.fullNameBackticked(false)}" else sym.ownersIterator.zipWithIndex.foldLeft((List.empty[String], false)) { case ((acc, isDone), (sym, idx)) => if(isDone || sym.isEmptyPackage || sym.isRoot) (acc, true) else indexedContext.rename(sym) match - case Some(renamed) => (renamed :: acc, true) - case None if !sym.isPackageObject => (sym.nameBackticked(false) :: acc, false) - case None => (acc, false) + // we can't import first part + case Some(renamed) if idx != 0 => (renamed :: acc, true) + case _ if !sym.isPackageObject => (sym.nameBackticked(false) :: acc, false) + case _ => (acc, false) }._1.mkString(".") end AutoImportsGenerator diff --git a/presentation-compiler/src/main/dotty/tools/pc/AutoImportsProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/AutoImportsProvider.scala index e35556ad11c9..97ec396abcf1 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/AutoImportsProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/AutoImportsProvider.scala @@ -4,12 +4,13 @@ import java.nio.file.Paths import scala.collection.mutable import scala.jdk.CollectionConverters.* -import scala.meta.internal.metals.ReportContext +import scala.meta.pc.reports.ReportContext import scala.meta.internal.pc.AutoImportsResultImpl import scala.meta.pc.* import dotty.tools.dotc.ast.tpd.* import dotty.tools.dotc.core.Symbols.* +import dotty.tools.dotc.core.StdNames.* import dotty.tools.dotc.interactive.Interactive import dotty.tools.dotc.interactive.InteractiveDriver import dotty.tools.dotc.util.SourceFile @@ -17,6 +18,7 @@ import dotty.tools.pc.completions.CompletionPos import dotty.tools.pc.utils.InteractiveEnrichments.* import org.eclipse.lsp4j as l +import dotty.tools.dotc.core.Flags.Method final class AutoImportsProvider( search: SymbolSearch, @@ -42,11 +44,22 @@ final class AutoImportsProvider( val path = Interactive.pathTo(newctx.compilationUnit.tpdTree, pos.span)(using newctx) - val indexedContext = IndexedContext( - Interactive.contextOfPath(path)(using newctx) + val indexedContext = IndexedContext(pos)( + using Interactive.contextOfPath(path)(using newctx) ) import indexedContext.ctx + + def correctInTreeContext(sym: Symbol) = path match + case (_: Ident) :: (sel: Select) :: _ => + sym.info.allMembers.exists(_.name == sel.name) + case (_: Ident) :: (_: Apply) :: _ if !sym.is(Method) => + def applyInObject = + sym.companionModule.info.allMembers.exists(_.name == nme.apply) + def applyInClass = sym.info.allMembers.exists(_.name == nme.apply) + applyInClass || applyInObject + case _ => true + val isSeen = mutable.Set.empty[String] val symbols = List.newBuilder[Symbol] def visit(sym: Symbol): Boolean = @@ -83,20 +96,31 @@ final class AutoImportsProvider( text, tree, unit.comments, - indexedContext.importContext, + indexedContext, config ) (sym: Symbol) => generator.forSymbol(sym) end match end mkEdit - for + val all = for sym <- results edits <- mkEdit(sym) - yield AutoImportsResultImpl( + yield (AutoImportsResultImpl( sym.owner.showFullName, edits.asJava - ) + ), sym) + + all match + case (onlyResult, _) :: Nil => List(onlyResult) + case Nil => Nil + case moreResults => + val moreExact = moreResults.filter { case (_, sym) => + correctInTreeContext(sym) + } + if moreExact.nonEmpty then moreExact.map(_._1) + else moreResults.map(_._1) + else List.empty end if end autoImports diff --git 
a/presentation-compiler/src/main/dotty/tools/pc/CompilerSearchVisitor.scala b/presentation-compiler/src/main/dotty/tools/pc/CompilerSearchVisitor.scala index 9fb84ee1f513..f2e17415138a 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/CompilerSearchVisitor.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/CompilerSearchVisitor.scala @@ -4,7 +4,7 @@ import java.util.logging.Level import java.util.logging.Logger import scala.meta.internal.metals.Report -import scala.meta.internal.metals.ReportContext +import scala.meta.pc.reports.ReportContext import scala.meta.pc.* import scala.util.control.NonFatal @@ -35,7 +35,7 @@ class CompilerSearchVisitor( false case NonFatal(e) => reports.incognito.create( - Report( + () => Report( "is_public", s"""Symbol: $sym""".stripMargin, e @@ -105,6 +105,6 @@ class CompilerSearchVisitor( override def isCancelled: Boolean = false private def normalizePackage(pkg: String): String = - pkg.replace("/", ".").nn.stripSuffix(".") + pkg.replace("/", ".").stripSuffix(".") end CompilerSearchVisitor diff --git a/presentation-compiler/src/main/dotty/tools/pc/CompletionItemResolver.scala b/presentation-compiler/src/main/dotty/tools/pc/CompletionItemResolver.scala index ffd7377c8181..f7fdb1c36e6d 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/CompletionItemResolver.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/CompletionItemResolver.scala @@ -62,7 +62,7 @@ object CompletionItemResolver extends ItemResolver: if companion == NoSymbol || gsym.is(JavaDefined) then if gsymDoc.isEmpty() then if gsym.isAliasType then - fullDocstring(gsym.info.deepDealias.typeSymbol, search) + fullDocstring(gsym.info.deepDealiasAndSimplify.typeSymbol, search) else if gsym.is(Method) then gsym.info.finalResultType match case tr @ TermRef(_, sym) => diff --git a/presentation-compiler/src/main/dotty/tools/pc/ExtractMethodProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/ExtractMethodProvider.scala index c72a0602f1ce..bd44878aa11a 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/ExtractMethodProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/ExtractMethodProvider.scala @@ -2,7 +2,7 @@ package dotty.tools.pc import java.nio.file.Paths -import scala.meta.internal.metals.ReportContext +import scala.meta.pc.reports.ReportContext import scala.meta.internal.pc.ExtractMethodUtils import scala.meta.pc.OffsetParams import scala.meta.pc.RangeParams @@ -51,7 +51,7 @@ final class ExtractMethodProvider( given locatedCtx: Context = val newctx = driver.currentCtx.fresh.setCompilationUnit(unit) Interactive.contextOfPath(path)(using newctx) - val indexedCtx = IndexedContext(locatedCtx) + val indexedCtx = IndexedContext(pos)(using locatedCtx) val printer = ShortenedTypePrinter(search, IncludeDefaultParam.Never)(using indexedCtx) def prettyPrint(tpe: Type) = diff --git a/presentation-compiler/src/main/dotty/tools/pc/HoverProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/HoverProvider.scala index 3b2f4d2aa9b0..c55a8a0210be 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/HoverProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/HoverProvider.scala @@ -3,7 +3,7 @@ package dotty.tools.pc import java.util as ju import scala.meta.internal.metals.Report -import scala.meta.internal.metals.ReportContext +import scala.meta.pc.reports.ReportContext import scala.meta.internal.pc.ScalaHover import scala.meta.pc.ContentType import scala.meta.pc.HoverSignature @@ -49,7 +49,7 @@ object HoverProvider: val path = unit 
.map(unit => Interactive.pathTo(unit.tpdTree, pos.span)) .getOrElse(Interactive.pathTo(driver.openedTrees(uri), pos)) - val indexedContext = IndexedContext(ctx) + val indexedContext = IndexedContext(pos)(using ctx) def typeFromPath(path: List[Tree]) = if path.isEmpty then NoType else path.head.typeOpt @@ -88,7 +88,7 @@ object HoverProvider: s"$uri::$posId" ) end report - reportContext.unsanitized.create(report, ifVerbose = true) + reportContext.unsanitized.create(() => report, /*ifVerbose =*/ true) ju.Optional.empty().nn else val skipCheckOnName = @@ -96,7 +96,7 @@ object HoverProvider: val printerCtx = Interactive.contextOfPath(path) val printer = ShortenedTypePrinter(search, IncludeDefaultParam.Include)( - using IndexedContext(printerCtx) + using IndexedContext(pos)(using printerCtx) ) MetalsInteractive.enclosingSymbolsWithExpressionType( enclosing, @@ -110,22 +110,23 @@ object HoverProvider: if symbol.name == nme.selectDynamic || symbol.name == nme.applyDynamic => fallbackToDynamics(path, printer, contentType) case symbolTpes @ ((symbol, tpe, None) :: _) => - val exprTpw = tpe.widenTermRefExpr.deepDealias + val exprTpw = tpe.widenTermRefExpr.deepDealiasAndSimplify val hoverString = tpw match // https://github.com/scala/scala3/issues/8891 case tpw: ImportType => printer.hoverSymbol(symbol, symbol.paramRef) case _ => - val (tpe, sym) = + val (innerTpe, sym) = if symbol.isType then (symbol.typeRef, symbol) else enclosing.head.seenFrom(symbol) val finalTpe = - if tpe != NoType then tpe + if tpe.isNamedTupleType then tpe.widenTermRefExpr + else if innerTpe != NoType then innerTpe else tpw - printer.hoverSymbol(sym, finalTpe.deepDealias) + printer.hoverSymbol(sym, finalTpe.deepDealiasAndSimplify) end match end hoverString @@ -134,7 +135,7 @@ object HoverProvider: .map(_.docstring()) .mkString("\n") - val expresionTypeOpt = + val expresionTypeOpt = if symbol.name == StdNames.nme.??? 
then InferExpectedType(search, driver, params).infer() else printer.expressionType(exprTpw) @@ -161,7 +162,7 @@ object HoverProvider: ju.Optional.empty().nn end match case (_, tpe, Some(namedTupleArg)) :: _ => - val exprTpw = tpe.widenTermRefExpr.deepDealias + val exprTpw = tpe.widenTermRefExpr.deepDealiasAndSimplify printer.expressionType(exprTpw) match case Some(tpe) => ju.Optional.of( @@ -194,7 +195,7 @@ object HoverProvider: val resultType = rest match case Select(_, asInstanceOf) :: TypeApply(_, List(tpe)) :: _ if asInstanceOf == nme.asInstanceOfPM => - tpe.tpe.widenTermRefExpr.deepDealias + tpe.tpe.widenTermRefExpr.deepDealiasAndSimplify case _ if n == nme.selectDynamic => tpe.resultType case _ => tpe @@ -220,9 +221,9 @@ object HoverProvider: findRefinement(parent) case _ => None - val refTpe = sel.typeOpt.widen.deepDealias match + val refTpe = sel.typeOpt.widen.deepDealiasAndSimplify match case r: RefinedType => Some(r) - case t: (TermRef | TypeProxy) => Some(t.termSymbol.info.deepDealias) + case t: (TermRef | TypeProxy) => Some(t.termSymbol.info.deepDealiasAndSimplify) case _ => None refTpe.flatMap(findRefinement).asJava diff --git a/presentation-compiler/src/main/dotty/tools/pc/IndexedContext.scala b/presentation-compiler/src/main/dotty/tools/pc/IndexedContext.scala index 7c2c34cf5ebb..cf22ac12a879 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/IndexedContext.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/IndexedContext.scala @@ -4,64 +4,45 @@ import scala.annotation.tailrec import scala.util.control.NonFatal import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Denotations.PreDenotation +import dotty.tools.dotc.core.Denotations.SingleDenotation import dotty.tools.dotc.core.Flags.* -import dotty.tools.dotc.core.NameOps.moduleClassName +import dotty.tools.dotc.core.NameOps.* import dotty.tools.dotc.core.Names.* import dotty.tools.dotc.core.Scopes.EmptyScope import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.core.Types.* +import dotty.tools.dotc.interactive.Completion import dotty.tools.dotc.interactive.Interactive import dotty.tools.dotc.typer.ImportInfo +import dotty.tools.dotc.util.SourcePosition import dotty.tools.pc.IndexedContext.Result import dotty.tools.pc.utils.InteractiveEnrichments.* sealed trait IndexedContext: given ctx: Context def scopeSymbols: List[Symbol] - def names: IndexedContext.Names def rename(sym: Symbol): Option[String] - def outer: IndexedContext - - def findSymbol(name: String): Option[List[Symbol]] - - final def findSymbol(name: Name): Option[List[Symbol]] = - findSymbol(name.decoded) - - final def lookupSym(sym: Symbol): Result = - findSymbol(sym.decodedName) match - case Some(symbols) if symbols.exists(_ == sym) => - Result.InScope - case Some(symbols) - if symbols.exists(s => isNotConflictingWithDefault(s, sym) || isTypeAliasOf(s, sym) || isTermAliasOf(s, sym)) => - Result.InScope - // when all the conflicting symbols came from an old version of the file + def findSymbol(name: Name, fromPrefix: Option[Type] = None): Option[List[Symbol]] + def findSymbolInLocalScope(name: String): Option[List[Symbol]] + + final def lookupSym(sym: Symbol, fromPrefix: Option[Type] = None): Result = + def all(symbol: Symbol): Set[Symbol] = Set(symbol, symbol.companionModule, symbol.companionClass, symbol.companion).filter(_ != NoSymbol) + val isRelated = all(sym) ++ all(sym.dealiasType) + findSymbol(sym.name, fromPrefix) match + case Some(symbols) if symbols.exists(isRelated) => Result.InScope + case Some(symbols) if 
symbols.exists(isTermAliasOf(_, sym)) => Result.InScope + case Some(symbols) if symbols.map(_.dealiasType).exists(isRelated) => Result.InScope case Some(symbols) if symbols.nonEmpty && symbols.forall(_.isStale) => Result.Missing case Some(symbols) if symbols.exists(rename(_).isEmpty) => Result.Conflict + case Some(symbols) => Result.InScope case _ => Result.Missing end lookupSym - /** - * Scala by default imports following packages: - * https://scala-lang.org/files/archive/spec/3.4/02-identifiers-names-and-scopes.html - * import java.lang.* - * { - * import scala.* - * { - * import Predef.* - * { /* source */ } - * } - * } - * - * This check is necessary for proper scope resolution, because when we compare symbols from - * index including the underlying type like scala.collection.immutable.List it actually - * is in current scope in form of type forwarder imported from Predef. - */ - private def isNotConflictingWithDefault(sym: Symbol, queriedSym: Symbol): Boolean = - sym.info.widenDealias =:= queriedSym.info.widenDealias && (Interactive.isImportedByDefault(sym)) - final def hasRename(sym: Symbol, as: String): Boolean = rename(sym) match - case Some(v) => v == as + case Some(v) => + v == as case None => false // detects import scope aliases like @@ -74,73 +55,94 @@ sealed trait IndexedContext: case _ => false ) - private def isTypeAliasOf(alias: Symbol, queriedSym: Symbol): Boolean = - alias.isAliasType && alias.info.deepDealias.typeSymbol == queriedSym - - final def isEmpty: Boolean = this match - case IndexedContext.Empty => true - case _ => false - - final def importContext: IndexedContext = - this match - case IndexedContext.Empty => this - case _ if ctx.owner.is(Package) => this - case _ => outer.importContext - @tailrec - final def toplevelClashes(sym: Symbol): Boolean = + final def toplevelClashes(sym: Symbol, inImportScope: Boolean): Boolean = if sym == NoSymbol || sym.owner == NoSymbol || sym.owner.isRoot then - lookupSym(sym) match - case IndexedContext.Result.Conflict => true + val possibleConflictingSymbols = findSymbolInLocalScope(sym.name.show) + // if it's import scope we only care about toplevel conflicts, not any clashes inside objects etc. 
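+            // e.g. a conflicting `Future` nested inside some object does not count here; only a toplevel `Future` (one owned by a package) can clash with the imported name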
+ val symbolClashes = if inImportScope then + // It's toplevel if it's parent is a package + possibleConflictingSymbols.filter(_.exists(_.owner.is(Package))) + else + possibleConflictingSymbols + symbolClashes match + case Some(symbols) if !symbols.contains(sym) => true case _ => false - else toplevelClashes(sym.owner) + else toplevelClashes(sym.owner, inImportScope) end IndexedContext object IndexedContext: - def apply(ctx: Context): IndexedContext = + def apply(pos: SourcePosition)(using Context): IndexedContext = ctx match case NoContext => Empty - case _ => LazyWrapper(using ctx) + case _ => LazyWrapper(pos)(using ctx) case object Empty extends IndexedContext: given ctx: Context = NoContext - def findSymbol(name: String): Option[List[Symbol]] = None + def findSymbol(name: Name, fromPrefix: Option[Type]): Option[List[Symbol]] = None + def findSymbolInLocalScope(name: String): Option[List[Symbol]] = None def scopeSymbols: List[Symbol] = List.empty - val names: Names = Names(Map.empty, Map.empty) def rename(sym: Symbol): Option[String] = None - def outer: IndexedContext = this - - class LazyWrapper(using val ctx: Context) extends IndexedContext: - val outer: IndexedContext = IndexedContext(ctx.outer) - val names: Names = extractNames(ctx) - def findSymbol(name: String): Option[List[Symbol]] = - names.symbols - .get(name) - .map(_.toList) - .orElse(outer.findSymbol(name)) + class LazyWrapper(pos: SourcePosition)(using val ctx: Context) extends IndexedContext: + + val completionContext = Completion.scopeContext(pos) + val names: Map[String, Seq[SingleDenotation]] = completionContext.names.toList.groupBy(_._1.show).map{ + case (name, denotations) => + val denots = denotations.flatMap(_._2) + val nonRoot = denots.filter(!_.symbol.owner.isRoot) + val (importedByDefault, conflictingValue) = denots.partition(denot => Interactive.isImportedByDefault(denot.symbol)) + if importedByDefault.nonEmpty && conflictingValue.nonEmpty then + name.trim -> conflictingValue + else + name.trim -> nonRoot + } + val renames = completionContext.renames + + def defaultScopes(name: Name): Option[List[Symbol]] = + List(defn.ScalaPredefModuleClass, defn.ScalaPackageClass, defn.JavaLangPackageClass) + .map(_.membersNamed(name)) + .collect { case denot if denot.exists => denot.first.symbol } + .toList match + case Nil => None + case list => Some(list) + + override def findSymbolInLocalScope(name: String): Option[List[Symbol]] = + names.get(name).map(_.map(_.symbol).toList).filter(_.nonEmpty) + def findSymbol(name: Name, fromPrefix: Option[Type]): Option[List[Symbol]] = + names + .get(name.show) + .map { denots => + def skipThisType(tp: Type): Type = tp match + case ThisType(prefix) => skipThisType(prefix) + case _ => tp + + val filteredDenots = fromPrefix match + case Some(prefix) => + val target = skipThisType(prefix) + denots.filter { denot => + denot.prefix == NoPrefix || + (denot.prefix match + case tref: TermRef => + tref.termSymbol.info <:< target + case otherPrefix => + otherPrefix <:< target + ) + } + case None => denots + + filteredDenots.map(_.symbol).toList + } + .orElse(defaultScopes(name)).filter(_.nonEmpty) def scopeSymbols: List[Symbol] = - val acc = Set.newBuilder[Symbol] - (this :: outers).foreach { ref => - acc ++= ref.names.symbols.values.flatten - } - acc.result.toList + names.values.flatten.map(_.symbol).toList def rename(sym: Symbol): Option[String] = - names.renames - .get(sym) - .orElse(outer.rename(sym)) - - private def outers: List[IndexedContext] = - val builder = 
List.newBuilder[IndexedContext] - var curr = outer - while !curr.isEmpty do - builder += curr - curr = curr.outer - builder.result + renames.get(sym).orElse(renames.get(sym.companion)).map(_.decoded) + end LazyWrapper enum Result: @@ -149,97 +151,5 @@ object IndexedContext: case InScope | Conflict => true case Missing => false - case class Names( - symbols: Map[String, List[Symbol]], - renames: Map[Symbol, String] - ) - - private def extractNames(ctx: Context): Names = - def isAccessibleFromSafe(sym: Symbol, site: Type): Boolean = - try sym.isAccessibleFrom(site, superAccess = false) - catch - case NonFatal(e) => - false - - def accessibleSymbols(site: Type, tpe: Type)(using - Context - ): List[Symbol] = - tpe.decls.toList.filter(sym => isAccessibleFromSafe(sym, site)) - - def accesibleMembers(site: Type)(using Context): List[Symbol] = - site.allMembers - .filter(denot => - try isAccessibleFromSafe(denot.symbol, site) - catch - case NonFatal(e) => - false - ) - .map(_.symbol) - .toList - - def allAccessibleSymbols( - tpe: Type, - filter: Symbol => Boolean = _ => true - )(using Context): List[Symbol] = - val initial = accessibleSymbols(tpe, tpe).filter(filter) - val fromPackageObjects = - initial - .filter(_.isPackageObject) - .flatMap(sym => accessibleSymbols(tpe, sym.thisType)) - initial ++ fromPackageObjects - - def fromImport(site: Type, name: Name)(using Context): List[Symbol] = - List( - site.member(name.toTypeName), - site.member(name.toTermName), - site.member(name.moduleClassName), - ) - .flatMap(_.alternatives) - .map(_.symbol) - - def fromImportInfo( - imp: ImportInfo - )(using Context): List[(Symbol, Option[TermName])] = - val excludedNames = imp.excluded.map(_.decoded) - - if imp.isWildcardImport then - allAccessibleSymbols( - imp.site, - sym => !excludedNames.contains(sym.name.decoded) - ).map((_, None)) - else - imp.forwardMapping.toList.flatMap { (name, rename) => - val isRename = name != rename - if !isRename && !excludedNames.contains(name.decoded) then - fromImport(imp.site, name).map((_, None)) - else if isRename then - fromImport(imp.site, name).map((_, Some(rename))) - else Nil - } - end if - end fromImportInfo - - given Context = ctx - val (symbols, renames) = - if ctx.isImportContext then - val (syms, renames) = - fromImportInfo(ctx.importInfo.nn) - .map((sym, rename) => (sym, rename.map(r => sym -> r.decoded))) - .unzip - (syms, renames.flatten.toMap) - else if ctx.owner.isClass then - val site = ctx.owner.thisType - (accesibleMembers(site), Map.empty) - else if ctx.scope != EmptyScope then (ctx.scope.toList, Map.empty) - else (List.empty, Map.empty) - - val initial = Map.empty[String, List[Symbol]] - val values = - symbols.foldLeft(initial) { (acc, sym) => - val name = sym.decodedName - val syms = acc.getOrElse(name, List.empty) - acc.updated(name, sym :: syms) - } - Names(values, renames) - end extractNames + end IndexedContext diff --git a/presentation-compiler/src/main/dotty/tools/pc/InferExpectedType.scala b/presentation-compiler/src/main/dotty/tools/pc/InferExpectedType.scala index 3d65f69621e1..2e6c7b39ba65 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/InferExpectedType.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/InferExpectedType.scala @@ -1,15 +1,11 @@ package dotty.tools.pc -import dotty.tools.dotc.ast.tpd import dotty.tools.dotc.ast.tpd.* -import dotty.tools.dotc.core.Constants.Constant import dotty.tools.dotc.core.Contexts.Context import dotty.tools.dotc.core.Flags import dotty.tools.dotc.core.StdNames -import 
dotty.tools.dotc.core.Symbols import dotty.tools.dotc.core.Symbols.defn import dotty.tools.dotc.core.Types.* -import dotty.tools.dotc.core.Types.Type import dotty.tools.dotc.interactive.Interactive import dotty.tools.dotc.interactive.InteractiveDriver import dotty.tools.dotc.typer.Applications.UnapplyArgs @@ -21,7 +17,7 @@ import dotty.tools.pc.printer.ShortenedTypePrinter import dotty.tools.pc.printer.ShortenedTypePrinter.IncludeDefaultParam import dotty.tools.pc.utils.InteractiveEnrichments.* -import scala.meta.internal.metals.ReportContext +import scala.meta.pc.reports.ReportContext import scala.meta.pc.OffsetParams import scala.meta.pc.SymbolSearch @@ -51,7 +47,7 @@ class InferExpectedType( ) val locatedCtx = Interactive.contextOfPath(tpdPath)(using newctx) - val indexedCtx = IndexedContext(locatedCtx) + val indexedCtx = IndexedContext(pos)(using locatedCtx) val printer = ShortenedTypePrinter(search, IncludeDefaultParam.ResolveLater)(using indexedCtx) InterCompletionType.inferType(path)(using newctx).map{ @@ -76,7 +72,7 @@ object InterCompletionType: case Try(block, _, _) :: rest if block.span.contains(span) => inferType(rest, span) case CaseDef(_, _, body) :: Try(_, cases, _) :: rest if body.span.contains(span) && cases.exists(_.span.contains(span)) => inferType(rest, span) case If(cond, _, _) :: rest if !cond.span.contains(span) => inferType(rest, span) - case If(cond, _, _) :: rest if cond.span.contains(span) => Some(Symbols.defn.BooleanType) + case If(cond, _, _) :: rest if cond.span.contains(span) => Some(defn.BooleanType) case CaseDef(_, _, body) :: Match(_, cases) :: rest if body.span.contains(span) && cases.exists(_.span.contains(span)) => inferType(rest, span) case NamedArg(_, arg) :: rest if arg.span.contains(span) => inferType(rest, span) @@ -97,39 +93,38 @@ object InterCompletionType: if ind < 0 then None else Some(UnapplyArgs(fun.tpe.finalResultType, fun, pats, NoSourcePosition).argTypes(ind)) // f(@@) - case (app: Apply) :: rest => - val param = - for { - ind <- app.args.zipWithIndex.collectFirst { - case (arg, id) if arg.span.contains(span) => id - } - params <- app.symbol.paramSymss.find(!_.exists(_.isTypeParam)) - param <- params.get(ind) - } yield param.info - param match - // def f[T](a: T): T = ??? - // f[Int](@@) - // val _: Int = f(@@) - case Some(t : TypeRef) if t.symbol.is(Flags.TypeParam) => - for { - (typeParams, args) <- - app match - case Apply(TypeApply(fun, args), _) => - val typeParams = fun.symbol.paramSymss.headOption.filter(_.forall(_.isTypeParam)) - typeParams.map((_, args.map(_.tpe))) - // val f: (j: "a") => Int - // f(@@) - case Apply(Select(v, StdNames.nme.apply), _) => - v.symbol.info match - case AppliedType(des, args) => - Some((des.typeSymbol.typeParams, args)) - case _ => None - case _ => None - ind = typeParams.indexOf(t.symbol) - tpe <- args.get(ind) - if !tpe.isErroneous - } yield tpe - case Some(tpe) => Some(tpe) - case _ => None + case ApplyExtractor(app) => + val argsAndParams = ApplyArgsExtractor.getArgsAndParams(None, app, span).headOption + argsAndParams.flatMap: + case (args, params) => + val idx = args.indexWhere(_.span.contains(span)) + val param = + if idx >= 0 && params.length > idx then Some(params(idx).info) + else None + param match + // def f[T](a: T): T = ??? 
+ // f[Int](@@) + // val _: Int = f(@@) + case Some(t : TypeRef) if t.symbol.is(Flags.TypeParam) => + for + (typeParams, args) <- + app match + case Apply(TypeApply(fun, args), _) => + val typeParams = fun.symbol.paramSymss.headOption.filter(_.forall(_.isTypeParam)) + typeParams.map((_, args.map(_.tpe))) + // val f: (j: "a") => Int + // f(@@) + case Apply(Select(v, StdNames.nme.apply), _) => + v.symbol.info match + case AppliedType(des, args) => + Some((des.typeSymbol.typeParams, args)) + case _ => None + case _ => None + ind = typeParams.indexOf(t.symbol) + tpe <- args.get(ind) + if !tpe.isErroneous + yield tpe + case Some(tpe) => Some(tpe) + case _ => None case _ => None diff --git a/presentation-compiler/src/main/dotty/tools/pc/InferredTypeProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/InferredTypeProvider.scala index a0d726d5f382..d019368c7ed6 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/InferredTypeProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/InferredTypeProvider.scala @@ -3,7 +3,7 @@ package dotty.tools.pc import java.nio.file.Paths import scala.annotation.tailrec -import scala.meta.internal.metals.ReportContext +import scala.meta.pc.reports.ReportContext import scala.meta.pc.OffsetParams import scala.meta.pc.PresentationCompilerConfig import scala.meta.pc.SymbolSearch @@ -75,7 +75,7 @@ final class InferredTypeProvider( Interactive.pathTo(driver.openedTrees(uri), pos)(using driver.currentCtx) given locatedCtx: Context = driver.localContext(params) - val indexedCtx = IndexedContext(locatedCtx) + val indexedCtx = IndexedContext(pos)(using locatedCtx) val autoImportsGen = AutoImports.generator( pos, sourceText, @@ -94,14 +94,14 @@ final class InferredTypeProvider( tpe match case tref: TypeRef => indexedCtx.lookupSym( - tref.currentSymbol + tref.currentSymbol, + Some(tref.prefix) ) == IndexedContext.Result.InScope case AppliedType(tycon, args) => isInScope(tycon) && args.forall(isInScope) case _ => true - if isInScope(tpe) - then tpe - else tpe.deepDealias + if isInScope(tpe) then tpe + else tpe.deepDealiasAndSimplify val printer = ShortenedTypePrinter( symbolSearch, @@ -137,7 +137,6 @@ final class InferredTypeProvider( findNamePos(sourceText, vl, keywordOffset).endPos.toLsp adjustOpt.foreach(adjust => endPos.setEnd(adjust.adjustedEndPos)) val spaceBefore = name.isOperatorName - new TextEdit( endPos, printTypeAscription(optDealias(tpt.typeOpt), spaceBefore) + { diff --git a/presentation-compiler/src/main/dotty/tools/pc/MetalsInteractive.scala b/presentation-compiler/src/main/dotty/tools/pc/MetalsInteractive.scala index ef583ea2a225..4e89c687a7b8 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/MetalsInteractive.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/MetalsInteractive.scala @@ -100,9 +100,9 @@ object MetalsInteractive: pos: SourcePosition, indexed: IndexedContext, skipCheckOnName: Boolean = false - ): List[Symbol] = + )(using Context): List[Symbol] = enclosingSymbolsWithExpressionType(path, pos, indexed, skipCheckOnName) - .map(_._1) + .map(_._1.sourceSymbol) /** * Returns the list of tuple enclosing symbol and @@ -135,7 +135,7 @@ object MetalsInteractive: (sym, sym.info, None) ) - case (imp: Import) :: _ => + case (imp: ImportOrExport) :: _ => importedSymbols(imp, _.span.contains(pos.span)).map(sym => (sym, sym.info, None) ) @@ -206,7 +206,7 @@ object MetalsInteractive: // Handle select on named tuples case (Apply(Apply(TypeApply(fun, List(t1, t2)), List(ddef)), List(Literal(Constant(i: Int))))) :: _ if 
fun.symbol.exists && fun.symbol.name == nme.apply && - fun.symbol.owner.exists && fun.symbol.owner == getModuleIfDefined("scala.NamedTuple").moduleClass => + fun.symbol.owner.exists && fun.symbol.owner == defn.NamedTupleModule.moduleClass => def getIndex(t: Tree): Option[Type] = t.tpe.dealias match case AppliedType(_, args) => args.get(i) @@ -217,17 +217,19 @@ object MetalsInteractive: val tpe = getIndex(t2).getOrElse(NoType) List((ddef.symbol, tpe, Some(name))) + case head :: (sel @ Select(_, name)) :: _ + if head.sourcePos.encloses(sel.sourcePos) && (name == StdNames.nme.apply || name == StdNames.nme.unapply) => + val optObjectSymbol = List(head.symbol).filter(sym => !(sym.is(Synthetic) && sym.is(Module))) + val classSymbol = head.symbol.companionClass + val optApplySymbol = List(sel.symbol).filter(sym => !sym.is(Synthetic)) + val symbols = optObjectSymbol ++ (classSymbol :: optApplySymbol) + symbols.collect: + case sym if sym.exists => (sym, sym.info, None) + case path @ head :: tail => if head.symbol.is(Exported) then val sym = head.symbol.sourceSymbol List((sym, sym.info, None)) - else if head.symbol.is(Synthetic) then - enclosingSymbolsWithExpressionType( - tail, - pos, - indexed, - skipCheckOnName - ) else if head.symbol != NoSymbol then if skipCheckOnName || MetalsInteractive.isOnName( @@ -236,6 +238,13 @@ object MetalsInteractive: indexed.ctx.source ) then List((head.symbol, head.typeOpt, None)) + else if head.symbol.is(Synthetic) then + enclosingSymbolsWithExpressionType( + tail, + pos, + indexed, + skipCheckOnName + ) /* Type tree for List(1) has an Int type variable, which has span * but doesn't exist in code. * https://github.com/scala/scala3/issues/15937 diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcCollector.scala b/presentation-compiler/src/main/dotty/tools/pc/PcCollector.scala index 1ebfd405768e..52a6299c79fe 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/PcCollector.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/PcCollector.scala @@ -63,7 +63,8 @@ trait PcCollector[T]: o.span.exists && o.span.point == named.symbol.owner.span.point ) - def soughtOrOverride(sym: Symbol) = + def soughtOrOverride(sym0: Symbol) = + val sym = if sym0.is(Flags.Exported) then sym0.sourceSymbol else sym0 sought(sym) || sym.allOverriddenSymbols.exists(sought(_)) def soughtTreeFilter(tree: Tree): Boolean = @@ -76,7 +77,7 @@ trait PcCollector[T]: case df: NamedDefTree if soughtOrOverride(df.symbol) && !df.symbol.isSetter => true - case imp: Import if owners(imp.expr.symbol) => true + case imp: ImportOrExport if owners(imp.expr.symbol) => true case _ => false def soughtFilter(f: Symbol => Boolean): Boolean = @@ -115,11 +116,13 @@ trait PcCollector[T]: */ case ident: Ident if ident.isCorrectSpan && filter(ident) => // symbols will differ for params in different ext methods, but source pos will be the same - if soughtFilter(_.sourcePos == ident.symbol.sourcePos) + val symbol = if ident.symbol.is(Flags.Exported) then ident.symbol.sourceSymbol else ident.symbol + if soughtFilter(_.sourcePos == symbol.sourcePos) then occurrences + collect( ident, - ident.sourcePos + ident.sourcePos, + Some(symbol) ) else occurrences /** @@ -160,7 +163,7 @@ trait PcCollector[T]: def collectEndMarker = EndMarker.getPosition(df, pos, sourceText).map: collect(EndMarker(df.symbol), _) - val annots = collectTrees(df.mods.annotations) + val annots = collectTrees(df.symbol.annotations.map(_.tree)) val traverser = new PcCollector.DeepFolderWithParent[Set[T]]( collectNamesWithParent @@ -215,8 
+218,8 @@ trait PcCollector[T]: * @<>("") * def params() = ??? */ - case mdf: MemberDef if mdf.mods.annotations.nonEmpty => - val trees = collectTrees(mdf.mods.annotations) + case mdf: MemberDef if mdf.symbol.annotations.nonEmpty => + val trees = collectTrees(mdf.symbol.annotations.map(_.tree)) val traverser = new PcCollector.DeepFolderWithParent[Set[T]]( collectNamesWithParent @@ -228,7 +231,7 @@ trait PcCollector[T]: * For traversing import selectors: * import scala.util.<> */ - case imp: Import if filter(imp) => + case imp: ImportOrExport if filter(imp) => imp.selectors .collect { case sel: ImportSelector @@ -315,7 +318,7 @@ object EndMarker: def getPosition(df: NamedDefTree, pos: SourcePosition, sourceText: String)( implicit ct: Context ): Option[SourcePosition] = - val name = df.name.toString() + val name = df.name.toString().stripSuffix("$") val endMarkerLine = sourceText.slice(df.span.start, df.span.end).split('\n').last val index = endMarkerLine.length() - name.length() diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcConvertToNamedLambdaParameters.scala b/presentation-compiler/src/main/dotty/tools/pc/PcConvertToNamedLambdaParameters.scala new file mode 100644 index 000000000000..2ca50107c36b --- /dev/null +++ b/presentation-compiler/src/main/dotty/tools/pc/PcConvertToNamedLambdaParameters.scala @@ -0,0 +1,153 @@ +package dotty.tools.pc + +import java.nio.file.Paths +import java.util as ju + +import scala.jdk.CollectionConverters.* +import scala.meta.pc.OffsetParams + +import dotty.tools.dotc.ast.tpd +import dotty.tools.dotc.core.Contexts.Context +import dotty.tools.dotc.core.Flags +import dotty.tools.dotc.interactive.Interactive +import dotty.tools.dotc.interactive.InteractiveDriver +import dotty.tools.dotc.util.SourceFile +import dotty.tools.dotc.util.SourcePosition +import org.eclipse.lsp4j as l +import dotty.tools.pc.utils.InteractiveEnrichments.* +import dotty.tools.pc.utils.TermNameInference.* + +/** + * Facilitates the code action that converts a wildcard lambda to a lambda with named parameters + * e.g. 
+ * + * List(1, 2).map(<<_>> + 1) => List(1, 2).map(i => i + 1) + */ +final class PcConvertToNamedLambdaParameters( + driver: InteractiveDriver, + params: OffsetParams +): + import PcConvertToNamedLambdaParameters._ + + def convertToNamedLambdaParameters: ju.List[l.TextEdit] = { + val uri = params.uri + val filePath = Paths.get(uri) + driver.run( + uri, + SourceFile.virtual(filePath.toString, params.text), + ) + given newctx: Context = driver.localContext(params) + val pos = driver.sourcePosition(params) + val trees = driver.openedTrees(uri) + val treeList = Interactive.pathTo(trees, pos) + // Extractor for a lambda function (needs context, so has to be defined here) + val LambdaExtractor = Lambda(using newctx) + // select the most inner wildcard lambda + val firstLambda = treeList.collectFirst { + case LambdaExtractor(params, rhsFn) if params.forall(isWildcardParam) => + params -> rhsFn + } + + firstLambda match { + case Some((params, lambda)) => + // avoid names that are either defined or referenced in the lambda + val namesToAvoid = allDefAndRefNamesInTree(lambda) + // compute parameter names based on the type of the parameter + val computedParamNames: List[String] = + params.foldLeft(List.empty[String]) { (acc, param) => + val name = singleLetterNameStream(param.tpe.typeSymbol.name.toString()) + .find(n => !namesToAvoid.contains(n) && !acc.contains(n)) + acc ++ name.toList + } + if computedParamNames.size == params.size then + val paramReferenceEdits = params.zip(computedParamNames).flatMap { (param, paramName) => + val paramReferencePosition = findParamReferencePosition(param, lambda) + paramReferencePosition.toList.map { pos => + val position = pos.toLsp + val range = new l.Range( + position.getStart(), + position.getEnd() + ) + new l.TextEdit(range, paramName) + } + } + val paramNamesStr = computedParamNames.mkString(", ") + val paramDefsStr = + if params.size == 1 then paramNamesStr + else s"($paramNamesStr)" + val defRange = new l.Range( + lambda.sourcePos.toLsp.getStart(), + lambda.sourcePos.toLsp.getStart() + ) + val paramDefinitionEdits = List( + new l.TextEdit(defRange, s"$paramDefsStr => ") + ) + (paramDefinitionEdits ++ paramReferenceEdits).asJava + else + List.empty.asJava + case _ => + List.empty.asJava + } + } + +end PcConvertToNamedLambdaParameters + +object PcConvertToNamedLambdaParameters: + val codeActionId = "ConvertToNamedLambdaParameters" + + class Lambda(using Context): + def unapply(tree: tpd.Block): Option[(List[tpd.ValDef], tpd.Tree)] = tree match { + case tpd.Block((ddef @ tpd.DefDef(_, tpd.ValDefs(params) :: Nil, _, body: tpd.Tree)) :: Nil, tpd.Closure(_, meth, _)) + if ddef.symbol == meth.symbol => + params match { + case List(param) => + // lambdas with multiple wildcard parameters are represented as a single parameter function and a block with wildcard valdefs + Some(multipleUnderscoresFromBody(param, body)) + case _ => Some(params -> body) + } + case _ => None + } + end Lambda + + private def multipleUnderscoresFromBody(param: tpd.ValDef, body: tpd.Tree)(using Context): (List[tpd.ValDef], tpd.Tree) = body match { + case tpd.Block(defs, expr) if param.symbol.is(Flags.Synthetic) => + val wildcardParamDefs = defs.collect { + case valdef: tpd.ValDef if isWildcardParam(valdef) => valdef + } + if wildcardParamDefs.size == defs.size then wildcardParamDefs -> expr + else List(param) -> body + case _ => List(param) -> body + } + + def isWildcardParam(param: tpd.ValDef)(using Context): Boolean = + param.name.toString.startsWith("_$") && 
param.symbol.is(Flags.Synthetic) + + def findParamReferencePosition(param: tpd.ValDef, lambda: tpd.Tree)(using Context): Option[SourcePosition] = + var pos: Option[SourcePosition] = None + object FindParamReference extends tpd.TreeTraverser: + override def traverse(tree: tpd.Tree)(using Context): Unit = + tree match + case ident @ tpd.Ident(_) if ident.symbol == param.symbol => + pos = Some(tree.sourcePos) + case _ => + traverseChildren(tree) + FindParamReference.traverse(lambda) + pos + end findParamReferencePosition + + def allDefAndRefNamesInTree(tree: tpd.Tree)(using Context): List[String] = + object FindDefinitionsAndRefs extends tpd.TreeAccumulator[List[String]]: + override def apply(x: List[String], tree: tpd.Tree)(using Context): List[String] = + tree match + case tpd.DefDef(name, _, _, _) => + super.foldOver(x :+ name.toString, tree) + case tpd.ValDef(name, _, _) => + super.foldOver(x :+ name.toString, tree) + case tpd.Ident(name) => + super.foldOver(x :+ name.toString, tree) + case _ => + super.foldOver(x, tree) + FindDefinitionsAndRefs.foldOver(Nil, tree) + end allDefAndRefNamesInTree + +end PcConvertToNamedLambdaParameters diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcDefinitionProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/PcDefinitionProvider.scala index 8ff43ba07358..ca5a36cefad0 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/PcDefinitionProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/PcDefinitionProvider.scala @@ -51,7 +51,7 @@ class PcDefinitionProvider( Interactive.pathTo(driver.openedTrees(uri), pos)(using driver.currentCtx) given ctx: Context = driver.localContext(params) - val indexedContext = IndexedContext(ctx) + val indexedContext = IndexedContext(pos)(using ctx) val result = if findTypeDef then findTypeDefinitions(path, pos, indexedContext, uri) else findDefinitions(path, pos, indexedContext, uri) @@ -79,7 +79,7 @@ class PcDefinitionProvider( .untypedPath(pos.span) .collect { case t: untpd.Tree => t } - definitionsForSymbol(untpdPath.headOption.map(_.symbol).toList, uri, pos) + definitionsForSymbols(untpdPath.headOption.map(_.symbol).toList, uri, pos) end fallbackToUntyped private def findDefinitions( @@ -89,7 +89,7 @@ class PcDefinitionProvider( uri: URI, ): DefinitionResult = import indexed.ctx - definitionsForSymbol( + definitionsForSymbols( MetalsInteractive.enclosingSymbols(path, pos, indexed), uri, pos @@ -113,68 +113,58 @@ class PcDefinitionProvider( case Nil => path.headOption match case Some(value: Literal) => - definitionsForSymbol(List(value.typeOpt.widen.typeSymbol), uri, pos) + definitionsForSymbols(List(value.typeOpt.widen.typeSymbol), uri, pos) case _ => DefinitionResultImpl.empty case _ => - definitionsForSymbol(typeSymbols, uri, pos) - + definitionsForSymbols(typeSymbols, uri, pos) end findTypeDefinitions - private def definitionsForSymbol( + private def definitionsForSymbols( symbols: List[Symbol], uri: URI, pos: SourcePosition )(using ctx: Context): DefinitionResult = - symbols match - case symbols @ (sym :: other) => - val isLocal = sym.source == pos.source - if isLocal then - val include = Include.definitions | Include.local - val (exportedDefs, otherDefs) = - Interactive.findTreesMatching(driver.openedTrees(uri), include, sym) - .partition(_.tree.symbol.is(Exported)) - - otherDefs.headOption.orElse(exportedDefs.headOption) match - case Some(srcTree) => - val pos = srcTree.namePos - if pos.exists then - val loc = new Location(params.uri().toString(), pos.toLsp) - 
DefinitionResultImpl( - SemanticdbSymbols.symbolName(sym), - List(loc).asJava, - ) - else DefinitionResultImpl.empty - case None => - DefinitionResultImpl.empty - else - val res = new ArrayList[Location]() - semanticSymbolsSorted(symbols) - .foreach { sym => - res.addAll(search.definition(sym, params.uri())) - } - DefinitionResultImpl( - SemanticdbSymbols.symbolName(sym), - res - ) - end if + semanticSymbolsSorted(symbols) match case Nil => DefinitionResultImpl.empty - end match - end definitionsForSymbol + case syms @ ((_, headSym) :: tail) => + val locations = syms.flatMap: + case (sym, semanticdbSymbol) => + locationsForSymbol(sym, semanticdbSymbol, uri, pos) + DefinitionResultImpl(headSym, locations.asJava) + + private def locationsForSymbol( + symbol: Symbol, + semanticdbSymbol: String, + uri: URI, + pos: SourcePosition + )(using ctx: Context): List[Location] = + val isLocal = symbol.source == pos.source + if isLocal then + val trees = driver.openedTrees(uri) + val include = Include.definitions | Include.local + val (exportedDefs, otherDefs) = + Interactive.findTreesMatching(trees, include, symbol) + .partition(_.tree.symbol.is(Exported)) + otherDefs.headOption.orElse(exportedDefs.headOption).collect: + case srcTree if srcTree.namePos.exists => + new Location(params.uri().toString(), srcTree.namePos.toLsp) + .toList + else search.definition(semanticdbSymbol, uri).asScala.toList def semanticSymbolsSorted( syms: List[Symbol] - )(using ctx: Context): List[String] = + )(using ctx: Context): List[(Symbol, String)] = syms - .map { sym => + .collect { case sym if sym.exists => // in case of having the same type and term symbol // term comes first // used only for ordering symbols that come from `Import` val termFlag = if sym.is(ModuleClass) then sym.sourceModule.isTerm else sym.isTerm - (termFlag, SemanticdbSymbols.symbolName(sym)) + (termFlag, sym.sourceSymbol, SemanticdbSymbols.symbolName(sym)) } - .sorted - .map(_._2) + .sortBy { case (termFlag, _, name) => (termFlag, name) } + .map(_.tail) end PcDefinitionProvider diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcInlayHintsProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/PcInlayHintsProvider.scala index cf4929dfc91d..db92772291b4 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/PcInlayHintsProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/PcInlayHintsProvider.scala @@ -5,7 +5,7 @@ import java.nio.file.Paths import scala.annotation.tailrec -import scala.meta.internal.metals.ReportContext +import scala.meta.pc.reports.ReportContext import dotty.tools.pc.utils.InteractiveEnrichments.* import dotty.tools.pc.printer.ShortenedTypePrinter import scala.meta.internal.pc.InlayHints @@ -116,6 +116,29 @@ class PcInlayHintsProvider( InlayHintKind.Type, ) .addDefinition(adjustedPos.start) + case ByNameParameters(byNameParams) => + def adjustByNameParameterPos(pos: SourcePosition): SourcePosition = + val adjusted = adjustPos(pos) + val start = text.indexWhere(!_.isWhitespace, adjusted.start) + val end = text.lastIndexWhere(!_.isWhitespace, adjusted.end - 1) + + val startsWithBrace = text.lift(start).contains('{') + val endsWithBrace = text.lift(end).contains('}') + + if startsWithBrace && endsWithBrace then + adjusted.withStart(start + 1) + else + adjusted + + byNameParams.foldLeft(inlayHints) { + case (ih, pos) => + val adjusted = adjustByNameParameterPos(pos) + ih.add( + adjusted.startPos.toLsp, + List(LabelPart("=> ")), + InlayHintKind.Parameter + ) + } case _ => inlayHints private def
toLabelParts( @@ -125,7 +148,7 @@ class PcInlayHintsProvider( val tpdPath = Interactive.pathTo(unit.tpdTree, pos.span) - val indexedCtx = IndexedContext(Interactive.contextOfPath(tpdPath)) + val indexedCtx = IndexedContext(pos)(using Interactive.contextOfPath(tpdPath)) val printer = ShortenedTypePrinter( symbolSearch )(using indexedCtx) @@ -140,7 +163,7 @@ class PcInlayHintsProvider( isInScope(tycon) && args.forall(isInScope) case _ => true if isInScope(tpe) then tpe - else tpe.deepDealias(using indexedCtx.ctx) + else tpe.deepDealiasAndSimplify(using indexedCtx.ctx) val dealiased = optDealias(tpe) val tpeStr = printer.tpe(dealiased) @@ -149,7 +172,7 @@ class PcInlayHintsProvider( InlayHints.makeLabelParts(parts, tpeStr) end toLabelParts - private val definitions = IndexedContext(ctx).ctx.definitions + private val definitions = IndexedContext(pos)(using ctx).ctx.definitions private def syntheticTupleApply(tree: Tree): Boolean = tree match case sel: Select => @@ -388,3 +411,28 @@ object InferredType: index >= 0 && index < afterDef.size && afterDef(index) == '@' end InferredType + +object ByNameParameters: + def unapply(tree: Tree)(using params: InlayHintsParams, ctx: Context): Option[List[SourcePosition]] = + def shouldSkipSelect(sel: Select) = + isForComprehensionMethod(sel) || sel.symbol.name == nme.unapply + + if (params.byNameParameters()){ + tree match + case Apply(TypeApply(sel: Select, _), _) if shouldSkipSelect(sel) => + None + case Apply(sel: Select, _) if shouldSkipSelect(sel) => + None + case Apply(fun, args) => + val funTp = fun.typeOpt.widenTermRefExpr + val params = funTp.paramInfoss.flatten + Some( + args + .zip(params) + .collect { + case (tree, param) if param.isByName => tree.sourcePos + } + ) + case _ => None + } else None +end ByNameParameters diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcInlineValueProviderImpl.scala b/presentation-compiler/src/main/dotty/tools/pc/PcInlineValueProvider.scala similarity index 58% rename from presentation-compiler/src/main/dotty/tools/pc/PcInlineValueProviderImpl.scala rename to presentation-compiler/src/main/dotty/tools/pc/PcInlineValueProvider.scala index fc4b53e60bbd..c35046db2fc4 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/PcInlineValueProviderImpl.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/PcInlineValueProvider.scala @@ -16,17 +16,76 @@ import dotty.tools.dotc.core.StdNames import dotty.tools.dotc.core.Symbols.Symbol import dotty.tools.dotc.interactive.Interactive import dotty.tools.dotc.interactive.InteractiveDriver +import dotty.tools.dotc.util.SourceFile import dotty.tools.dotc.util.SourcePosition import dotty.tools.pc.utils.InteractiveEnrichments.* import dotty.tools.pc.IndexedContext.Result import org.eclipse.lsp4j as l -final class PcInlineValueProviderImpl( +final class PcInlineValueProvider( driver: InteractiveDriver, val params: OffsetParams -) extends WithSymbolSearchCollector[Option[Occurence]](driver, params) - with InlineValueProvider: +) extends WithSymbolSearchCollector[Option[Occurence]](driver, params): + + // We return a result or an error + def getInlineTextEdits(): Either[String, List[l.TextEdit]] = + defAndRefs() match { + case Right((defn, refs)) => + val edits = + if (defn.shouldBeRemoved) { + val defEdit = definitionTextEdit(defn) + val refsEdits = refs.map(referenceTextEdit(defn)) + defEdit :: refsEdits + } else refs.map(referenceTextEdit(defn)) + Right(edits) + case Left(error) => Left(error) + } + + private def referenceTextEdit( + definition: Definition + )(ref: 
Reference): l.TextEdit = + if (definition.requiresBrackets && ref.requiresBrackets) + new l.TextEdit( + ref.range, + s"""(${ref.rhs})""" + ) + else new l.TextEdit(ref.range, ref.rhs) + + private def definitionTextEdit(definition: Definition): l.TextEdit = + new l.TextEdit( + extend( + definition.rangeOffsets.start, + definition.rangeOffsets.end, + definition.range + ), + "" + ) + + private def extend( + startOffset: Int, + endOffset: Int, + range: l.Range + ): l.Range = { + val (startWithSpace, endWithSpace): (Int, Int) = + extendRangeToIncludeWhiteCharsAndTheFollowingNewLine( + text + )(startOffset, endOffset) + val startPos = new l.Position( + range.getStart.getLine, + range.getStart.getCharacter - (startOffset - startWithSpace) + ) + val endPos = + if (endWithSpace - 1 >= 0 && text(endWithSpace - 1) == '\n') + new l.Position(range.getEnd.getLine + 1, 0) + else + new l.Position( + range.getEnd.getLine, + range.getEnd.getCharacter + endWithSpace - endOffset + ) + + new l.Range(startPos, endPos) + } val position: l.Position = pos.toLsp.getStart().nn @@ -41,7 +100,7 @@ final class PcInlineValueProviderImpl( Some(Occurence(tree, parent, adjustedPos)) case _ => None - override def defAndRefs(): Either[String, (Definition, List[Reference])] = + def defAndRefs(): Either[String, (Definition, List[Reference])] = val newctx = driver.currentCtx.fresh.setCompilationUnit(unit) val allOccurences = result().flatten for @@ -51,8 +110,8 @@ final class PcInlineValueProviderImpl( } .toRight(Errors.didNotFindDefinition) path = Interactive.pathTo(unit.tpdTree, definition.tree.rhs.span)(using newctx) - indexedContext = IndexedContext(Interactive.contextOfPath(path)(using newctx)) - symbols = symbolsUsedInDefn(definition.tree.rhs).filter(indexedContext.lookupSym(_) == Result.InScope) + indexedContext = IndexedContext(definition.tree.namePos)(using Interactive.contextOfPath(path)(using newctx)) + symbols = symbolsUsedInDefn(definition.tree.rhs, indexedContext) references <- getReferencesToInline(definition, allOccurences, symbols) yield val (deleteDefinition, refsEdits) = references @@ -60,7 +119,6 @@ final class PcInlineValueProviderImpl( val defPos = definition.tree.sourcePos val defEdit = Definition( defPos.toLsp, - adjustRhs(definition.tree.rhs.sourcePos), RangeOffset(defPos.start, defPos.end), definitionRequiresBrackets(definition.tree.rhs)(using newctx), deleteDefinition @@ -70,6 +128,15 @@ final class PcInlineValueProviderImpl( end for end defAndRefs + private def stripIndentPrefix(rhs: String, refIndent: String, defIndent: String, hasNextLineAfterEqualsSign: Boolean): String = + val rhsLines = rhs.split("\n").toList + rhsLines match + case h :: Nil => rhs + case h :: t => + val header = if !hasNextLineAfterEqualsSign then h else "\n" ++ refIndent ++ " " ++ h + header ++ t.map(refIndent ++ _.stripPrefix(defIndent)).mkString("\n", "\n", "") + case Nil => rhs + private def definitionRequiresBrackets(tree: Tree)(using Context): Boolean = NavigateAST .untypedPath(tree.span) @@ -102,27 +169,31 @@ final class PcInlineValueProviderImpl( end referenceRequiresBrackets - private def adjustRhs(pos: SourcePosition) = + private def extendWithSurroundingParens(pos: SourcePosition) = + /** Move `point` by `step` as long as the character at `point` is `acceptedChar` */ def extend(point: Int, acceptedChar: Char, step: Int): Int = val newPoint = point + step - if newPoint > 0 && newPoint < text.length && text( - newPoint - ) == acceptedChar + if newPoint > 0 && newPoint < text.length && + text(newPoint) == acceptedChar 
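// A standalone sketch (plain strings, no compiler types) of the re-indentation that
// stripIndentPrefix performs when a multi-line right-hand side is inlined: continuation
// lines drop the definition's indentation and gain the reference site's indentation.
// The hasNextLineAfterEqualsSign special case is omitted here.
def reindentForInline(rhs: String, refIndent: String, defIndent: String): String =
  rhs.split("\n").toList match
    case Nil | _ :: Nil => rhs
    case head :: tail =>
      head ++ tail.map(line => refIndent ++ line.stripPrefix(defIndent)).mkString("\n", "\n", "")

@main def reindentDemo(): Unit =
  println(reindentForInline("locally:\n    val x = 1\n    x + 1", refIndent = "      ", defIndent = "  "))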
then extend(newPoint, acceptedChar, step) else point val adjustedStart = extend(pos.start, '(', -1) val adjustedEnd = extend(pos.end - 1, ')', 1) + 1 text.slice(adjustedStart, adjustedEnd).mkString - private def symbolsUsedInDefn(rhs: Tree): Set[Symbol] = + private def symbolsUsedInDefn(rhs: Tree, indexedContext: IndexedContext): Set[Symbol] = def collectNames( symbols: Set[Symbol], tree: Tree ): Set[Symbol] = tree match - case id: (Ident | Select) + case id: Ident if !id.symbol.is(Synthetic) && !id.symbol.is(Implicit) => symbols + tree.symbol + case sel: Select => + indexedContext.lookupSym(sel.symbol) match + case IndexedContext.Result.InScope => symbols + sel.symbol + case _ => symbols case _ => symbols val traverser = new DeepFolder[Set[Symbol]](collectNames) @@ -139,7 +210,7 @@ final class PcInlineValueProviderImpl( .exists(e => e.isTerm) def allreferences = allOccurences.filterNot(_.isDefn) def inlineAll() = - makeRefsEdits(allreferences, symbols).map((true, _)) + makeRefsEdits(allreferences, symbols, definition).map((true, _)) if definition.tree.sourcePos.toLsp.encloses(position) then if defIsLocal then inlineAll() else Left(Errors.notLocal) else @@ -150,21 +221,35 @@ final class PcInlineValueProviderImpl( ref <- list .find(_.pos.toLsp.encloses(position)) .toRight(Errors.didNotFindReference) - refEdits <- makeRefsEdits(List(ref), symbols) + refEdits <- makeRefsEdits(List(ref), symbols, definition) yield (false, refEdits) end if end getReferencesToInline + extension (pos: SourcePosition) + def startColumnIndentPadding: String = { + val source = pos.source + val offset = pos.start + var idx = source.startOfLine(offset) + val pad = new StringBuilder + while (idx != offset && idx < source.content().length && source.content()(idx).isWhitespace) { + pad.append(source.content()(idx)) + idx += 1 + } + pad.result() + } + private def makeRefsEdits( refs: List[Occurence], - symbols: Set[Symbol] + symbols: Set[Symbol], + definition: DefinitionTree ): Either[String, List[Reference]] = val newctx = driver.currentCtx.fresh.setCompilationUnit(unit) def buildRef(occurrence: Occurence): Either[String, Reference] = val path = Interactive.pathTo(unit.tpdTree, occurrence.pos.span)(using newctx) - val indexedContext = IndexedContext( - Interactive.contextOfPath(path)(using newctx) + val indexedContext = IndexedContext(pos)( + using Interactive.contextOfPath(path)(using newctx) ) import indexedContext.ctx val conflictingSymbols = symbols @@ -174,10 +259,18 @@ final class PcInlineValueProviderImpl( case _ => false } .map(_.fullNameBackticked) + val hasNextLineAfterEqualsSign = + definition.tree.sourcePos.startLine != definition.tree.rhs.sourcePos.startLine if conflictingSymbols.isEmpty then Right( Reference( occurrence.pos.toLsp, + stripIndentPrefix( + extendWithSurroundingParens(definition.tree.rhs.sourcePos), + occurrence.tree.startPos.startColumnIndentPadding, + definition.tree.startPos.startColumnIndentPadding, + hasNextLineAfterEqualsSign + ), occurrence.parent.map(p => RangeOffset(p.sourcePos.start, p.sourcePos.end) ), @@ -196,7 +289,7 @@ final class PcInlineValueProviderImpl( ) end makeRefsEdits -end PcInlineValueProviderImpl +end PcInlineValueProvider case class Occurence(tree: Tree, parent: Option[Tree], pos: SourcePosition): def isDefn = @@ -205,3 +298,19 @@ case class Occurence(tree: Tree, parent: Option[Tree], pos: SourcePosition): case _ => false case class DefinitionTree(tree: ValDef, pos: SourcePosition) + +case class RangeOffset(start: Int, end: Int) + +case class Definition( + range: 
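// Standalone sketch of extendWithSurroundingParens: when the right-hand side's position
// excludes wrapping parentheses, walk outwards over consecutive '(' and ')' so the text
// that gets inlined keeps them balanced (SourcePosition replaced by plain offsets here).
def sliceWithSurroundingParens(text: String, start: Int, end: Int): String =
  @annotation.tailrec
  def extend(point: Int, acceptedChar: Char, step: Int): Int =
    val newPoint = point + step
    if newPoint > 0 && newPoint < text.length && text(newPoint) == acceptedChar
    then extend(newPoint, acceptedChar, step)
    else point
  val adjustedStart = extend(start, '(', -1)
  val adjustedEnd = extend(end - 1, ')', 1) + 1
  text.slice(adjustedStart, adjustedEnd)

@main def parensDemo(): Unit =
  val src = "val a = ((1 + 2))"
  println(sliceWithSurroundingParens(src, src.indexOf("1"), src.indexOf("2") + 1)) // ((1 + 2))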
l.Range, + rangeOffsets: RangeOffset, + requiresBrackets: Boolean, + shouldBeRemoved: Boolean +) + +case class Reference( + range: l.Range, + rhs: String, + parentOffsets: Option[RangeOffset], + requiresBrackets: Boolean +) diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcSymbolSearch.scala b/presentation-compiler/src/main/dotty/tools/pc/PcSymbolSearch.scala index fd3d74f16c16..7d1e53e1ddb2 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/PcSymbolSearch.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/PcSymbolSearch.scala @@ -49,7 +49,7 @@ trait PcSymbolSearch: lazy val soughtSymbols: Option[(Set[Symbol], SourcePosition)] = soughtSymbols(path) - def soughtSymbols(path: List[Tree]): Option[(Set[Symbol], SourcePosition)] = + private def soughtSymbols(path: List[Tree]): Option[(Set[Symbol], SourcePosition)] = val sought = path match /* reference of an extension paramter * extension [EF](<>: List[EF]) @@ -148,7 +148,7 @@ trait PcSymbolSearch: /* Import selectors: * import scala.util.Tr@@y */ - case (imp: Import) :: _ if imp.span.contains(pos.span) => + case (imp: ImportOrExport) :: _ if imp.span.contains(pos.span) => imp .selector(pos.span) .map(sym => (symbolAlternatives(sym), sym.sourcePos)) diff --git a/presentation-compiler/src/main/dotty/tools/pc/Scala3CompilerAccess.scala b/presentation-compiler/src/main/dotty/tools/pc/Scala3CompilerAccess.scala index 1443fbcf37cc..f6fc48e5ae67 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/Scala3CompilerAccess.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/Scala3CompilerAccess.scala @@ -3,7 +3,7 @@ package dotty.tools.pc import java.util.concurrent.ScheduledExecutorService import scala.concurrent.ExecutionContextExecutor -import scala.meta.internal.metals.ReportContext +import scala.meta.pc.reports.ReportContext import scala.meta.internal.pc.CompilerAccess import scala.meta.pc.PresentationCompilerConfig diff --git a/presentation-compiler/src/main/dotty/tools/pc/ScalaPresentationCompiler.scala b/presentation-compiler/src/main/dotty/tools/pc/ScalaPresentationCompiler.scala index dc53525480c3..2f218687296f 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/ScalaPresentationCompiler.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/ScalaPresentationCompiler.scala @@ -15,11 +15,10 @@ import scala.jdk.CollectionConverters._ import scala.language.unsafeNulls import scala.meta.internal.metals.CompilerVirtualFileParams import scala.meta.internal.metals.EmptyCancelToken -import scala.meta.internal.metals.EmptyReportContext +import scala.meta.pc.reports.EmptyReportContext import scala.meta.internal.metals.PcQueryContext -import scala.meta.internal.metals.ReportContext +import scala.meta.pc.reports.ReportContext import scala.meta.internal.metals.ReportLevel -import scala.meta.internal.metals.StdReportContext import scala.meta.internal.mtags.CommonMtagsEnrichments.* import scala.meta.internal.pc.CompilerAccess import scala.meta.internal.pc.DefinitionResultImpl @@ -54,14 +53,18 @@ case class ScalaPresentationCompiler( folderPath: Option[Path] = None, reportsLevel: ReportLevel = ReportLevel.Info, completionItemPriority: CompletionItemPriority = (_: String) => 0, + reportContext: ReportContext = EmptyReportContext() ) extends PresentationCompiler: + given ReportContext = reportContext + override def supportedCodeActions(): ju.List[String] = List( CodeActionId.ConvertToNamedArguments, CodeActionId.ImplementAbstractMembers, CodeActionId.ExtractMethod, CodeActionId.InlineValue, - 
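// Switching the match from Import to ImportOrExport means the selector lookup used to
// find the symbol under the cursor also covers export clauses, e.g. a cursor on `greet`
// in the export below now resolves through imp.selector(pos.span) as well.
object Impl:
  def greet(name: String): String = s"hello, $name"

object Api:
  export Impl.greet

@main def exportDemo(): Unit = println(Api.greet("scala"))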
CodeActionId.InsertInferredType + CodeActionId.InsertInferredType, + PcConvertToNamedLambdaParameters.codeActionId ).asJava def this() = this("", None, Nil, Nil) @@ -71,10 +74,6 @@ case class ScalaPresentationCompiler( private val forbiddenOptions = Set("-print-lines", "-print-tasty") private val forbiddenDoubleOptions = Set.empty[String] - given ReportContext = - folderPath - .map(StdReportContext(_, _ => buildTargetName, reportsLevel)) - .getOrElse(EmptyReportContext) override def codeAction[T]( params: OffsetParams, @@ -82,26 +81,30 @@ case class ScalaPresentationCompiler( codeActionPayload: Optional[T] ): CompletableFuture[ju.List[TextEdit]] = (codeActionId, codeActionPayload.asScala) match - case ( - CodeActionId.ConvertToNamedArguments, - Some(argIndices: ju.List[_]) - ) => - val payload = - argIndices.asScala.collect { case i: Integer => i.toInt }.toSet - convertToNamedArguments(params, payload) - case (CodeActionId.ImplementAbstractMembers, _) => - implementAbstractMembers(params) - case (CodeActionId.InsertInferredType, _) => - insertInferredType(params) - case (CodeActionId.InlineValue, _) => - inlineValue(params) - case (CodeActionId.ExtractMethod, Some(extractionPos: OffsetParams)) => - params match { - case range: RangeParams => - extractMethod(range, extractionPos) - case _ => failedFuture(new IllegalArgumentException(s"Expected range parameters")) - } - case (id, _) => failedFuture(new IllegalArgumentException(s"Unsupported action id $id")) + case ( + CodeActionId.ConvertToNamedArguments, + Some(argIndices: ju.List[_]) + ) => + val payload = + argIndices.asScala.collect { case i: Integer => i.toInt }.toSet + convertToNamedArguments(params, payload) + case (CodeActionId.ImplementAbstractMembers, _) => + implementAbstractMembers(params) + case (CodeActionId.InsertInferredType, _) => + insertInferredType(params) + case (CodeActionId.InlineValue, _) => + inlineValue(params) + case (CodeActionId.ExtractMethod, Some(extractionPos: OffsetParams)) => + params match { + case range: RangeParams => + extractMethod(range, extractionPos) + case _ => failedFuture(new IllegalArgumentException(s"Expected range parameters")) + } + case (PcConvertToNamedLambdaParameters.codeActionId, _) => + compilerAccess.withNonInterruptableCompiler(List.empty[l.TextEdit].asJava, params.token) { + access => PcConvertToNamedLambdaParameters(access.compiler(), params).convertToNamedLambdaParameters + }(params.toQueryContext) + case (id, _) => failedFuture(new IllegalArgumentException(s"Unsupported action id $id")) private def failedFuture[T](e: Throwable): CompletableFuture[T] = val f = new CompletableFuture[T]() @@ -355,7 +358,7 @@ case class ScalaPresentationCompiler( val empty: Either[String, List[l.TextEdit]] = Right(List()) (compilerAccess .withInterruptableCompiler(empty, params.token()) { pc => - new PcInlineValueProviderImpl(pc.compiler(), params) + new PcInlineValueProvider(pc.compiler(), params) .getInlineTextEdits() }(params.toQueryContext)) .thenApply { @@ -508,6 +511,9 @@ case class ScalaPresentationCompiler( def withSearch(search: SymbolSearch): PresentationCompiler = copy(search = search) + override def withReportContext(reportContext: ReportContext): PresentationCompiler = + copy(reportContext = reportContext) + def withWorkspace(workspace: Path): PresentationCompiler = copy(folderPath = Some(workspace)) diff --git a/presentation-compiler/src/main/dotty/tools/pc/SelectionRangeProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/SelectionRangeProvider.scala index 
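// PcConvertToNamedLambdaParameters is registered as an additional code action above and
// dispatched through the same codeAction entry point. Judging by its id it targets
// lambdas written with wildcard parameters, e.g. rewriting the call below to use an
// explicit named parameter such as `x => x + 1`; that target shape is an illustrative
// guess, not something this diff spells out.
val incremented = List(1, 2, 3).map(_ + 1)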
7973f4103ff6..09c44b105555 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/SelectionRangeProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/SelectionRangeProvider.scala @@ -6,7 +6,8 @@ import java.util as ju import scala.jdk.CollectionConverters._ import scala.meta.pc.OffsetParams -import dotty.tools.dotc.ast.tpd +import dotty.tools.dotc.ast.untpd.* +import dotty.tools.dotc.ast.NavigateAST import dotty.tools.dotc.core.Contexts.Context import dotty.tools.dotc.interactive.Interactive import dotty.tools.dotc.interactive.InteractiveDriver @@ -23,10 +24,7 @@ import org.eclipse.lsp4j.SelectionRange * @param compiler Metals Global presentation compiler wrapper. * @param params offset params converted from the selectionRange params. */ -class SelectionRangeProvider( - driver: InteractiveDriver, - params: ju.List[OffsetParams] -): +class SelectionRangeProvider(driver: InteractiveDriver, params: ju.List[OffsetParams]): /** * Get the seletion ranges for the provider params @@ -44,10 +42,13 @@ class SelectionRangeProvider( val source = SourceFile.virtual(filePath.toString, text) driver.run(uri, source) val pos = driver.sourcePosition(param) - val path = - Interactive.pathTo(driver.openedTrees(uri), pos)(using ctx) + val unit = driver.compilationUnits(uri) - val bareRanges = path + val untpdPath: List[Tree] = NavigateAST + .pathTo(pos.span, List(unit.untpdTree), true).collect: + case untpdTree: Tree => untpdTree + + val bareRanges = untpdPath .flatMap(selectionRangesFromTree(pos)) val comments = @@ -78,31 +79,43 @@ class SelectionRangeProvider( end selectionRange /** Given a tree, create a seq of [[SelectionRange]]s corresponding to that tree. */ - private def selectionRangesFromTree(pos: SourcePosition)(tree: tpd.Tree)(using Context) = + private def selectionRangesFromTree(pos: SourcePosition)(tree: Tree)(using Context) = def toSelectionRange(srcPos: SourcePosition) = val selectionRange = new SelectionRange() selectionRange.setRange(srcPos.toLsp) selectionRange - val treeSelectionRange = toSelectionRange(tree.sourcePos) - - tree match - case tpd.DefDef(name, paramss, tpt, rhs) => - // If source position is within a parameter list, add a selection range covering that whole list. 
- val selectedParams = - paramss - .iterator - .flatMap: // parameter list to a sourcePosition covering the whole list - case Seq(param) => Some(param.sourcePos) - case params @ Seq(head, tail*) => - val srcPos = head.sourcePos - val lastSpan = tail.last.span - Some(SourcePosition(srcPos.source, srcPos.span union lastSpan, srcPos.outer)) - case Seq() => None - .find(_.contains(pos)) - .map(toSelectionRange) - selectedParams ++ Seq(treeSelectionRange) - case _ => Seq(treeSelectionRange) + def maybeToSelectionRange(srcPos: SourcePosition): Option[SelectionRange] = + if srcPos.contains(pos) then Some(toSelectionRange(srcPos)) else None + + val treeSelectionRange = Seq(toSelectionRange(tree.sourcePos)) + + def allArgsSelectionRange(args: List[Tree]): Option[SelectionRange] = + args match + case Nil => None + case list => + val srcPos = list.head.sourcePos + val lastSpan = list.last.span + val allArgsSrcPos = SourcePosition(srcPos.source, srcPos.span union lastSpan, srcPos.outer) + maybeToSelectionRange(allArgsSrcPos) + + val allSelectionRanges: Iterable[SelectionRange] = tree match + case vdef @ ValDef(_, _, _) => + maybeToSelectionRange(vdef.namePos) + case tdef @ TypeDef(_, _) => + maybeToSelectionRange(tdef.namePos) + case mdef @ ModuleDef(_, _) => + maybeToSelectionRange(mdef.namePos) + case DefDef(_, paramss, _, _) => + paramss.flatMap(allArgsSelectionRange) + case Apply(_, args) => + allArgsSelectionRange(args) + case TypeApply(_, args) => + allArgsSelectionRange(args) + case Function(args, _) => + allArgsSelectionRange(args) + case _ => Seq.empty + allSelectionRanges ++ treeSelectionRange private def setParent( child: SelectionRange, diff --git a/presentation-compiler/src/main/dotty/tools/pc/SignatureHelpProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/SignatureHelpProvider.scala index bd16d2ce2aa9..423ca5d8db89 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/SignatureHelpProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/SignatureHelpProvider.scala @@ -13,7 +13,7 @@ import dotty.tools.pc.utils.InteractiveEnrichments.* import org.eclipse.lsp4j as l import scala.jdk.CollectionConverters.* -import scala.meta.internal.metals.ReportContext +import scala.meta.pc.reports.ReportContext import scala.meta.pc.OffsetParams import scala.meta.pc.SymbolDocumentation import scala.meta.pc.SymbolSearch @@ -37,7 +37,7 @@ object SignatureHelpProvider: val path = Interactive.pathTo(unit.tpdTree, pos.span)(using driver.currentCtx) val localizedContext = Interactive.contextOfPath(path)(using driver.currentCtx) - val indexedContext = IndexedContext(driver.currentCtx) + val indexedContext = IndexedContext(pos)(using driver.currentCtx) given Context = localizedContext.fresh .setCompilationUnit(unit) diff --git a/presentation-compiler/src/main/dotty/tools/pc/SymbolInformationProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/SymbolInformationProvider.scala index 8bed605a87f8..49e6973af1b1 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/SymbolInformationProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/SymbolInformationProvider.scala @@ -11,7 +11,7 @@ import dotty.tools.dotc.core.Flags import dotty.tools.dotc.core.Names.* import dotty.tools.dotc.core.StdNames.nme import dotty.tools.dotc.core.Symbols.* -import dotty.tools.pc.utils.InteractiveEnrichments.deepDealias +import dotty.tools.pc.utils.InteractiveEnrichments.deepDealiasAndSimplify import dotty.tools.pc.SemanticdbSymbols import 
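// SelectionRangeProvider now walks the untyped tree and contributes extra ranges: the
// name position of a val/type/object definition and the span covering a whole argument
// or parameter list. With the cursor on `2` below, expand-selection can grow from `2`
// to `1, 2, 3` before taking in the entire call.
def sum3(a: Int, b: Int, c: Int): Int = a + b + c
val total = sum3(1, 2, 3)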
dotty.tools.pc.utils.InteractiveEnrichments.allSymbols import dotty.tools.pc.utils.InteractiveEnrichments.stripBackticks @@ -51,7 +51,7 @@ class SymbolInformationProvider(using Context): collect(classSym) visited.toList.map(SemanticdbSymbols.symbolName) val dealisedSymbol = - if sym.isAliasType then sym.info.deepDealias.typeSymbol else sym + if sym.isAliasType then sym.info.deepDealiasAndSimplify.typeSymbol else sym val classOwner = sym.ownersIterator.drop(1).find(s => s.isClass || s.is(Flags.Module)) val overridden = sym.denot.allOverriddenSymbols.toList @@ -111,7 +111,7 @@ object SymbolProvider: catch case NonFatal(e) => Nil private def normalizePackage(pkg: String): String = - pkg.replace("/", ".").nn.stripSuffix(".") + pkg.replace("/", ".").stripSuffix(".") private def toSymbols(info: SymbolInfo.SymbolParts)(using Context): List[Symbol] = def collectSymbols(denotation: Denotation): List[Symbol] = diff --git a/presentation-compiler/src/main/dotty/tools/pc/TastyUtils.scala b/presentation-compiler/src/main/dotty/tools/pc/TastyUtils.scala index 62a947aeb50b..0c2afad5f92a 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/TastyUtils.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/TastyUtils.scala @@ -40,7 +40,7 @@ object TastyUtils: end htmlTasty private def tastyHtmlPageTitle(file: URI) = - val filename = Paths.get(file).nn.getFileName().toString + val filename = Paths.get(file).getFileName().toString s"TASTy for $filename" private val standaloneHtmlStyles = diff --git a/presentation-compiler/src/main/dotty/tools/pc/WithCompilationUnit.scala b/presentation-compiler/src/main/dotty/tools/pc/WithCompilationUnit.scala index 8110db269b3b..56be6614bbd4 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/WithCompilationUnit.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/WithCompilationUnit.scala @@ -76,7 +76,9 @@ class WithCompilationUnit( } else Set.empty val all = - if sym.is(Flags.ModuleClass) then + if sym.is(Flags.Exported) then + Set(sym, sym.sourceSymbol) + else if sym.is(Flags.ModuleClass) then Set(sym, sym.companionModule, sym.companionModule.companion) else if sym.isClass then Set(sym, sym.companionModule, sym.companion.moduleClass) diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/AmmoniteFileCompletions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/AmmoniteFileCompletions.scala index 81337c7d8dcb..7c25f8a69174 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/AmmoniteFileCompletions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/AmmoniteFileCompletions.scala @@ -39,10 +39,10 @@ object AmmoniteFileCompletions: rawFileName: String )(using Context): List[CompletionValue] = val fileName: Option[String] = Option(rawFileName) - .flatMap(_.split("/").nn.lastOption.map(_.nn.stripSuffix(".amm.sc.scala"))) + .flatMap(_.split("/").lastOption.map(_.stripSuffix(".amm.sc.scala"))) val split: List[String] = Option(rawPath) - .fold(Nil)(_.split("\\$file").nn.toList.map(_.nn)) + .fold(Nil)(_.split("\\$file").toList.map(_.nn)) val editRange = selector.headOption.map { sel => if sel.sourcePos.span.isZeroExtent then posRange @@ -63,7 +63,7 @@ object AmmoniteFileCompletions: ) def matches(file: Path): Boolean = - (Files.isDirectory(file) || file.toAbsolutePath().toString.isAmmoniteScript) && + (Files.isDirectory(file) || file.toAbsolutePath().toString.isScalaScript) && query.exists(q => CompletionFuzzy.matches(q.nn, file.getFileName().toString)) (split, workspace) match @@ -71,10 +71,10 
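// deepDealias is replaced by deepDealiasAndSimplify in several providers (inlay hints,
// symbol information, match-case completions), so aliases like the ones below are both
// expanded and simplified before their types are printed or inspected.
type UserId = Int
type Users = Map[UserId, String]
def lookup(users: Users, id: UserId): Option[String] = users.get(id)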
@@ object AmmoniteFileCompletions: // drop / or \ val current = workspace.resolve(script.drop(1)) val importPath = translateImportToPath(select).drop(1) - val currentPath = current.nn.getParent().nn.resolve(importPath).nn.toAbsolutePath() + val currentPath = current.getParent().resolve(importPath).toAbsolutePath() val parentTextEdit = - if query.exists(_.nn.isEmpty()) && - Files.exists(currentPath.nn.getParent()) && Files.isDirectory( + if query.exists(_.isEmpty()) && + Files.exists(currentPath.getParent()) && Files.isDirectory( currentPath ) then List(parent) @@ -84,7 +84,7 @@ object AmmoniteFileCompletions: .iterator().nn .asScala .toList - .filter(path => !fileName.contains(path.nn.getFileName().toString.stripSuffix(".sc"))) + .filter(path => !fileName.contains(path.getFileName().toString.stripSuffix(".sc"))) .collect { case file if matches(file) => CompletionValue.FileSystemMember( diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala index 2a63d6a92a81..f1645f76cf97 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala @@ -4,7 +4,7 @@ package completions import java.nio.file.Path import scala.jdk.CollectionConverters._ -import scala.meta.internal.metals.ReportContext +import scala.meta.pc.reports.ReportContext import scala.meta.pc.OffsetParams import scala.meta.pc.PresentationCompilerConfig import scala.meta.pc.SymbolSearch @@ -107,7 +107,7 @@ class CompletionProvider( val locatedCtx = Interactive.contextOfPath(tpdPath)(using newctx) - val indexedCtx = IndexedContext(locatedCtx) + val indexedCtx = IndexedContext(pos)(using locatedCtx) val completionPos = CompletionPos.infer(pos, params, adjustedPath, wasCursorApplied)(using locatedCtx) @@ -222,7 +222,6 @@ class CompletionProvider( if config.isDetailIncludedInLabel then completion.labelWithDescription(printer) else completion.label val ident = underlyingCompletion.insertText.getOrElse(underlyingCompletion.label) - lazy val isInStringInterpolation = path match // s"My name is $name" diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionValue.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionValue.scala index 90b285bffb3a..05d97972d76e 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionValue.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionValue.scala @@ -261,13 +261,13 @@ object CompletionValue: end NamedArg case class Autofill( - value: String + value: String, + override val label: String, ) extends CompletionValue: override def completionItemKind(using Context): CompletionItemKind = CompletionItemKind.Enum override def completionItemDataKind: Integer = CompletionSource.OverrideKind.ordinal override def insertText: Option[String] = Some(value) - override def label: String = "Autofill with default values" case class Keyword(label: String, override val insertText: Option[String]) extends CompletionValue: diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala index 05dbe1ef5a43..6e79f5a293e5 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala @@ 
-5,7 +5,7 @@ import java.nio.file.Path import java.nio.file.Paths import scala.collection.mutable -import scala.meta.internal.metals.ReportContext +import scala.meta.pc.reports.ReportContext import scala.meta.internal.mtags.CoursierComplete import scala.meta.internal.pc.{IdentifierComparator, MemberOrdering, CompletionFuzzy} import scala.meta.pc.* @@ -569,6 +569,7 @@ class Completions( then indexedContext.lookupSym(sym) match case IndexedContext.Result.InScope => false + case IndexedContext.Result.Missing if indexedContext.rename(sym).isDefined => false case _ if completionMode.is(Mode.ImportOrExport) => visit( CompletionValue.Workspace( diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/InterpolatorCompletions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/InterpolatorCompletions.scala index da46e5167834..9cceff7310c6 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/InterpolatorCompletions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/InterpolatorCompletions.scala @@ -1,7 +1,7 @@ package dotty.tools.pc.completions import scala.collection.mutable.ListBuffer -import scala.meta.internal.metals.ReportContext +import scala.meta.pc.reports.ReportContext import scala.meta.internal.pc.CompletionFuzzy import scala.meta.internal.pc.InterpolationSplice import scala.meta.pc.PresentationCompilerConfig diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/MatchCaseCompletions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/MatchCaseCompletions.scala index 2efcba48e82d..2e89b4e5bb99 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/MatchCaseCompletions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/MatchCaseCompletions.scala @@ -6,7 +6,7 @@ import java.net.URI import scala.collection.mutable import scala.collection.mutable.ListBuffer import scala.jdk.CollectionConverters._ -import scala.meta.internal.metals.ReportContext +import scala.meta.pc.reports.ReportContext import scala.meta.internal.pc.CompletionFuzzy import scala.meta.pc.PresentationCompilerConfig import scala.meta.pc.SymbolSearch @@ -93,7 +93,7 @@ object CaseKeywordCompletion: case (Ident(v), tpe) => v.decoded == value case (Select(_, v), tpe) => v.decoded == value case t => false - .map((_, id) => argPts(id).widen.deepDealias) + .map((_, id) => argPts(id).widen.deepDealiasAndSimplify) /* Parent is a function expecting a case match expression */ case TreeApply(fun, _) if !fun.tpe.isErroneous => fun.tpe.paramInfoss match @@ -103,12 +103,12 @@ object CaseKeywordCompletion: ) => val args = head.argTypes.init if args.length > 1 then - Some(definitions.tupleType(args).widen.deepDealias) - else args.headOption.map(_.widen.deepDealias) + Some(definitions.tupleType(args).widen.deepDealiasAndSimplify) + else args.headOption.map(_.widen.deepDealiasAndSimplify) case _ => None case _ => None case sel => - Some(sel.tpe.widen.deepDealias) + Some(sel.tpe.widen.deepDealiasAndSimplify) selTpe .collect { case selTpe if selTpe != NoType => @@ -147,7 +147,7 @@ object CaseKeywordCompletion: definitions.NullClass, definitions.NothingClass, ) - val tpes = Set(selectorSym, selectorSym.companion) + val tpes = Set(selectorSym, selectorSym.companion).filter(_ != NoSymbol) def isSubclass(sym: Symbol) = tpes.exists(par => sym.isSubClass(par)) def visit(symImport: SymbolImport): Unit = @@ -174,8 +174,9 @@ object CaseKeywordCompletion: indexedContext.scopeSymbols .foreach(s => - val ts = 
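// The added `Result.Missing if indexedContext.rename(sym).isDefined` case treats a
// symbol that is only reachable under a renamed import like one that is already in
// scope, so completions do not propose importing it a second time. A rename of that
// kind:
import java.util.{Map as JMap}

def emptyUsers: JMap[String, Int] = java.util.Collections.emptyMap()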
s.info.deepDealias.typeSymbol - if isValid(ts) then visit(autoImportsGen.inferSymbolImport(ts)) + val ts = if s.is(Flags.Module) then s.info.typeSymbol else s.dealiasType + if isValid(ts) then + visit(autoImportsGen.inferSymbolImport(ts)) ) // Step 2: walk through known subclasses of sealed types. val sealedDescs = subclassesForType( @@ -185,6 +186,7 @@ object CaseKeywordCompletion: val symbolImport = autoImportsGen.inferSymbolImport(sym) visit(symbolImport) } + val res = result.result().flatMap { case si @ SymbolImport(sym, name, importSel) => completionGenerator.labelForCaseMember(sym, name.value).map { @@ -277,8 +279,8 @@ object CaseKeywordCompletion: clientSupportsSnippets ) - val tpeStr = printer.tpe(selector.tpe.widen.deepDealias.bounds.hi) - val tpe = selector.typeOpt.widen.deepDealias.bounds.hi match + val tpeStr = printer.tpe(selector.tpe.widen.deepDealiasAndSimplify.bounds.hi) + val tpe = selector.typeOpt.widen.deepDealiasAndSimplify.bounds.hi match case tr @ TypeRef(_, _) => tr.underlying case t => t @@ -293,7 +295,6 @@ object CaseKeywordCompletion: val (labels, imports) = sortedSubclasses.map((si, label) => (label, si.importSel)).unzip - val (obracket, cbracket) = if noIndent then (" {", "}") else ("", "") val basicMatch = CompletionValue.MatchCompletion( "match", diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/NamedArgCompletions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/NamedArgCompletions.scala index dd3a910beb4f..faf6d715d8cf 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/NamedArgCompletions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/NamedArgCompletions.scala @@ -2,34 +2,23 @@ package dotty.tools.pc.completions import scala.util.Try -import dotty.tools.dotc.ast.Trees.ValDef import dotty.tools.dotc.ast.tpd.* import dotty.tools.dotc.ast.untpd import dotty.tools.dotc.core.Constants.Constant -import dotty.tools.dotc.core.ContextOps.localContext import dotty.tools.dotc.core.Contexts.Context import dotty.tools.dotc.core.Flags -import dotty.tools.dotc.core.Flags.Method import dotty.tools.dotc.core.NameKinds.DefaultGetterName import dotty.tools.dotc.core.Names.Name import dotty.tools.dotc.core.StdNames.* -import dotty.tools.dotc.core.SymDenotations.NoDenotation -import dotty.tools.dotc.core.Symbols import dotty.tools.dotc.core.Symbols.defn -import dotty.tools.dotc.core.Symbols.NoSymbol import dotty.tools.dotc.core.Symbols.Symbol -import dotty.tools.dotc.core.Types.AndType -import dotty.tools.dotc.core.Types.AppliedType -import dotty.tools.dotc.core.Types.MethodType -import dotty.tools.dotc.core.Types.OrType -import dotty.tools.dotc.core.Types.RefinedType -import dotty.tools.dotc.core.Types.TermRef -import dotty.tools.dotc.core.Types.Type -import dotty.tools.dotc.core.Types.TypeBounds -import dotty.tools.dotc.core.Types.WildcardType +import dotty.tools.dotc.core.Types.* import dotty.tools.pc.IndexedContext import dotty.tools.pc.utils.InteractiveEnrichments.* import scala.annotation.tailrec +import dotty.tools.pc.ApplyArgsExtractor +import dotty.tools.pc.ParamSymbol +import dotty.tools.pc.ApplyExtractor object NamedArgCompletions: @@ -40,36 +29,13 @@ object NamedArgCompletions: clientSupportsSnippets: Boolean, )(using ctx: Context): List[CompletionValue] = path match - case (ident: Ident) :: ValDef(_, _, _) :: Block(_, app: Apply) :: _ - if !app.fun.isInfix => + case (ident: Ident) :: ApplyExtractor(app) => contribute( - Some(ident), + ident, app, indexedContext, clientSupportsSnippets, 
) - case (ident: Ident) :: rest => - def getApplyForContextFunctionParam(path: List[Tree]): Option[Apply] = - path match - // fun(arg@@) - case (app: Apply) :: _ => Some(app) - // fun(arg@@), where fun(argn: Context ?=> SomeType) - // recursively matched for multiple context arguments, e.g. Context1 ?=> Context2 ?=> SomeType - case (_: DefDef) :: Block(List(_), _: Closure) :: rest => - getApplyForContextFunctionParam(rest) - case _ => None - val contribution = - for - app <- getApplyForContextFunctionParam(rest) - if !app.fun.isInfix - yield - contribute( - Some(ident), - app, - indexedContext, - clientSupportsSnippets, - ) - contribution.getOrElse(Nil) case (app: Apply) :: _ => /** * def foo(aaa: Int, bbb: Int, ccc: Int) = ??? @@ -83,7 +49,7 @@ object NamedArgCompletions: untypedPath match case (ident: Ident) :: (app: Apply) :: _ => contribute( - Some(ident), + ident, app, indexedContext, clientSupportsSnippets, @@ -96,7 +62,7 @@ object NamedArgCompletions: end contribute private def contribute( - ident: Option[Ident], + ident: Ident, apply: Apply, indexedContext: IndexedContext, clientSupportsSnippets: Boolean, @@ -107,159 +73,14 @@ object NamedArgCompletions: case Literal(Constant(null)) => true // nullLiteral case _ => false - def collectArgss(a: Apply): List[List[Tree]] = - def stripContextFuntionArgument(argument: Tree): List[Tree] = - argument match - case Block(List(d: DefDef), _: Closure) => - d.rhs match - case app: Apply => - app.args - case b @ Block(List(_: DefDef), _: Closure) => - stripContextFuntionArgument(b) - case _ => Nil - case v => List(v) - - val args = a.args.flatMap(stripContextFuntionArgument) - a.fun match - case app: Apply => collectArgss(app) :+ args - case _ => List(args) - end collectArgss - - val method = apply.fun - - val argss = collectArgss(apply) - - def fallbackFindApply(sym: Symbol) = - sym.info.member(nme.apply) match - case NoDenotation => Nil - case den => List(den.symbol) - - // fallback for when multiple overloaded methods match the supplied args - def fallbackFindMatchingMethods() = - def maybeNameAndIndexedContext( - method: Tree - ): Option[(Name, IndexedContext)] = - method match - case Ident(name) => Some((name, indexedContext)) - case Select(This(_), name) => Some((name, indexedContext)) - case Select(from, name) => - val symbol = from.symbol - val ownerSymbol = - if symbol.is(Method) && symbol.owner.isClass then - Some(symbol.owner) - else Try(symbol.info.classSymbol).toOption - ownerSymbol.map(sym => - (name, IndexedContext(context.localContext(from, sym))) - ) - case Apply(fun, _) => maybeNameAndIndexedContext(fun) - case _ => None - val matchingMethods = - for - (name, indexedContext) <- maybeNameAndIndexedContext(method) - potentialMatches <- indexedContext.findSymbol(name) - yield - potentialMatches.collect { - case m - if m.is(Flags.Method) && - m.vparamss.length >= argss.length && - Try(m.isAccessibleFrom(apply.symbol.info)).toOption - .getOrElse(false) && - m.vparamss - .zip(argss) - .reverse - .zipWithIndex - .forall { case (pair, index) => - FuzzyArgMatcher(m.tparams) - .doMatch(allArgsProvided = index != 0, ident) - .tupled(pair) - } => - m - } - matchingMethods.getOrElse(Nil) - end fallbackFindMatchingMethods - - val matchingMethods: List[Symbols.Symbol] = - if method.symbol.paramSymss.nonEmpty then - val allArgsAreSupplied = - val vparamss = method.symbol.vparamss - vparamss.length == argss.length && vparamss - .zip(argss) - .lastOption - .exists { case (baseParams, baseArgs) => - baseArgs.length == baseParams.length - } - // 
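// The special cases being removed here (context-function arguments, manual collection of
// curried argument lists) are now handled by the ApplyExtractor / ApplyArgsExtractor
// helpers; the completions they feed are the named-argument suggestions (`times = `)
// offered at call sites like this one.
def greet(name: String, times: Int = 1): String = name * times
val greeting = greet("hi", times = 2)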
``` - // m(arg : Int) - // m(arg : Int, anotherArg : Int) - // m(a@@) - // ``` - // complier will choose the first `m`, so we need to manually look for the other one - if allArgsAreSupplied then - val foundPotential = fallbackFindMatchingMethods() - if foundPotential.contains(method.symbol) then foundPotential - else method.symbol :: foundPotential - else List(method.symbol) - else if method.symbol.is(Method) || method.symbol == NoSymbol then - fallbackFindMatchingMethods() - else fallbackFindApply(method.symbol) - end if - end matchingMethods - - val allParams = matchingMethods.flatMap { methodSym => - val vparamss = methodSym.vparamss - - // get params and args we are interested in - // e.g. - // in the following case, the interesting args and params are - // - params: [apple, banana] - // - args: [apple, b] - // ``` - // def curry(x: Int)(apple: String, banana: String) = ??? - // curry(1)(apple = "test", b@@) - // ``` - val (baseParams0, baseArgs) = - vparamss.zip(argss).lastOption.getOrElse((Nil, Nil)) + val argsAndParams = ApplyArgsExtractor.getArgsAndParams( + Some(indexedContext), + apply, + ident.span + ) - val baseParams: List[ParamSymbol] = - def defaultBaseParams = baseParams0.map(JustSymbol(_)) - @tailrec - def getRefinedParams(refinedType: Type, level: Int): List[ParamSymbol] = - if level > 0 then - val resultTypeOpt = - refinedType match - case RefinedType(AppliedType(_, args), _, _) => args.lastOption - case AppliedType(_, args) => args.lastOption - case _ => None - resultTypeOpt match - case Some(resultType) => getRefinedParams(resultType, level - 1) - case _ => defaultBaseParams - else - refinedType match - case RefinedType(AppliedType(_, args), _, MethodType(ri)) => - baseParams0.zip(ri).zip(args).map { case ((sym, name), arg) => - RefinedSymbol(sym, name, arg) - } - case _ => defaultBaseParams - // finds param refinements for lambda expressions - // val hello: (x: Int, y: Int) => Unit = (x, _) => println(x) - @tailrec - def refineParams(method: Tree, level: Int): List[ParamSymbol] = - method match - case Select(Apply(f, _), _) => refineParams(f, level + 1) - case Select(h, name) => - // for Select(foo, name = apply) we want `foo.symbol` - if name == nme.apply then getRefinedParams(h.symbol.info, level) - else getRefinedParams(method.symbol.info, level) - case Apply(f, _) => - refineParams(f, level + 1) - case _ => getRefinedParams(method.symbol.info, level) - refineParams(method, 0) - end baseParams - - val args = ident - .map(i => baseArgs.filterNot(_ == i)) - .getOrElse(baseArgs) - .filterNot(isUselessLiteral) + val allParams = argsAndParams.flatMap { case (baseArgs, baseParams) => + val args = baseArgs.filterNot( a => a == ident || isUselessLiteral(a)) @tailrec def isDefaultArg(t: Tree): Boolean = t match @@ -294,9 +115,8 @@ object NamedArgCompletions: ) } - val prefix = ident - .map(_.name.toString) - .getOrElse("") + val prefix = + ident.name.toString .replace(Cursor.value, "") .nn @@ -331,7 +151,7 @@ object NamedArgCompletions: allParams.exists(param => param.name.startsWith(prefix)) def isExplicitlyCalled = suffix.startsWith(prefix) def hasParamsToFill = allParams.count(!_.symbol.is(Flags.HasDefault)) > 1 - if clientSupportsSnippets && matchingMethods.length == 1 && (shouldShow || isExplicitlyCalled) && hasParamsToFill + if clientSupportsSnippets && argsAndParams.length == 1 && (shouldShow || isExplicitlyCalled) && hasParamsToFill then val editText = allParams.zipWithIndex .collect { @@ -339,9 +159,16 @@ object NamedArgCompletions: 
s"${param.nameBackticked.replace("$", "$$")} = $${${index + 1}${findDefaultValue(param)}}" } .mkString(", ") + val labelText = allParams + .collect { + case param if !param.symbol.is(Flags.HasDefault) => + s"${param.nameBackticked.replace("$", "$$")} = ???" + } + .mkString(", ") List( CompletionValue.Autofill( - editText + editText, + labelText, ) ) else List.empty @@ -369,73 +196,4 @@ object NamedArgCompletions: ) ::: findPossibleDefaults() ::: fillAllFields() end contribute - extension (method: Symbols.Symbol) - def vparamss(using Context) = method.filteredParamss(_.isTerm) - def tparams(using Context) = method.filteredParamss(_.isType).flatten - def filteredParamss(f: Symbols.Symbol => Boolean)(using Context) = - method.paramSymss.filter(params => params.forall(f)) end NamedArgCompletions - -class FuzzyArgMatcher(tparams: List[Symbols.Symbol])(using Context): - - /** - * A heuristic for checking if the passed arguments match the method's arguments' types. - * For non-polymorphic methods we use the subtype relation (`<:<`) - * and for polymorphic methods we use a heuristic. - * We check the args types not the result type. - */ - def doMatch( - allArgsProvided: Boolean, - ident: Option[Ident] - )(expectedArgs: List[Symbols.Symbol], actualArgs: List[Tree]) = - (expectedArgs.length == actualArgs.length || - (!allArgsProvided && expectedArgs.length >= actualArgs.length)) && - actualArgs.zipWithIndex.forall { - case (arg: Ident, _) if ident.contains(arg) => true - case (NamedArg(name, arg), _) => - expectedArgs.exists { expected => - expected.name == name && (!arg.hasType || arg.typeOpt.unfold - .fuzzyArg_<:<(expected.info)) - } - case (arg, i) => - !arg.hasType || arg.typeOpt.unfold.fuzzyArg_<:<(expectedArgs(i).info) - } - - extension (arg: Type) - def fuzzyArg_<:<(expected: Type) = - if tparams.isEmpty then arg <:< expected - else arg <:< substituteTypeParams(expected) - def unfold = - arg match - case arg: TermRef => arg.underlying - case e => e - - private def substituteTypeParams(t: Type): Type = - t match - case e if tparams.exists(_ == e.typeSymbol) => - val matchingParam = tparams.find(_ == e.typeSymbol).get - matchingParam.info match - case b @ TypeBounds(_, _) => WildcardType(b) - case _ => WildcardType - case o @ OrType(e1, e2) => - OrType(substituteTypeParams(e1), substituteTypeParams(e2), o.isSoft) - case AndType(e1, e2) => - AndType(substituteTypeParams(e1), substituteTypeParams(e2)) - case AppliedType(et, eparams) => - AppliedType(et, eparams.map(substituteTypeParams)) - case _ => t - -end FuzzyArgMatcher - -sealed trait ParamSymbol: - def name: Name - def info: Type - def symbol: Symbol - def nameBackticked(using Context) = name.decoded.backticked - -case class JustSymbol(symbol: Symbol)(using Context) extends ParamSymbol: - def name: Name = symbol.name - def info: Type = symbol.info - -case class RefinedSymbol(symbol: Symbol, name: Name, info: Type) - extends ParamSymbol diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/OverrideCompletions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/OverrideCompletions.scala index f5c15ca6df0e..807f959a2406 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/OverrideCompletions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/OverrideCompletions.scala @@ -4,7 +4,7 @@ package completions import java.util as ju import scala.jdk.CollectionConverters._ -import scala.meta.internal.metals.ReportContext +import scala.meta.pc.reports.ReportContext import 
scala.meta.pc.OffsetParams import scala.meta.pc.PresentationCompilerConfig import scala.meta.pc.PresentationCompilerConfig.OverrideDefFormat @@ -191,7 +191,7 @@ object OverrideCompletions: template :: path case path => path - val indexedContext = IndexedContext( + val indexedContext = IndexedContext(pos)(using Interactive.contextOfPath(path)(using newctx) ) import indexedContext.ctx diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/ScalaCliCompletions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/ScalaCliCompletions.scala index e2a0a033ee6b..8df727b14155 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/ScalaCliCompletions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/ScalaCliCompletions.scala @@ -13,14 +13,20 @@ class ScalaCliCompletions( ): def unapply(path: List[Tree]) = def scalaCliDep = CoursierComplete.isScalaCliDep( - pos.lineContent.take(pos.column).stripPrefix("/*